ext
stringclasses
9 values
sha
stringlengths
40
40
content
stringlengths
3
1.04M
py
b4112fd5668de45a8592146f3f3f09c9dd8e9352
# ******* WARNING - AUTO GENERATED CODE - DO NOT EDIT ******* from .VmomiSupport import CreateDataType, CreateManagedType from .VmomiSupport import CreateEnumType from .VmomiSupport import AddVersion, AddVersionParent from .VmomiSupport import AddBreakingChangesInfo from .VmomiSupport import F_LINK, F_LINKABLE from .VmomiSupport import F_OPTIONAL, F_SECRET from .VmomiSupport import newestVersions, ltsVersions from .VmomiSupport import dottedVersions, oldestVersions AddVersion("vmodl.query.version.version4", "", "", 0, "vim25") AddVersion("vmodl.query.version.version3", "", "", 0, "vim25") AddVersion("vmodl.query.version.version2", "", "", 0, "vim25") AddVersion("vmodl.query.version.version1", "", "", 0, "vim25") AddVersion("vim.version.pcieHotPlugOfFPT", "vim25", "f2A24AFB2", 0, "vim25") AddVersion("vim.version.FileLockInfo_GSS34", "vim25", "f5646F82F", 0, "vim25") AddVersion("vim.version.pr1429825", "vim25", "f970D10CB", 0, "vim25") AddVersion("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim25", "f6DD20FAA", 0, "vim25") AddVersion("vim.version.MemoryTiering", "vim25", "f55387E2D", 0, "vim25") AddVersion("vim.version.version8", "vim25", "5.1", 0, "vim25") AddVersion("vim.version.version9", "vim25", "5.5", 0, "vim25") AddVersion("vim.version.version6", "vim25", "4.1", 0, "vim25") AddVersion("vim.version.version7", "vim25", "5.0", 0, "vim25") AddVersion("vim.version.DiskGroupVMC", "vim25", "fC65383A0", 0, "vim25") AddVersion("vim.version.TrustAuthority_V4", "vim25", "f1842AA94", 0, "vim25") AddVersion("vim.version.version1", "vim2", "2.0", 0, "vim25") AddVersion("vim.version.v7_0_1_1", "vim25", "7.0.1.1", 0, "vim25") AddVersion("vim.version.version4", "vim25", "2.5u2server", 0, "vim25") AddVersion("vim.version.version5", "vim25", "4.0", 0, "vim25") AddVersion("vim.version.SRIOVValidNumVFs", "vim25", "f3FE936E8", 0, "vim25") AddVersion("vim.version.version2", "vim25", "2.5", 0, "vim25") AddVersion("vim.version.version3", "vim25", "2.5u2", 1, "vim25") 
AddVersion("vim.version.FCD_VRA_SUPPORT", "vim25", "f87755351", 0, "vim25") AddVersion("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim25", "fBA2C0632", 0, "vim25") AddVersion("vim.version.guestDetailedData", "vim25", "fF45CC17D", 0, "vim25") AddVersion("vim.version.VmxRebootPowerOff", "vim25", "fC5B992F6", 0, "vim25") AddVersion("vim.version.SGX_MPA_VMCheck", "vim25", "f4777C100", 0, "vim25") AddVersion("vim.version.ClusteredEsx_V1", "vim25", "fD4BF5568", 0, "vim25") AddVersion("vim.version.vdcs", "vim25", "fD75ED602", 0, "vim25") AddVersion("vmodl.version.version0", "", "", 0, "vim25") AddVersion("vim.version.NFS_VMKPORTBIND", "vim25", "f100E080E", 0, "vim25") AddVersion("vmodl.version.version1", "", "", 0, "vim25") AddVersion("vmodl.version.version2", "", "", 0, "vim25") AddVersion("vim.version.v6_9_1", "vim25", "6.9.1", 0, "vim25") AddVersion("vim.version.fourKnStorageSupport", "vim25", "f58537E79", 0, "vim25") AddVersion("vim.version.batchRenameSupport", "vim25", "f910F753F", 0, "vim25") AddVersion("vim.version.resetportstatistics", "vim25", "fB93B1650", 0, "vim25") AddVersion("vim.version.VM_CLONE_SWITCH_HOST_EMM", "vim25", "f17EB30E7", 0, "vim25") AddVersion("vim.version.VMC_NFS_SUPPORT", "vim25", "f9170D778", 0, "vim25") AddVersion("vim.version.NsxLiveUpdate", "vim25", "fD9229B0A", 0, "vim25") AddVersion("vim.version.VDS_ReadOnlyDisk", "vim25", "fF3B7EFEC", 0, "vim25") AddVersion("vim.version.SGX_MPA_HostReg", "vim25", "fF8CC7A34", 0, "vim25") AddVersion("vim.version.v7_0_0_2", "vim25", "7.0.0.2", 0, "vim25") AddVersion("vim.version.GreenMetrics", "vim25", "fE9869FEA", 0, "vim25") AddVersion("vim.version.vVol_datastore_scalability", "vim25", "f9B94E1E6", 0, "vim25") AddVersion("vim.version.hostVendorSpecificStatus", "vim25", "fA87B5D34", 0, "vim25") AddVersion("vim.version.AssignHwCompositeDev", "vim25", "fB733F8D8", 0, "vim25") AddVersion("vim.version.VMcrypt_V4", "vim25", "fFDAD9FAD", 0, "vim25") AddVersion("vim.version.hostAccessManager", "vim25", 
"f105EB991", 0, "vim25") AddVersion("vim.version.ocmSupportedForReconfigure", "vim25", "f78C0E89B", 0, "vim25") AddVersion("vim.version.v6_8_7", "vim25", "6.8.7", 0, "vim25") AddVersion("vmodl.reflect.version.version1", "reflect", "1.0", 0, "reflect") AddVersion("vmodl.reflect.version.version2", "reflect", "2.0", 0, "reflect") AddVersion("vim.version.v8_0_0_0", "vim25", "8.0.0.0", 0, "vim25") AddVersion("vim.version.VQAT", "vim25", "f856E4131", 0, "vim25") AddVersion("vim.version.VirtualTopo", "vim25", "f333BD728", 0, "vim25") AddVersion("vim.version.VCDP_NestedFilters", "vim25", "fB4B76112", 0, "vim25") AddVersion("vim.version.Tools_Update_Health", "vim25", "f2FE41458", 0, "vim25") AddVersion("vim.version.nativeSnapshot", "vim25", "fD212522B", 0, "vim25") AddVersion("vim.version.CPU_Scheduler_Info", "vim25", "f35A4D64E", 0, "vim25") AddVersion("vim.version.hostProfiles", "vim25", "fFA1FEDF6", 0, "vim25") AddVersion("vim.version.GraphicsDRS", "vim25", "fAB1677B5", 0, "vim25") AddVersion("vim.version.VM_CLONE_REKEY_TPM", "vim25", "fECECD7C0", 0, "vim25") AddVersion("vim.version.v7_0_3_1", "vim25", "7.0.3.1", 0, "vim25") AddVersion("vim.version.FCD_CATALOG_HEALTH", "vim25", "f980D7E36", 0, "vim25") AddVersion("vim.version.v7_0_3_2", "vim25", "7.0.3.2", 0, "vim25") AddVersion("vim.version.v7_0_3_0", "vim25", "7.0.3.0", 0, "vim25") AddVersion("vim.version.hwh", "vim25", "fE3F4BE65", 0, "vim25") AddVersion("vim.version.version13", "vim25", "6.7.1", 0, "vim25") AddVersion("vim.version.smartnic_vc", "vim25", "f3F1FE1BE", 0, "vim25") AddVersion("vim.version.version14", "vim25", "6.7.2", 0, "vim25") AddVersion("vim.version.version15", "vim25", "6.7.3", 0, "vim25") AddVersion("vim.version.DRS_LB_REASONCODE", "vim25", "fF2D429B0", 0, "vim25") AddVersion("vim.version.VSAN2_Configure", "vim25", "f398B4406", 0, "vim25") AddVersion("vim.version.HWv20", "vim25", "fBCB777B5", 0, "vim25") AddVersion("vim.version.gosCrashRemediation", "vim25", "f9AFCF47C", 0, "vim25") 
AddVersion("vim.version.unstable", "vim25", "uE7641C73", 0, "vim25") AddVersion("vim.version.VSAN_DeltaCompEnsureDurability", "vim25", "fCF4CFF8B", 0, "vim25") AddVersion("vim.version.FT_DRS_METRO_CLUSTER", "vim25", "f41F427CD", 0, "vim25") AddVersion("vim.version.bmcInfo", "vim25", "f831D878E", 0, "vim25") AddVersion("vim.version.disabled", "vim25", "", 0, "vim25") AddVersion("vim.version.VCSOF_173", "vim25", "f406FC7D9", 0, "vim25") AddVersion("vim.version.pciSriovExtendedID", "vim25", "f53821AC6", 0, "vim25") AddVersion("vim.version.vmxnet3UPT", "vim25", "fE470178C", 0, "vim25") AddVersion("vim.version.version10", "vim25", "6.0", 0, "vim25") AddVersion("vim.version.version11", "vim25", "6.5", 0, "vim25") AddVersion("vim.version.VMcrypt_IntegrityProtection", "vim25", "fBB84F690", 0, "vim25") AddVersion("vim.version.version12", "vim25", "6.7", 0, "vim25") AddVersion("vim.version.pciDeviceExt", "vim25", "f699DCD5E", 0, "vim25") AddVersion("vim.version.toolsOffHost", "vim25", "fAE457D22", 0, "vim25") AddVersion("vim.version.vHT", "vim25", "fC3B65CB2", 0, "vim25") AddVersion("vim.version.ClusterConfigManagerV2", "vim25", "fA6EE2176", 0, "vim25") AddVersion("vim.version.smartnic_network", "vim25", "f448DA156", 0, "vim25") AddVersion("vim.version.hostCertificateManagement", "vim25", "fAFBCB321", 0, "vim25") AddVersion("vim.version.DVX", "vim25", "f0B22FAE9", 0, "vim25") AddVersion("vim.version.vmMisc", "vim25", "fC7DFE6E3", 0, "vim25") AddVersion("vim.version.VLCM_QuickLaunchPreload", "vim25", "f96E2342C", 0, "vim25") AddVersion("vim.version.VmcExternalStorageSupport", "vim25", "fEA84CCC2", 0, "vim25") AddVersion("vim.version.LSI2PVSCSI", "vim25", "f2EC4DE1F", 0, "vim25") AddVersion("vim.version.ProvisioningEventRefresh", "vim25", "f37095AFE", 0, "vim25") AddVersion("vim.version.PodVMOnVDS", "vim25", "f7304027C", 0, "vim25") AddVersion("vim.version.ClusterConfigManagerTransition", "vim25", "f9D1A1F53", 0, "vim25") AddVersion("vim.version.LiveUpdate", "vim25", 
"fF2990246", 0, "vim25") AddVersion("vim.version.OVF_SINGLEDEPLOY_API", "vim25", "fD25CEA44", 0, "vim25") AddVersion("vim.version.v7_0_2_0", "vim25", "7.0.2.0", 0, "vim25") AddVersion("vim.version.pr1803450", "vim25", "f1580B34E", 0, "vim25") AddVersion("vim.version.v7_0_2_1", "vim25", "7.0.2.1", 0, "vim25") AddVersion("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim25", "f13DC0E3A", 0, "vim25") AddVersion("vim.version.VMcrypt_OnlineVMEncryption", "vim25", "f5A633B8D", 0, "vim25") AddVersion("vim.version.FT_VBS_SUPPORT", "vim25", "f8707D5C3", 0, "vim25") AddVersion("vim.version.WCP_FaultDomains", "vim25", "f00EB2E84", 0, "vim25") AddVersion("vim.version.VMcrypt3_KeyCustomAttribute", "vim25", "fBB40D40F", 0, "vim25") AddVersion("vim.version.dnd", "vim25", "f60B92C9A", 0, "vim25") AddVersion("vim.version.optional_virtual_disks", "vim25", "f3E20532F", 0, "vim25") AddVersion("vim.version.hwh2_0", "vim25", "f238D0B36", 0, "vim25") AddVersion("vim.version.v7_0_1_0", "vim25", "7.0.1.0", 0, "vim25") AddVersion("vim.version.E2ENativeNVMeSupport", "vim25", "fBA12D7EA", 0, "vim25") AddVersion("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim25", "f2EFE8B5B", 0, "vim25") AddVersion("vim.version.EventsOrdering", "vim25", "f90887BF7", 0, "vim25") AddVersion("vim.version.FCD_PERFORMANCE", "vim25", "f28CFD3D4", 0, "vim25") AddVersion("vim.version.PMemV2", "vim25", "fC83B16A4", 0, "vim25") AddVersion("vim.version.v7_0", "vim25", "7.0.0.0", 0, "vim25") AddVersion("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim25", "f5930569F", 0, "vim25") AddVersion("vim.version.nsx_uens_u2", "vim25", "fE414ECF0", 0, "vim25") AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version4") AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version3") AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version2") AddVersionParent("vmodl.query.version.version4", "vmodl.query.version.version1") AddVersionParent("vmodl.query.version.version4", 
"vmodl.version.version0") AddVersionParent("vmodl.query.version.version4", "vmodl.version.version1") AddVersionParent("vmodl.query.version.version4", "vmodl.version.version2") AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version3") AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version2") AddVersionParent("vmodl.query.version.version3", "vmodl.query.version.version1") AddVersionParent("vmodl.query.version.version3", "vmodl.version.version0") AddVersionParent("vmodl.query.version.version3", "vmodl.version.version1") AddVersionParent("vmodl.query.version.version2", "vmodl.query.version.version2") AddVersionParent("vmodl.query.version.version2", "vmodl.query.version.version1") AddVersionParent("vmodl.query.version.version2", "vmodl.version.version0") AddVersionParent("vmodl.query.version.version2", "vmodl.version.version1") AddVersionParent("vmodl.query.version.version1", "vmodl.query.version.version1") AddVersionParent("vmodl.query.version.version1", "vmodl.version.version0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version4") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version3") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.query.version.version1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.pcieHotPlugOfFPT") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version8") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version9") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version6") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version7") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_1_1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version4") 
AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version5") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version3") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.version.version2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v6_9_1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_0_2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v6_8_7") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.reflect.version.version1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vmodl.reflect.version.version2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v8_0_0_0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_2") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_3_0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version13") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version14") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version15") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version10") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version11") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.version12") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_2_0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_2_1") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0_1_0") AddVersionParent("vim.version.pcieHotPlugOfFPT", "vim.version.v7_0") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version4") AddVersionParent("vim.version.FileLockInfo_GSS34", 
"vmodl.query.version.version3") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.query.version.version1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.FileLockInfo_GSS34") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version8") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version9") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version6") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version7") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_1_1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version4") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version5") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version3") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version0") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.version.version2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v6_9_1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_0_2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v6_8_7") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.reflect.version.version1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vmodl.reflect.version.version2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v8_0_0_0") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_2") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_3_0") 
AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version13") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version14") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version15") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version10") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version11") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.version12") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_2_0") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_2_1") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0_1_0") AddVersionParent("vim.version.FileLockInfo_GSS34", "vim.version.v7_0") AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version4") AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version3") AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version2") AddVersionParent("vim.version.pr1429825", "vmodl.query.version.version1") AddVersionParent("vim.version.pr1429825", "vim.version.pr1429825") AddVersionParent("vim.version.pr1429825", "vim.version.version8") AddVersionParent("vim.version.pr1429825", "vim.version.version9") AddVersionParent("vim.version.pr1429825", "vim.version.version6") AddVersionParent("vim.version.pr1429825", "vim.version.version7") AddVersionParent("vim.version.pr1429825", "vim.version.version1") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_1_1") AddVersionParent("vim.version.pr1429825", "vim.version.version4") AddVersionParent("vim.version.pr1429825", "vim.version.version5") AddVersionParent("vim.version.pr1429825", "vim.version.version2") AddVersionParent("vim.version.pr1429825", "vim.version.version3") AddVersionParent("vim.version.pr1429825", "vmodl.version.version0") AddVersionParent("vim.version.pr1429825", "vmodl.version.version1") AddVersionParent("vim.version.pr1429825", "vmodl.version.version2") 
AddVersionParent("vim.version.pr1429825", "vim.version.v6_9_1") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_0_2") AddVersionParent("vim.version.pr1429825", "vim.version.v6_8_7") AddVersionParent("vim.version.pr1429825", "vmodl.reflect.version.version1") AddVersionParent("vim.version.pr1429825", "vmodl.reflect.version.version2") AddVersionParent("vim.version.pr1429825", "vim.version.v8_0_0_0") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_1") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_2") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_3_0") AddVersionParent("vim.version.pr1429825", "vim.version.version13") AddVersionParent("vim.version.pr1429825", "vim.version.version14") AddVersionParent("vim.version.pr1429825", "vim.version.version15") AddVersionParent("vim.version.pr1429825", "vim.version.version10") AddVersionParent("vim.version.pr1429825", "vim.version.version11") AddVersionParent("vim.version.pr1429825", "vim.version.version12") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_2_0") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_2_1") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0_1_0") AddVersionParent("vim.version.pr1429825", "vim.version.v7_0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version4") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version3") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.query.version.version1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version8") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version9") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", 
"vim.version.version6") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version7") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version4") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version5") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version3") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.version.version2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v6_9_1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v6_8_7") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version13") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version14") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version15") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", 
"vim.version.version10") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version11") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.version12") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT", "vim.version.v7_0") AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version4") AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version3") AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version2") AddVersionParent("vim.version.MemoryTiering", "vmodl.query.version.version1") AddVersionParent("vim.version.MemoryTiering", "vim.version.MemoryTiering") AddVersionParent("vim.version.MemoryTiering", "vim.version.version8") AddVersionParent("vim.version.MemoryTiering", "vim.version.version9") AddVersionParent("vim.version.MemoryTiering", "vim.version.version6") AddVersionParent("vim.version.MemoryTiering", "vim.version.version7") AddVersionParent("vim.version.MemoryTiering", "vim.version.version1") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_1_1") AddVersionParent("vim.version.MemoryTiering", "vim.version.version4") AddVersionParent("vim.version.MemoryTiering", "vim.version.version5") AddVersionParent("vim.version.MemoryTiering", "vim.version.version2") AddVersionParent("vim.version.MemoryTiering", "vim.version.version3") AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version0") AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version1") AddVersionParent("vim.version.MemoryTiering", "vmodl.version.version2") AddVersionParent("vim.version.MemoryTiering", "vim.version.v6_9_1") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_0_2") 
AddVersionParent("vim.version.MemoryTiering", "vim.version.v6_8_7") AddVersionParent("vim.version.MemoryTiering", "vmodl.reflect.version.version1") AddVersionParent("vim.version.MemoryTiering", "vmodl.reflect.version.version2") AddVersionParent("vim.version.MemoryTiering", "vim.version.v8_0_0_0") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_1") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_2") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_3_0") AddVersionParent("vim.version.MemoryTiering", "vim.version.version13") AddVersionParent("vim.version.MemoryTiering", "vim.version.version14") AddVersionParent("vim.version.MemoryTiering", "vim.version.version15") AddVersionParent("vim.version.MemoryTiering", "vim.version.version10") AddVersionParent("vim.version.MemoryTiering", "vim.version.version11") AddVersionParent("vim.version.MemoryTiering", "vim.version.version12") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_2_0") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_2_1") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0_1_0") AddVersionParent("vim.version.MemoryTiering", "vim.version.v7_0") AddVersionParent("vim.version.version8", "vmodl.query.version.version4") AddVersionParent("vim.version.version8", "vmodl.query.version.version3") AddVersionParent("vim.version.version8", "vmodl.query.version.version2") AddVersionParent("vim.version.version8", "vmodl.query.version.version1") AddVersionParent("vim.version.version8", "vim.version.version8") AddVersionParent("vim.version.version8", "vim.version.version6") AddVersionParent("vim.version.version8", "vim.version.version7") AddVersionParent("vim.version.version8", "vim.version.version1") AddVersionParent("vim.version.version8", "vim.version.version4") AddVersionParent("vim.version.version8", "vim.version.version5") AddVersionParent("vim.version.version8", "vim.version.version2") AddVersionParent("vim.version.version8", 
"vim.version.version3") AddVersionParent("vim.version.version8", "vmodl.version.version0") AddVersionParent("vim.version.version8", "vmodl.version.version1") AddVersionParent("vim.version.version8", "vmodl.version.version2") AddVersionParent("vim.version.version8", "vmodl.reflect.version.version1") AddVersionParent("vim.version.version9", "vmodl.query.version.version4") AddVersionParent("vim.version.version9", "vmodl.query.version.version3") AddVersionParent("vim.version.version9", "vmodl.query.version.version2") AddVersionParent("vim.version.version9", "vmodl.query.version.version1") AddVersionParent("vim.version.version9", "vim.version.version8") AddVersionParent("vim.version.version9", "vim.version.version9") AddVersionParent("vim.version.version9", "vim.version.version6") AddVersionParent("vim.version.version9", "vim.version.version7") AddVersionParent("vim.version.version9", "vim.version.version1") AddVersionParent("vim.version.version9", "vim.version.version4") AddVersionParent("vim.version.version9", "vim.version.version5") AddVersionParent("vim.version.version9", "vim.version.version2") AddVersionParent("vim.version.version9", "vim.version.version3") AddVersionParent("vim.version.version9", "vmodl.version.version0") AddVersionParent("vim.version.version9", "vmodl.version.version1") AddVersionParent("vim.version.version9", "vmodl.version.version2") AddVersionParent("vim.version.version9", "vmodl.reflect.version.version1") AddVersionParent("vim.version.version6", "vmodl.query.version.version3") AddVersionParent("vim.version.version6", "vmodl.query.version.version2") AddVersionParent("vim.version.version6", "vmodl.query.version.version1") AddVersionParent("vim.version.version6", "vim.version.version6") AddVersionParent("vim.version.version6", "vim.version.version1") AddVersionParent("vim.version.version6", "vim.version.version4") AddVersionParent("vim.version.version6", "vim.version.version5") AddVersionParent("vim.version.version6", "vim.version.version2") 
AddVersionParent("vim.version.version6", "vim.version.version3") AddVersionParent("vim.version.version6", "vmodl.version.version0") AddVersionParent("vim.version.version6", "vmodl.version.version1") AddVersionParent("vim.version.version7", "vmodl.query.version.version4") AddVersionParent("vim.version.version7", "vmodl.query.version.version3") AddVersionParent("vim.version.version7", "vmodl.query.version.version2") AddVersionParent("vim.version.version7", "vmodl.query.version.version1") AddVersionParent("vim.version.version7", "vim.version.version6") AddVersionParent("vim.version.version7", "vim.version.version7") AddVersionParent("vim.version.version7", "vim.version.version1") AddVersionParent("vim.version.version7", "vim.version.version4") AddVersionParent("vim.version.version7", "vim.version.version5") AddVersionParent("vim.version.version7", "vim.version.version2") AddVersionParent("vim.version.version7", "vim.version.version3") AddVersionParent("vim.version.version7", "vmodl.version.version0") AddVersionParent("vim.version.version7", "vmodl.version.version1") AddVersionParent("vim.version.version7", "vmodl.version.version2") AddVersionParent("vim.version.version7", "vmodl.reflect.version.version1") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version4") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version3") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version2") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.query.version.version1") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version8") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version9") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version6") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version7") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.DiskGroupVMC") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version1") AddVersionParent("vim.version.DiskGroupVMC", 
"vim.version.v7_0_1_1") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version4") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version5") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version2") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version3") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version0") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version1") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.version.version2") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v6_9_1") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_0_2") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v6_8_7") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.reflect.version.version1") AddVersionParent("vim.version.DiskGroupVMC", "vmodl.reflect.version.version2") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v8_0_0_0") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_1") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_2") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_3_0") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version13") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version14") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version15") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version10") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version11") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.version12") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_2_0") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_2_1") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0_1_0") AddVersionParent("vim.version.DiskGroupVMC", "vim.version.v7_0") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version4") AddVersionParent("vim.version.TrustAuthority_V4", 
"vmodl.query.version.version3") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version2") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.query.version.version1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version8") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version9") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version6") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version7") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.TrustAuthority_V4") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_1_1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version4") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version5") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version2") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version3") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version0") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version1") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.version.version2") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v6_9_1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_0_2") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v6_8_7") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.reflect.version.version1") AddVersionParent("vim.version.TrustAuthority_V4", "vmodl.reflect.version.version2") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v8_0_0_0") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_2") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_3_0") AddVersionParent("vim.version.TrustAuthority_V4", 
"vim.version.version13") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version14") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version15") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version10") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version11") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.version12") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_2_0") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_2_1") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0_1_0") AddVersionParent("vim.version.TrustAuthority_V4", "vim.version.v7_0") AddVersionParent("vim.version.version1", "vmodl.query.version.version1") AddVersionParent("vim.version.version1", "vim.version.version1") AddVersionParent("vim.version.version1", "vmodl.version.version0") AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version4") AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version3") AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version2") AddVersionParent("vim.version.v7_0_1_1", "vmodl.query.version.version1") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version8") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version9") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version6") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version7") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version1") AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_1_1") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version4") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version5") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version2") AddVersionParent("vim.version.v7_0_1_1", "vim.version.version3") AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version0") AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version1") 
# Auto-generated version-hierarchy registrations (file header: DO NOT EDIT).
# Each AddVersionParent(child, parent) call records `parent` as an ancestor
# version identifier of `child` — NOTE(review): exact semantics are defined by
# VmomiSupport.AddVersionParent; the visible code only establishes that these
# are (child, parent) version-identifier pairs.
# -- parents of vim.version.v7_0_1_1 (continued from above)
AddVersionParent("vim.version.v7_0_1_1", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0_1_1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version13")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version14")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version15")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version10")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version11")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.version12")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.v7_0_1_1", "vim.version.v7_0")
# -- parents of vim.version.version4
AddVersionParent("vim.version.version4", "vmodl.query.version.version1")
AddVersionParent("vim.version.version4", "vim.version.version1")
AddVersionParent("vim.version.version4", "vim.version.version4")
AddVersionParent("vim.version.version4", "vim.version.version2")
AddVersionParent("vim.version.version4", "vim.version.version3")
AddVersionParent("vim.version.version4", "vmodl.version.version0")
# -- parents of vim.version.version5
AddVersionParent("vim.version.version5", "vmodl.query.version.version2")
AddVersionParent("vim.version.version5", "vmodl.query.version.version1")
AddVersionParent("vim.version.version5", "vim.version.version1")
AddVersionParent("vim.version.version5", "vim.version.version4")
AddVersionParent("vim.version.version5", "vim.version.version5")
AddVersionParent("vim.version.version5", "vim.version.version2")
AddVersionParent("vim.version.version5", "vim.version.version3")
AddVersionParent("vim.version.version5", "vmodl.version.version0")
AddVersionParent("vim.version.version5", "vmodl.version.version1")
# -- parents of vim.version.SRIOVValidNumVFs
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version4")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version3")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.query.version.version1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version8")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version9")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version6")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version7")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version4")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version5")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.SRIOVValidNumVFs")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version3")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.version.version0")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.version.version1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.version.version2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v6_9_1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v6_8_7")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version13")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version14")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version15")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version10")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version11")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.version12")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.SRIOVValidNumVFs", "vim.version.v7_0")
# -- parents of vim.version.version2
AddVersionParent("vim.version.version2", "vmodl.query.version.version1")
AddVersionParent("vim.version.version2", "vim.version.version1")
AddVersionParent("vim.version.version2", "vim.version.version2")
AddVersionParent("vim.version.version2", "vmodl.version.version0")
# -- parents of vim.version.version3
AddVersionParent("vim.version.version3", "vmodl.query.version.version1")
AddVersionParent("vim.version.version3", "vim.version.version1")
AddVersionParent("vim.version.version3", "vim.version.version2")
AddVersionParent("vim.version.version3", "vim.version.version3")
AddVersionParent("vim.version.version3", "vmodl.version.version0")
# -- parents of vim.version.FCD_VRA_SUPPORT
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version4")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version3")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.query.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version8")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version9")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version6")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version7")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version4")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version5")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version3")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.FCD_VRA_SUPPORT")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v6_9_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v6_8_7")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version13")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version14")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version15")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version10")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version11")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.version12")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FCD_VRA_SUPPORT", "vim.version.v7_0")
# -- parents of vim.version.NDU_VSPHERE_HA_INTEGRATION (continues below)
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.query.version.version4")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.query.version.version3")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.query.version.version2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.query.version.version1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version8")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version9")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version6")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version7")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version4")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version5")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version3")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.NDU_VSPHERE_HA_INTEGRATION")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.version.version0")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.version.version1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.version.version2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v6_9_1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v6_8_7")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.reflect.version.version1")
# Auto-generated version-hierarchy registrations (file header: DO NOT EDIT).
# AddVersionParent(child, parent) records `parent` as an ancestor version
# identifier of `child` — NOTE(review): semantics defined by
# VmomiSupport.AddVersionParent.
# -- parents of vim.version.NDU_VSPHERE_HA_INTEGRATION (continued from above)
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version13")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version14")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version15")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version10")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version11")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.version12")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.NDU_VSPHERE_HA_INTEGRATION", "vim.version.v7_0")
# -- parents of vim.version.guestDetailedData
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version4")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version3")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vmodl.query.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version8")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version9")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version6")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version7")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version4")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version5")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version3")
AddVersionParent("vim.version.guestDetailedData", "vim.version.guestDetailedData")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version0")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vmodl.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v6_9_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v6_8_7")
AddVersionParent("vim.version.guestDetailedData", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.guestDetailedData", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version13")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version14")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version15")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version10")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version11")
AddVersionParent("vim.version.guestDetailedData", "vim.version.version12")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.guestDetailedData", "vim.version.v7_0")
# -- parents of vim.version.VmxRebootPowerOff
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.query.version.version4")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.query.version.version3")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.query.version.version2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.query.version.version1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version8")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version9")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version6")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version7")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version4")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version5")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version3")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.VmxRebootPowerOff")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.version.version0")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.version.version1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.version.version2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v6_9_1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v6_8_7")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version13")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version14")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version15")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version10")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version11")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.version12")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VmxRebootPowerOff", "vim.version.v7_0")
# -- parents of vim.version.SGX_MPA_VMCheck
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version4")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version3")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.query.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version8")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version9")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version6")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version7")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version4")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version5")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version3")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.SGX_MPA_VMCheck")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v6_9_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v6_8_7")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version13")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version14")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version15")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version10")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version11")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.version12")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.SGX_MPA_VMCheck", "vim.version.v7_0")
# -- parents of vim.version.ClusteredEsx_V1
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.query.version.version4")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.query.version.version3")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.query.version.version2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.query.version.version1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version8")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version9")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version6")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version7")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version4")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version5")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version3")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.ClusteredEsx_V1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.version.version0")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.version.version1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.version.version2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v6_9_1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v6_8_7")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version13")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version14")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version15")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version10")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version11")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.version12")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.ClusteredEsx_V1", "vim.version.v7_0")
# -- parents of vim.version.vdcs (continues below)
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version4")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version3")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version2")
AddVersionParent("vim.version.vdcs", "vmodl.query.version.version1")
AddVersionParent("vim.version.vdcs", "vim.version.version8")
AddVersionParent("vim.version.vdcs", "vim.version.version9")
AddVersionParent("vim.version.vdcs", "vim.version.version6")
AddVersionParent("vim.version.vdcs", "vim.version.version7")
AddVersionParent("vim.version.vdcs", "vim.version.version1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.vdcs", "vim.version.version4")
AddVersionParent("vim.version.vdcs", "vim.version.version5")
AddVersionParent("vim.version.vdcs", "vim.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.version3")
AddVersionParent("vim.version.vdcs", "vim.version.vdcs")
AddVersionParent("vim.version.vdcs", "vmodl.version.version0")
AddVersionParent("vim.version.vdcs", "vmodl.version.version1")
AddVersionParent("vim.version.vdcs", "vmodl.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.v6_9_1")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.vdcs", "vim.version.v6_8_7")
AddVersionParent("vim.version.vdcs", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.vdcs", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.vdcs", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_1") AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_2") AddVersionParent("vim.version.vdcs", "vim.version.v7_0_3_0") AddVersionParent("vim.version.vdcs", "vim.version.version13") AddVersionParent("vim.version.vdcs", "vim.version.version14") AddVersionParent("vim.version.vdcs", "vim.version.version15") AddVersionParent("vim.version.vdcs", "vim.version.version10") AddVersionParent("vim.version.vdcs", "vim.version.version11") AddVersionParent("vim.version.vdcs", "vim.version.version12") AddVersionParent("vim.version.vdcs", "vim.version.v7_0_2_0") AddVersionParent("vim.version.vdcs", "vim.version.v7_0_2_1") AddVersionParent("vim.version.vdcs", "vim.version.v7_0_1_0") AddVersionParent("vim.version.vdcs", "vim.version.v7_0") AddVersionParent("vmodl.version.version0", "vmodl.version.version0") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version4") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version3") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.query.version.version1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version8") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version9") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version6") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version7") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_1_1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version4") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version5") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version3") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version0") 
AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.NFS_VMKPORTBIND") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.version.version2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v6_9_1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_0_2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v6_8_7") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.reflect.version.version1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vmodl.reflect.version.version2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v8_0_0_0") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_2") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_3_0") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version13") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version14") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version15") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version10") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version11") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.version12") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_2_0") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_2_1") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0_1_0") AddVersionParent("vim.version.NFS_VMKPORTBIND", "vim.version.v7_0") AddVersionParent("vmodl.version.version1", "vmodl.version.version0") AddVersionParent("vmodl.version.version1", "vmodl.version.version1") AddVersionParent("vmodl.version.version2", "vmodl.version.version0") AddVersionParent("vmodl.version.version2", "vmodl.version.version1") AddVersionParent("vmodl.version.version2", "vmodl.version.version2") AddVersionParent("vim.version.v6_9_1", 
"vmodl.query.version.version4") AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version3") AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version2") AddVersionParent("vim.version.v6_9_1", "vmodl.query.version.version1") AddVersionParent("vim.version.v6_9_1", "vim.version.version8") AddVersionParent("vim.version.v6_9_1", "vim.version.version9") AddVersionParent("vim.version.v6_9_1", "vim.version.version6") AddVersionParent("vim.version.v6_9_1", "vim.version.version7") AddVersionParent("vim.version.v6_9_1", "vim.version.version1") AddVersionParent("vim.version.v6_9_1", "vim.version.version4") AddVersionParent("vim.version.v6_9_1", "vim.version.version5") AddVersionParent("vim.version.v6_9_1", "vim.version.version2") AddVersionParent("vim.version.v6_9_1", "vim.version.version3") AddVersionParent("vim.version.v6_9_1", "vmodl.version.version0") AddVersionParent("vim.version.v6_9_1", "vmodl.version.version1") AddVersionParent("vim.version.v6_9_1", "vmodl.version.version2") AddVersionParent("vim.version.v6_9_1", "vim.version.v6_9_1") AddVersionParent("vim.version.v6_9_1", "vim.version.v6_8_7") AddVersionParent("vim.version.v6_9_1", "vmodl.reflect.version.version1") AddVersionParent("vim.version.v6_9_1", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v6_9_1", "vim.version.version13") AddVersionParent("vim.version.v6_9_1", "vim.version.version14") AddVersionParent("vim.version.v6_9_1", "vim.version.version15") AddVersionParent("vim.version.v6_9_1", "vim.version.version10") AddVersionParent("vim.version.v6_9_1", "vim.version.version11") AddVersionParent("vim.version.v6_9_1", "vim.version.version12") AddVersionParent("vim.version.fourKnStorageSupport", "vmodl.query.version.version4") AddVersionParent("vim.version.fourKnStorageSupport", "vmodl.query.version.version3") AddVersionParent("vim.version.fourKnStorageSupport", "vmodl.query.version.version2") AddVersionParent("vim.version.fourKnStorageSupport", "vmodl.query.version.version1") 
# ---------------------------------------------------------------------------
# Version-compatibility registrations (auto-generated data, reshaped).
# Each entry maps a version/feature identifier to the ordered sequence of
# version identifiers it is declared compatible with; AddVersionParent is
# invoked once per (child, parent) pair, preserving the original generated
# call order exactly.
# NOTE(review): the first child (fourKnStorageSupport) and the last child
# (VMcrypt_V4) also receive additional AddVersionParent calls outside this
# region; this table holds only the pairs that were listed here.
_VERSION_PARENTS = (
    ("vim.version.fourKnStorageSupport", (
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.fourKnStorageSupport", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.batchRenameSupport", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.batchRenameSupport", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.resetportstatistics", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.resetportstatistics", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.VM_CLONE_SWITCH_HOST_EMM", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.VM_CLONE_SWITCH_HOST_EMM", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.VMC_NFS_SUPPORT", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.VMC_NFS_SUPPORT", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    # NsxLiveUpdate uniquely lists "vim.version.LiveUpdate" as a parent.
    ("vim.version.NsxLiveUpdate", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.NsxLiveUpdate", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.LiveUpdate",
        "vim.version.v7_0_2_0", "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0", "vim.version.v7_0",
    )),
    ("vim.version.VDS_ReadOnlyDisk", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.VDS_ReadOnlyDisk", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.SGX_MPA_HostReg", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.SGX_MPA_HostReg", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    # v7_0_0_2 is a release namespace: it omits v7_0_1_1, v8_0_0_0 and the
    # v7_0_x patch identifiers that the feature namespaces above include.
    ("vim.version.v7_0_0_2", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.version4",
        "vim.version.version5", "vim.version.version2",
        "vim.version.version3", "vmodl.version.version0",
        "vmodl.version.version1", "vmodl.version.version2",
        "vim.version.v6_9_1", "vim.version.v7_0_0_2",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0",
    )),
    # In the groups below the generated order lists v7_0_0_2 before the
    # child's own (self) identifier, unlike the groups above.
    ("vim.version.GreenMetrics", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.v7_0_0_2", "vim.version.GreenMetrics",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.vVol_datastore_scalability", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.v7_0_0_2", "vim.version.vVol_datastore_scalability",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.hostVendorSpecificStatus", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.v7_0_0_2", "vim.version.hostVendorSpecificStatus",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.AssignHwCompositeDev", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.v7_0_0_2", "vim.version.AssignHwCompositeDev",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0", "vim.version.version13",
        "vim.version.version14", "vim.version.version15",
        "vim.version.version10", "vim.version.version11",
        "vim.version.version12", "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    # Partial group: VMcrypt_V4 registrations continue after this region.
    ("vim.version.VMcrypt_V4", (
        "vmodl.query.version.version4", "vmodl.query.version.version3",
        "vmodl.query.version.version2", "vmodl.query.version.version1",
        "vim.version.version8", "vim.version.version9",
        "vim.version.version6", "vim.version.version7",
        "vim.version.version1", "vim.version.v7_0_1_1",
        "vim.version.version4", "vim.version.version5",
        "vim.version.version2", "vim.version.version3",
        "vmodl.version.version0", "vmodl.version.version1",
        "vmodl.version.version2", "vim.version.v6_9_1",
        "vim.version.v7_0_0_2", "vim.version.VMcrypt_V4",
        "vim.version.v6_8_7", "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
    )),
)
# Replay the registrations in exactly the generated order.
for _child, _parents in _VERSION_PARENTS:
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Keep the module namespace clean: these helpers are registration-time only.
del _child, _parent, _parents, _VERSION_PARENTS
AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version13") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version14") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version15") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version10") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version11") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.version12") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VMcrypt_V4", "vim.version.v7_0") AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version4") AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version3") AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version2") AddVersionParent("vim.version.hostAccessManager", "vmodl.query.version.version1") AddVersionParent("vim.version.hostAccessManager", "vim.version.version8") AddVersionParent("vim.version.hostAccessManager", "vim.version.version9") AddVersionParent("vim.version.hostAccessManager", "vim.version.version6") AddVersionParent("vim.version.hostAccessManager", "vim.version.version7") AddVersionParent("vim.version.hostAccessManager", "vim.version.version1") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_1_1") AddVersionParent("vim.version.hostAccessManager", "vim.version.version4") AddVersionParent("vim.version.hostAccessManager", "vim.version.version5") AddVersionParent("vim.version.hostAccessManager", "vim.version.version2") AddVersionParent("vim.version.hostAccessManager", "vim.version.version3") AddVersionParent("vim.version.hostAccessManager", 
"vmodl.version.version0") AddVersionParent("vim.version.hostAccessManager", "vmodl.version.version1") AddVersionParent("vim.version.hostAccessManager", "vmodl.version.version2") AddVersionParent("vim.version.hostAccessManager", "vim.version.v6_9_1") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_0_2") AddVersionParent("vim.version.hostAccessManager", "vim.version.hostAccessManager") AddVersionParent("vim.version.hostAccessManager", "vim.version.v6_8_7") AddVersionParent("vim.version.hostAccessManager", "vmodl.reflect.version.version1") AddVersionParent("vim.version.hostAccessManager", "vmodl.reflect.version.version2") AddVersionParent("vim.version.hostAccessManager", "vim.version.v8_0_0_0") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_1") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_2") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_3_0") AddVersionParent("vim.version.hostAccessManager", "vim.version.version13") AddVersionParent("vim.version.hostAccessManager", "vim.version.version14") AddVersionParent("vim.version.hostAccessManager", "vim.version.version15") AddVersionParent("vim.version.hostAccessManager", "vim.version.version10") AddVersionParent("vim.version.hostAccessManager", "vim.version.version11") AddVersionParent("vim.version.hostAccessManager", "vim.version.version12") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_2_0") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_2_1") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0_1_0") AddVersionParent("vim.version.hostAccessManager", "vim.version.v7_0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version4") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version3") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version2") 
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.query.version.version1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version8") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version9") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version6") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version7") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_1_1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version4") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version5") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version2") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version3") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.version.version2") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v6_9_1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_0_2") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.ocmSupportedForReconfigure") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v6_8_7") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.reflect.version.version1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vmodl.reflect.version.version2") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v8_0_0_0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_2") 
AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_3_0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version13") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version14") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version15") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version10") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version11") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.version12") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_2_0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_2_1") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0_1_0") AddVersionParent("vim.version.ocmSupportedForReconfigure", "vim.version.v7_0") AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version4") AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version3") AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version2") AddVersionParent("vim.version.v6_8_7", "vmodl.query.version.version1") AddVersionParent("vim.version.v6_8_7", "vim.version.version8") AddVersionParent("vim.version.v6_8_7", "vim.version.version9") AddVersionParent("vim.version.v6_8_7", "vim.version.version6") AddVersionParent("vim.version.v6_8_7", "vim.version.version7") AddVersionParent("vim.version.v6_8_7", "vim.version.version1") AddVersionParent("vim.version.v6_8_7", "vim.version.version4") AddVersionParent("vim.version.v6_8_7", "vim.version.version5") AddVersionParent("vim.version.v6_8_7", "vim.version.version2") AddVersionParent("vim.version.v6_8_7", "vim.version.version3") AddVersionParent("vim.version.v6_8_7", "vmodl.version.version0") AddVersionParent("vim.version.v6_8_7", "vmodl.version.version1") AddVersionParent("vim.version.v6_8_7", "vmodl.version.version2") AddVersionParent("vim.version.v6_8_7", 
"vim.version.v6_8_7") AddVersionParent("vim.version.v6_8_7", "vmodl.reflect.version.version1") AddVersionParent("vim.version.v6_8_7", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v6_8_7", "vim.version.version13") AddVersionParent("vim.version.v6_8_7", "vim.version.version14") AddVersionParent("vim.version.v6_8_7", "vim.version.version15") AddVersionParent("vim.version.v6_8_7", "vim.version.version10") AddVersionParent("vim.version.v6_8_7", "vim.version.version11") AddVersionParent("vim.version.v6_8_7", "vim.version.version12") AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version0") AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version1") AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version2") AddVersionParent("vmodl.reflect.version.version1", "vmodl.reflect.version.version1") AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version0") AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version1") AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version2") AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version1") AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v8_0_0_0", "vmodl.query.version.version4") AddVersionParent("vim.version.v8_0_0_0", "vmodl.query.version.version3") AddVersionParent("vim.version.v8_0_0_0", "vmodl.query.version.version2") AddVersionParent("vim.version.v8_0_0_0", "vmodl.query.version.version1") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version8") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version9") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version6") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version7") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version1") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_1_1") AddVersionParent("vim.version.v8_0_0_0", 
"vim.version.version4") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version5") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version2") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version3") AddVersionParent("vim.version.v8_0_0_0", "vmodl.version.version0") AddVersionParent("vim.version.v8_0_0_0", "vmodl.version.version1") AddVersionParent("vim.version.v8_0_0_0", "vmodl.version.version2") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v6_9_1") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_0_2") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v6_8_7") AddVersionParent("vim.version.v8_0_0_0", "vmodl.reflect.version.version1") AddVersionParent("vim.version.v8_0_0_0", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v8_0_0_0") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_3_1") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_3_2") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_3_0") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version13") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version14") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version15") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version10") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version11") AddVersionParent("vim.version.v8_0_0_0", "vim.version.version12") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_2_0") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_2_1") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0_1_0") AddVersionParent("vim.version.v8_0_0_0", "vim.version.v7_0") AddVersionParent("vim.version.VQAT", "vmodl.query.version.version4") AddVersionParent("vim.version.VQAT", "vmodl.query.version.version3") AddVersionParent("vim.version.VQAT", "vmodl.query.version.version2") AddVersionParent("vim.version.VQAT", "vmodl.query.version.version1") AddVersionParent("vim.version.VQAT", 
"vim.version.version8") AddVersionParent("vim.version.VQAT", "vim.version.version9") AddVersionParent("vim.version.VQAT", "vim.version.version6") AddVersionParent("vim.version.VQAT", "vim.version.version7") AddVersionParent("vim.version.VQAT", "vim.version.version1") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VQAT", "vim.version.version4") AddVersionParent("vim.version.VQAT", "vim.version.version5") AddVersionParent("vim.version.VQAT", "vim.version.version2") AddVersionParent("vim.version.VQAT", "vim.version.version3") AddVersionParent("vim.version.VQAT", "vmodl.version.version0") AddVersionParent("vim.version.VQAT", "vmodl.version.version1") AddVersionParent("vim.version.VQAT", "vmodl.version.version2") AddVersionParent("vim.version.VQAT", "vim.version.v6_9_1") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VQAT", "vim.version.v6_8_7") AddVersionParent("vim.version.VQAT", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VQAT", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VQAT", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VQAT", "vim.version.VQAT") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VQAT", "vim.version.version13") AddVersionParent("vim.version.VQAT", "vim.version.version14") AddVersionParent("vim.version.VQAT", "vim.version.version15") AddVersionParent("vim.version.VQAT", "vim.version.version10") AddVersionParent("vim.version.VQAT", "vim.version.version11") AddVersionParent("vim.version.VQAT", "vim.version.version12") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VQAT", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VQAT", "vim.version.v7_0") 
AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version4") AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version3") AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version2") AddVersionParent("vim.version.VirtualTopo", "vmodl.query.version.version1") AddVersionParent("vim.version.VirtualTopo", "vim.version.version8") AddVersionParent("vim.version.VirtualTopo", "vim.version.version9") AddVersionParent("vim.version.VirtualTopo", "vim.version.version6") AddVersionParent("vim.version.VirtualTopo", "vim.version.version7") AddVersionParent("vim.version.VirtualTopo", "vim.version.version1") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VirtualTopo", "vim.version.version4") AddVersionParent("vim.version.VirtualTopo", "vim.version.version5") AddVersionParent("vim.version.VirtualTopo", "vim.version.version2") AddVersionParent("vim.version.VirtualTopo", "vim.version.version3") AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version0") AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version1") AddVersionParent("vim.version.VirtualTopo", "vmodl.version.version2") AddVersionParent("vim.version.VirtualTopo", "vim.version.v6_9_1") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VirtualTopo", "vim.version.v6_8_7") AddVersionParent("vim.version.VirtualTopo", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VirtualTopo", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VirtualTopo", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VirtualTopo", "vim.version.VirtualTopo") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VirtualTopo", "vim.version.version13") 
AddVersionParent("vim.version.VirtualTopo", "vim.version.version14") AddVersionParent("vim.version.VirtualTopo", "vim.version.version15") AddVersionParent("vim.version.VirtualTopo", "vim.version.version10") AddVersionParent("vim.version.VirtualTopo", "vim.version.version11") AddVersionParent("vim.version.VirtualTopo", "vim.version.version12") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VirtualTopo", "vim.version.v7_0") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.query.version.version4") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.query.version.version3") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.query.version.version2") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.query.version.version1") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version8") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version9") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version6") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version7") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version1") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version4") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version5") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version2") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version3") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.version.version0") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.version.version1") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.version.version2") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v6_9_1") 
AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v6_8_7") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VCDP_NestedFilters", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.VCDP_NestedFilters") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version13") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version14") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version15") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version10") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version11") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.version12") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VCDP_NestedFilters", "vim.version.v7_0") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version4") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version3") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version2") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.query.version.version1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version8") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version9") AddVersionParent("vim.version.Tools_Update_Health", 
"vim.version.version6") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version7") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_1_1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version4") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version5") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version2") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version3") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version0") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version1") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.version.version2") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v6_9_1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_0_2") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v6_8_7") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.reflect.version.version1") AddVersionParent("vim.version.Tools_Update_Health", "vmodl.reflect.version.version2") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v8_0_0_0") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.Tools_Update_Health") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_2") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_3_0") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version13") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version14") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version15") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version10") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version11") 
AddVersionParent("vim.version.Tools_Update_Health", "vim.version.version12") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_2_0") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_2_1") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0_1_0") AddVersionParent("vim.version.Tools_Update_Health", "vim.version.v7_0") AddVersionParent("vim.version.nativeSnapshot", "vmodl.query.version.version4") AddVersionParent("vim.version.nativeSnapshot", "vmodl.query.version.version3") AddVersionParent("vim.version.nativeSnapshot", "vmodl.query.version.version2") AddVersionParent("vim.version.nativeSnapshot", "vmodl.query.version.version1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version8") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version9") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version6") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version7") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_1_1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version4") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version5") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version2") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version3") AddVersionParent("vim.version.nativeSnapshot", "vmodl.version.version0") AddVersionParent("vim.version.nativeSnapshot", "vmodl.version.version1") AddVersionParent("vim.version.nativeSnapshot", "vmodl.version.version2") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v6_9_1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_0_2") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v6_8_7") AddVersionParent("vim.version.nativeSnapshot", "vmodl.reflect.version.version1") AddVersionParent("vim.version.nativeSnapshot", "vmodl.reflect.version.version2") 
AddVersionParent("vim.version.nativeSnapshot", "vim.version.v8_0_0_0") AddVersionParent("vim.version.nativeSnapshot", "vim.version.nativeSnapshot") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_3_1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_3_2") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_3_0") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version13") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version14") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version15") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version10") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version11") AddVersionParent("vim.version.nativeSnapshot", "vim.version.version12") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_2_0") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_2_1") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0_1_0") AddVersionParent("vim.version.nativeSnapshot", "vim.version.v7_0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version4") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version3") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.query.version.version1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version8") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version9") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version6") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version7") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_1_1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version4") AddVersionParent("vim.version.CPU_Scheduler_Info", 
"vim.version.version5") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version3") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.version.version2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v6_9_1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_0_2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v6_8_7") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.reflect.version.version1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vmodl.reflect.version.version2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v8_0_0_0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.CPU_Scheduler_Info") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_2") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_3_0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version13") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version14") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version15") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version10") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version11") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.version12") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_2_0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_2_1") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0_1_0") AddVersionParent("vim.version.CPU_Scheduler_Info", "vim.version.v7_0") AddVersionParent("vim.version.hostProfiles", 
"vmodl.query.version.version4") AddVersionParent("vim.version.hostProfiles", "vmodl.query.version.version3") AddVersionParent("vim.version.hostProfiles", "vmodl.query.version.version2") AddVersionParent("vim.version.hostProfiles", "vmodl.query.version.version1") AddVersionParent("vim.version.hostProfiles", "vim.version.version8") AddVersionParent("vim.version.hostProfiles", "vim.version.version9") AddVersionParent("vim.version.hostProfiles", "vim.version.version6") AddVersionParent("vim.version.hostProfiles", "vim.version.version7") AddVersionParent("vim.version.hostProfiles", "vim.version.version1") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_1_1") AddVersionParent("vim.version.hostProfiles", "vim.version.version4") AddVersionParent("vim.version.hostProfiles", "vim.version.version5") AddVersionParent("vim.version.hostProfiles", "vim.version.version2") AddVersionParent("vim.version.hostProfiles", "vim.version.version3") AddVersionParent("vim.version.hostProfiles", "vmodl.version.version0") AddVersionParent("vim.version.hostProfiles", "vmodl.version.version1") AddVersionParent("vim.version.hostProfiles", "vmodl.version.version2") AddVersionParent("vim.version.hostProfiles", "vim.version.v6_9_1") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_0_2") AddVersionParent("vim.version.hostProfiles", "vim.version.v6_8_7") AddVersionParent("vim.version.hostProfiles", "vmodl.reflect.version.version1") AddVersionParent("vim.version.hostProfiles", "vmodl.reflect.version.version2") AddVersionParent("vim.version.hostProfiles", "vim.version.v8_0_0_0") AddVersionParent("vim.version.hostProfiles", "vim.version.hostProfiles") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_3_1") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_3_2") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_3_0") AddVersionParent("vim.version.hostProfiles", "vim.version.version13") AddVersionParent("vim.version.hostProfiles", 
"vim.version.version14") AddVersionParent("vim.version.hostProfiles", "vim.version.version15") AddVersionParent("vim.version.hostProfiles", "vim.version.version10") AddVersionParent("vim.version.hostProfiles", "vim.version.version11") AddVersionParent("vim.version.hostProfiles", "vim.version.version12") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_2_0") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_2_1") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0_1_0") AddVersionParent("vim.version.hostProfiles", "vim.version.v7_0") AddVersionParent("vim.version.GraphicsDRS", "vmodl.query.version.version4") AddVersionParent("vim.version.GraphicsDRS", "vmodl.query.version.version3") AddVersionParent("vim.version.GraphicsDRS", "vmodl.query.version.version2") AddVersionParent("vim.version.GraphicsDRS", "vmodl.query.version.version1") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version8") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version9") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version6") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version7") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version1") AddVersionParent("vim.version.GraphicsDRS", "vim.version.v7_0_1_1") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version4") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version5") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version2") AddVersionParent("vim.version.GraphicsDRS", "vim.version.version3") AddVersionParent("vim.version.GraphicsDRS", "vmodl.version.version0") AddVersionParent("vim.version.GraphicsDRS", "vmodl.version.version1") AddVersionParent("vim.version.GraphicsDRS", "vmodl.version.version2") AddVersionParent("vim.version.GraphicsDRS", "vim.version.v6_9_1") AddVersionParent("vim.version.GraphicsDRS", "vim.version.v7_0_0_2") AddVersionParent("vim.version.GraphicsDRS", "vim.version.v6_8_7") 
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    ("vim.version.GraphicsDRS", (
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.GraphicsDRS",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.VM_CLONE_REKEY_TPM", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.VM_CLONE_REKEY_TPM",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Final edge of the VM_CLONE_REKEY_TPM section begun earlier in the file.
    ("vim.version.VM_CLONE_REKEY_TPM", (
        "vim.version.v7_0",
    )),
    ("vim.version.v7_0_3_1", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.FCD_CATALOG_HEALTH", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the FCD_CATALOG_HEALTH section begun earlier in the file.
    ("vim.version.FCD_CATALOG_HEALTH", (
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.FCD_CATALOG_HEALTH",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.v7_0_3_2", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the v7_0_3_2 section begun earlier in the file.
    ("vim.version.v7_0_3_2", (
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.v7_0_3_0", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.hwh", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.hwh",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the hwh section begun earlier in the file.
    ("vim.version.hwh", (
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.version13", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.version13",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
    )),
    ("vim.version.smartnic_vc", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.smartnic_vc",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the smartnic_vc section begun earlier in the file.
    ("vim.version.smartnic_vc", (
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.version14", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
    )),
    ("vim.version.version15", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the version15 section begun earlier in the file.
    ("vim.version.version15", (
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
    )),
    ("vim.version.DRS_LB_REASONCODE", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data-driven loop; this span continues the DRS_LB_REASONCODE
# section begun earlier in the file. AddVersionParent comes from
# .VmomiSupport (imported above); original call order is preserved.
for _parent in (
    "vim.version.version6",
    "vim.version.version7",
    "vim.version.version1",
    "vim.version.v7_0_1_1",
    "vim.version.version4",
    "vim.version.version5",
    "vim.version.version2",
    "vim.version.version3",
    "vmodl.version.version0",
    "vmodl.version.version1",
    "vmodl.version.version2",
    "vim.version.v6_9_1",
    "vim.version.v7_0_0_2",
    "vim.version.v6_8_7",
    "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0",
    "vim.version.version13",
    "vim.version.version14",
    "vim.version.version15",
    "vim.version.DRS_LB_REASONCODE",
    "vim.version.version10",
    "vim.version.version11",
):
    AddVersionParent("vim.version.DRS_LB_REASONCODE", _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
for _child, _parents in (
    # Remainder of the DRS_LB_REASONCODE section begun earlier in the file.
    ("vim.version.DRS_LB_REASONCODE", (
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.VSAN2_Configure", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.VSAN2_Configure",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.HWv20", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.HWv20",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.gosCrashRemediation", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.version8",
        "vim.version.version9",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
# Version-parent registrations, restyled from the generated one-call-per-edge
# form into a data table: each entry pairs a child version identifier with the
# ordered tuple of identifiers registered as its parents via AddVersionParent
# (imported from .VmomiSupport above). Original call order is preserved.
# NOTE(review): the vim.version.unstable section continues beyond this span.
for _child, _parents in (
    # Remainder of the gosCrashRemediation section begun earlier in the file.
    ("vim.version.gosCrashRemediation", (
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.version2",
        "vim.version.version3",
        "vmodl.version.version0",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.v7_0_0_2",
        "vim.version.v6_8_7",
        "vmodl.reflect.version.version1",
        "vmodl.reflect.version.version2",
        "vim.version.v8_0_0_0",
        "vim.version.v7_0_3_1",
        "vim.version.v7_0_3_2",
        "vim.version.v7_0_3_0",
        "vim.version.version13",
        "vim.version.version14",
        "vim.version.version15",
        "vim.version.gosCrashRemediation",
        "vim.version.version10",
        "vim.version.version11",
        "vim.version.version12",
        "vim.version.v7_0_2_0",
        "vim.version.v7_0_2_1",
        "vim.version.v7_0_1_0",
        "vim.version.v7_0",
    )),
    ("vim.version.unstable", (
        "vmodl.query.version.version4",
        "vmodl.query.version.version3",
        "vmodl.query.version.version2",
        "vmodl.query.version.version1",
        "vim.version.pcieHotPlugOfFPT",
        "vim.version.FileLockInfo_GSS34",
        "vim.version.pr1429825",
        "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT",
        "vim.version.MemoryTiering",
        "vim.version.version8",
        "vim.version.version9",
        "vim.version.version6",
        "vim.version.version7",
        "vim.version.DiskGroupVMC",
        "vim.version.TrustAuthority_V4",
        "vim.version.version1",
        "vim.version.v7_0_1_1",
        "vim.version.version4",
        "vim.version.version5",
        "vim.version.SRIOVValidNumVFs",
        "vim.version.version2",
        "vim.version.version3",
        "vim.version.FCD_VRA_SUPPORT",
        "vim.version.NDU_VSPHERE_HA_INTEGRATION",
        "vim.version.guestDetailedData",
        "vim.version.VmxRebootPowerOff",
        "vim.version.SGX_MPA_VMCheck",
        "vim.version.ClusteredEsx_V1",
        "vim.version.vdcs",
        "vmodl.version.version0",
        "vim.version.NFS_VMKPORTBIND",
        "vmodl.version.version1",
        "vmodl.version.version2",
        "vim.version.v6_9_1",
        "vim.version.fourKnStorageSupport",
        "vim.version.batchRenameSupport",
        "vim.version.resetportstatistics",
        "vim.version.VM_CLONE_SWITCH_HOST_EMM",
        "vim.version.VMC_NFS_SUPPORT",
        "vim.version.NsxLiveUpdate",
        "vim.version.VDS_ReadOnlyDisk",
        "vim.version.SGX_MPA_HostReg",
        "vim.version.v7_0_0_2",
        "vim.version.GreenMetrics",
        "vim.version.vVol_datastore_scalability",
        "vim.version.hostVendorSpecificStatus",
        "vim.version.AssignHwCompositeDev",
        "vim.version.VMcrypt_V4",
        "vim.version.hostAccessManager",
        "vim.version.ocmSupportedForReconfigure",
    )),
):
    for _parent in _parents:
        AddVersionParent(_child, _parent)
AddVersionParent("vim.version.unstable", "vim.version.v6_8_7") AddVersionParent("vim.version.unstable", "vmodl.reflect.version.version1") AddVersionParent("vim.version.unstable", "vmodl.reflect.version.version2") AddVersionParent("vim.version.unstable", "vim.version.v8_0_0_0") AddVersionParent("vim.version.unstable", "vim.version.VQAT") AddVersionParent("vim.version.unstable", "vim.version.VirtualTopo") AddVersionParent("vim.version.unstable", "vim.version.VCDP_NestedFilters") AddVersionParent("vim.version.unstable", "vim.version.Tools_Update_Health") AddVersionParent("vim.version.unstable", "vim.version.nativeSnapshot") AddVersionParent("vim.version.unstable", "vim.version.CPU_Scheduler_Info") AddVersionParent("vim.version.unstable", "vim.version.hostProfiles") AddVersionParent("vim.version.unstable", "vim.version.GraphicsDRS") AddVersionParent("vim.version.unstable", "vim.version.VM_CLONE_REKEY_TPM") AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_1") AddVersionParent("vim.version.unstable", "vim.version.FCD_CATALOG_HEALTH") AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_2") AddVersionParent("vim.version.unstable", "vim.version.v7_0_3_0") AddVersionParent("vim.version.unstable", "vim.version.hwh") AddVersionParent("vim.version.unstable", "vim.version.version13") AddVersionParent("vim.version.unstable", "vim.version.smartnic_vc") AddVersionParent("vim.version.unstable", "vim.version.version14") AddVersionParent("vim.version.unstable", "vim.version.version15") AddVersionParent("vim.version.unstable", "vim.version.DRS_LB_REASONCODE") AddVersionParent("vim.version.unstable", "vim.version.VSAN2_Configure") AddVersionParent("vim.version.unstable", "vim.version.HWv20") AddVersionParent("vim.version.unstable", "vim.version.gosCrashRemediation") AddVersionParent("vim.version.unstable", "vim.version.unstable") AddVersionParent("vim.version.unstable", "vim.version.VSAN_DeltaCompEnsureDurability") AddVersionParent("vim.version.unstable", 
"vim.version.FT_DRS_METRO_CLUSTER") AddVersionParent("vim.version.unstable", "vim.version.bmcInfo") AddVersionParent("vim.version.unstable", "vim.version.VCSOF_173") AddVersionParent("vim.version.unstable", "vim.version.pciSriovExtendedID") AddVersionParent("vim.version.unstable", "vim.version.vmxnet3UPT") AddVersionParent("vim.version.unstable", "vim.version.version10") AddVersionParent("vim.version.unstable", "vim.version.version11") AddVersionParent("vim.version.unstable", "vim.version.VMcrypt_IntegrityProtection") AddVersionParent("vim.version.unstable", "vim.version.version12") AddVersionParent("vim.version.unstable", "vim.version.pciDeviceExt") AddVersionParent("vim.version.unstable", "vim.version.toolsOffHost") AddVersionParent("vim.version.unstable", "vim.version.vHT") AddVersionParent("vim.version.unstable", "vim.version.ClusterConfigManagerV2") AddVersionParent("vim.version.unstable", "vim.version.smartnic_network") AddVersionParent("vim.version.unstable", "vim.version.hostCertificateManagement") AddVersionParent("vim.version.unstable", "vim.version.DVX") AddVersionParent("vim.version.unstable", "vim.version.vmMisc") AddVersionParent("vim.version.unstable", "vim.version.VLCM_QuickLaunchPreload") AddVersionParent("vim.version.unstable", "vim.version.VmcExternalStorageSupport") AddVersionParent("vim.version.unstable", "vim.version.LSI2PVSCSI") AddVersionParent("vim.version.unstable", "vim.version.ProvisioningEventRefresh") AddVersionParent("vim.version.unstable", "vim.version.PodVMOnVDS") AddVersionParent("vim.version.unstable", "vim.version.ClusterConfigManagerTransition") AddVersionParent("vim.version.unstable", "vim.version.LiveUpdate") AddVersionParent("vim.version.unstable", "vim.version.OVF_SINGLEDEPLOY_API") AddVersionParent("vim.version.unstable", "vim.version.v7_0_2_0") AddVersionParent("vim.version.unstable", "vim.version.pr1803450") AddVersionParent("vim.version.unstable", "vim.version.v7_0_2_1") AddVersionParent("vim.version.unstable", 
"vim.version.Vmfs_Unmap_Ultralow_Rate") AddVersionParent("vim.version.unstable", "vim.version.VMcrypt_OnlineVMEncryption") AddVersionParent("vim.version.unstable", "vim.version.FT_VBS_SUPPORT") AddVersionParent("vim.version.unstable", "vim.version.WCP_FaultDomains") AddVersionParent("vim.version.unstable", "vim.version.VMcrypt3_KeyCustomAttribute") AddVersionParent("vim.version.unstable", "vim.version.dnd") AddVersionParent("vim.version.unstable", "vim.version.optional_virtual_disks") AddVersionParent("vim.version.unstable", "vim.version.hwh2_0") AddVersionParent("vim.version.unstable", "vim.version.v7_0_1_0") AddVersionParent("vim.version.unstable", "vim.version.E2ENativeNVMeSupport") AddVersionParent("vim.version.unstable", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1") AddVersionParent("vim.version.unstable", "vim.version.EventsOrdering") AddVersionParent("vim.version.unstable", "vim.version.FCD_PERFORMANCE") AddVersionParent("vim.version.unstable", "vim.version.PMemV2") AddVersionParent("vim.version.unstable", "vim.version.v7_0") AddVersionParent("vim.version.unstable", "vim.version.SPBM_DISK_PROV_VIA_POLICY") AddVersionParent("vim.version.unstable", "vim.version.nsx_uens_u2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version4") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version3") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.query.version.version1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version8") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version9") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version6") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version7") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", 
"vim.version.version1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version4") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version5") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version3") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version0") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.version.version2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v6_9_1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v6_8_7") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version13") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version14") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version15") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.VSAN_DeltaCompEnsureDurability") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version10") 
AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version11") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.version12") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VSAN_DeltaCompEnsureDurability", "vim.version.v7_0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version4") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version3") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.query.version.version1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version8") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version9") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version6") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version7") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_1_1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version4") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version5") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version3") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.version.version2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v6_9_1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", 
"vim.version.v7_0_0_2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v6_8_7") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.reflect.version.version1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vmodl.reflect.version.version2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v8_0_0_0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_2") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_3_0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version13") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version14") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version15") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.FT_DRS_METRO_CLUSTER") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version10") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version11") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.version12") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_2_0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_2_1") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0_1_0") AddVersionParent("vim.version.FT_DRS_METRO_CLUSTER", "vim.version.v7_0") AddVersionParent("vim.version.bmcInfo", "vmodl.query.version.version4") AddVersionParent("vim.version.bmcInfo", "vmodl.query.version.version3") AddVersionParent("vim.version.bmcInfo", "vmodl.query.version.version2") AddVersionParent("vim.version.bmcInfo", "vmodl.query.version.version1") AddVersionParent("vim.version.bmcInfo", "vim.version.version8") AddVersionParent("vim.version.bmcInfo", "vim.version.version9") AddVersionParent("vim.version.bmcInfo", "vim.version.version6") AddVersionParent("vim.version.bmcInfo", "vim.version.version7") 
AddVersionParent("vim.version.bmcInfo", "vim.version.version1") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_1_1") AddVersionParent("vim.version.bmcInfo", "vim.version.version4") AddVersionParent("vim.version.bmcInfo", "vim.version.version5") AddVersionParent("vim.version.bmcInfo", "vim.version.version2") AddVersionParent("vim.version.bmcInfo", "vim.version.version3") AddVersionParent("vim.version.bmcInfo", "vmodl.version.version0") AddVersionParent("vim.version.bmcInfo", "vmodl.version.version1") AddVersionParent("vim.version.bmcInfo", "vmodl.version.version2") AddVersionParent("vim.version.bmcInfo", "vim.version.v6_9_1") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_0_2") AddVersionParent("vim.version.bmcInfo", "vim.version.v6_8_7") AddVersionParent("vim.version.bmcInfo", "vmodl.reflect.version.version1") AddVersionParent("vim.version.bmcInfo", "vmodl.reflect.version.version2") AddVersionParent("vim.version.bmcInfo", "vim.version.v8_0_0_0") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_3_1") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_3_2") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_3_0") AddVersionParent("vim.version.bmcInfo", "vim.version.version13") AddVersionParent("vim.version.bmcInfo", "vim.version.version14") AddVersionParent("vim.version.bmcInfo", "vim.version.version15") AddVersionParent("vim.version.bmcInfo", "vim.version.bmcInfo") AddVersionParent("vim.version.bmcInfo", "vim.version.version10") AddVersionParent("vim.version.bmcInfo", "vim.version.version11") AddVersionParent("vim.version.bmcInfo", "vim.version.version12") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_2_0") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_2_1") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0_1_0") AddVersionParent("vim.version.bmcInfo", "vim.version.v7_0") AddVersionParent("vim.version.disabled", "vmodl.query.version.version4") AddVersionParent("vim.version.disabled", 
"vmodl.query.version.version3") AddVersionParent("vim.version.disabled", "vmodl.query.version.version2") AddVersionParent("vim.version.disabled", "vmodl.query.version.version1") AddVersionParent("vim.version.disabled", "vim.version.pcieHotPlugOfFPT") AddVersionParent("vim.version.disabled", "vim.version.FileLockInfo_GSS34") AddVersionParent("vim.version.disabled", "vim.version.pr1429825") AddVersionParent("vim.version.disabled", "vim.version.VSPHERE_DP_QUIESCED_SNAPSHOT") AddVersionParent("vim.version.disabled", "vim.version.MemoryTiering") AddVersionParent("vim.version.disabled", "vim.version.version8") AddVersionParent("vim.version.disabled", "vim.version.version9") AddVersionParent("vim.version.disabled", "vim.version.version6") AddVersionParent("vim.version.disabled", "vim.version.version7") AddVersionParent("vim.version.disabled", "vim.version.DiskGroupVMC") AddVersionParent("vim.version.disabled", "vim.version.TrustAuthority_V4") AddVersionParent("vim.version.disabled", "vim.version.version1") AddVersionParent("vim.version.disabled", "vim.version.v7_0_1_1") AddVersionParent("vim.version.disabled", "vim.version.version4") AddVersionParent("vim.version.disabled", "vim.version.version5") AddVersionParent("vim.version.disabled", "vim.version.SRIOVValidNumVFs") AddVersionParent("vim.version.disabled", "vim.version.version2") AddVersionParent("vim.version.disabled", "vim.version.version3") AddVersionParent("vim.version.disabled", "vim.version.FCD_VRA_SUPPORT") AddVersionParent("vim.version.disabled", "vim.version.NDU_VSPHERE_HA_INTEGRATION") AddVersionParent("vim.version.disabled", "vim.version.guestDetailedData") AddVersionParent("vim.version.disabled", "vim.version.VmxRebootPowerOff") AddVersionParent("vim.version.disabled", "vim.version.SGX_MPA_VMCheck") AddVersionParent("vim.version.disabled", "vim.version.ClusteredEsx_V1") AddVersionParent("vim.version.disabled", "vim.version.vdcs") AddVersionParent("vim.version.disabled", "vmodl.version.version0") 
AddVersionParent("vim.version.disabled", "vim.version.NFS_VMKPORTBIND") AddVersionParent("vim.version.disabled", "vmodl.version.version1") AddVersionParent("vim.version.disabled", "vmodl.version.version2") AddVersionParent("vim.version.disabled", "vim.version.v6_9_1") AddVersionParent("vim.version.disabled", "vim.version.fourKnStorageSupport") AddVersionParent("vim.version.disabled", "vim.version.batchRenameSupport") AddVersionParent("vim.version.disabled", "vim.version.resetportstatistics") AddVersionParent("vim.version.disabled", "vim.version.VM_CLONE_SWITCH_HOST_EMM") AddVersionParent("vim.version.disabled", "vim.version.VMC_NFS_SUPPORT") AddVersionParent("vim.version.disabled", "vim.version.NsxLiveUpdate") AddVersionParent("vim.version.disabled", "vim.version.VDS_ReadOnlyDisk") AddVersionParent("vim.version.disabled", "vim.version.SGX_MPA_HostReg") AddVersionParent("vim.version.disabled", "vim.version.v7_0_0_2") AddVersionParent("vim.version.disabled", "vim.version.GreenMetrics") AddVersionParent("vim.version.disabled", "vim.version.vVol_datastore_scalability") AddVersionParent("vim.version.disabled", "vim.version.hostVendorSpecificStatus") AddVersionParent("vim.version.disabled", "vim.version.AssignHwCompositeDev") AddVersionParent("vim.version.disabled", "vim.version.VMcrypt_V4") AddVersionParent("vim.version.disabled", "vim.version.hostAccessManager") AddVersionParent("vim.version.disabled", "vim.version.ocmSupportedForReconfigure") AddVersionParent("vim.version.disabled", "vim.version.v6_8_7") AddVersionParent("vim.version.disabled", "vmodl.reflect.version.version1") AddVersionParent("vim.version.disabled", "vmodl.reflect.version.version2") AddVersionParent("vim.version.disabled", "vim.version.v8_0_0_0") AddVersionParent("vim.version.disabled", "vim.version.VQAT") AddVersionParent("vim.version.disabled", "vim.version.VirtualTopo") AddVersionParent("vim.version.disabled", "vim.version.VCDP_NestedFilters") AddVersionParent("vim.version.disabled", 
"vim.version.Tools_Update_Health") AddVersionParent("vim.version.disabled", "vim.version.nativeSnapshot") AddVersionParent("vim.version.disabled", "vim.version.CPU_Scheduler_Info") AddVersionParent("vim.version.disabled", "vim.version.hostProfiles") AddVersionParent("vim.version.disabled", "vim.version.GraphicsDRS") AddVersionParent("vim.version.disabled", "vim.version.VM_CLONE_REKEY_TPM") AddVersionParent("vim.version.disabled", "vim.version.v7_0_3_1") AddVersionParent("vim.version.disabled", "vim.version.FCD_CATALOG_HEALTH") AddVersionParent("vim.version.disabled", "vim.version.v7_0_3_2") AddVersionParent("vim.version.disabled", "vim.version.v7_0_3_0") AddVersionParent("vim.version.disabled", "vim.version.hwh") AddVersionParent("vim.version.disabled", "vim.version.version13") AddVersionParent("vim.version.disabled", "vim.version.smartnic_vc") AddVersionParent("vim.version.disabled", "vim.version.version14") AddVersionParent("vim.version.disabled", "vim.version.version15") AddVersionParent("vim.version.disabled", "vim.version.DRS_LB_REASONCODE") AddVersionParent("vim.version.disabled", "vim.version.VSAN2_Configure") AddVersionParent("vim.version.disabled", "vim.version.HWv20") AddVersionParent("vim.version.disabled", "vim.version.gosCrashRemediation") AddVersionParent("vim.version.disabled", "vim.version.unstable") AddVersionParent("vim.version.disabled", "vim.version.VSAN_DeltaCompEnsureDurability") AddVersionParent("vim.version.disabled", "vim.version.FT_DRS_METRO_CLUSTER") AddVersionParent("vim.version.disabled", "vim.version.bmcInfo") AddVersionParent("vim.version.disabled", "vim.version.disabled") AddVersionParent("vim.version.disabled", "vim.version.VCSOF_173") AddVersionParent("vim.version.disabled", "vim.version.pciSriovExtendedID") AddVersionParent("vim.version.disabled", "vim.version.vmxnet3UPT") AddVersionParent("vim.version.disabled", "vim.version.version10") AddVersionParent("vim.version.disabled", "vim.version.version11") 
# Auto-generated version-dependency data (see file header: DO NOT EDIT by hand).
# One batch of transitive parent links for the "vim.version.disabled" version:
# every entry below is registered as a parent of vim.version.disabled.
for _parent in (
    "vim.version.VMcrypt_IntegrityProtection",
    "vim.version.version12",
    "vim.version.pciDeviceExt",
    "vim.version.toolsOffHost",
    "vim.version.vHT",
    "vim.version.ClusterConfigManagerV2",
    "vim.version.smartnic_network",
    "vim.version.hostCertificateManagement",
    "vim.version.DVX",
    "vim.version.vmMisc",
    "vim.version.VLCM_QuickLaunchPreload",
    "vim.version.VmcExternalStorageSupport",
    "vim.version.LSI2PVSCSI",
    "vim.version.ProvisioningEventRefresh",
    "vim.version.PodVMOnVDS",
    "vim.version.ClusterConfigManagerTransition",
    "vim.version.LiveUpdate",
    "vim.version.OVF_SINGLEDEPLOY_API",
    "vim.version.v7_0_2_0",
    "vim.version.pr1803450",
    "vim.version.v7_0_2_1",
    "vim.version.Vmfs_Unmap_Ultralow_Rate",
    "vim.version.VMcrypt_OnlineVMEncryption",
    "vim.version.FT_VBS_SUPPORT",
    "vim.version.WCP_FaultDomains",
    "vim.version.VMcrypt3_KeyCustomAttribute",
    "vim.version.dnd",
):
    AddVersionParent("vim.version.disabled", _parent)
AddVersionParent("vim.version.disabled", "vim.version.optional_virtual_disks") AddVersionParent("vim.version.disabled", "vim.version.hwh2_0") AddVersionParent("vim.version.disabled", "vim.version.v7_0_1_0") AddVersionParent("vim.version.disabled", "vim.version.E2ENativeNVMeSupport") AddVersionParent("vim.version.disabled", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1") AddVersionParent("vim.version.disabled", "vim.version.EventsOrdering") AddVersionParent("vim.version.disabled", "vim.version.FCD_PERFORMANCE") AddVersionParent("vim.version.disabled", "vim.version.PMemV2") AddVersionParent("vim.version.disabled", "vim.version.v7_0") AddVersionParent("vim.version.disabled", "vim.version.SPBM_DISK_PROV_VIA_POLICY") AddVersionParent("vim.version.disabled", "vim.version.nsx_uens_u2") AddVersionParent("vim.version.VCSOF_173", "vmodl.query.version.version4") AddVersionParent("vim.version.VCSOF_173", "vmodl.query.version.version3") AddVersionParent("vim.version.VCSOF_173", "vmodl.query.version.version2") AddVersionParent("vim.version.VCSOF_173", "vmodl.query.version.version1") AddVersionParent("vim.version.VCSOF_173", "vim.version.version8") AddVersionParent("vim.version.VCSOF_173", "vim.version.version9") AddVersionParent("vim.version.VCSOF_173", "vim.version.version6") AddVersionParent("vim.version.VCSOF_173", "vim.version.version7") AddVersionParent("vim.version.VCSOF_173", "vim.version.version1") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VCSOF_173", "vim.version.version4") AddVersionParent("vim.version.VCSOF_173", "vim.version.version5") AddVersionParent("vim.version.VCSOF_173", "vim.version.version2") AddVersionParent("vim.version.VCSOF_173", "vim.version.version3") AddVersionParent("vim.version.VCSOF_173", "vmodl.version.version0") AddVersionParent("vim.version.VCSOF_173", "vmodl.version.version1") AddVersionParent("vim.version.VCSOF_173", "vmodl.version.version2") AddVersionParent("vim.version.VCSOF_173", 
"vim.version.v6_9_1") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_0_2") AddVersionParent("vim.version.VCSOF_173", "vim.version.v6_8_7") AddVersionParent("vim.version.VCSOF_173", "vmodl.reflect.version.version1") AddVersionParent("vim.version.VCSOF_173", "vmodl.reflect.version.version2") AddVersionParent("vim.version.VCSOF_173", "vim.version.v8_0_0_0") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_3_1") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_3_2") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_3_0") AddVersionParent("vim.version.VCSOF_173", "vim.version.version13") AddVersionParent("vim.version.VCSOF_173", "vim.version.version14") AddVersionParent("vim.version.VCSOF_173", "vim.version.version15") AddVersionParent("vim.version.VCSOF_173", "vim.version.VCSOF_173") AddVersionParent("vim.version.VCSOF_173", "vim.version.version10") AddVersionParent("vim.version.VCSOF_173", "vim.version.version11") AddVersionParent("vim.version.VCSOF_173", "vim.version.version12") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_2_0") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_2_1") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0_1_0") AddVersionParent("vim.version.VCSOF_173", "vim.version.v7_0") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.query.version.version4") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.query.version.version3") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.query.version.version2") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.query.version.version1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version8") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version9") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version6") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version7") AddVersionParent("vim.version.pciSriovExtendedID", 
"vim.version.version1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_1_1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version4") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version5") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version2") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version3") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.version.version0") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.version.version1") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.version.version2") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v6_9_1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_0_2") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v6_8_7") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.reflect.version.version1") AddVersionParent("vim.version.pciSriovExtendedID", "vmodl.reflect.version.version2") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v8_0_0_0") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_3_1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_3_2") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_3_0") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version13") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version14") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version15") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.pciSriovExtendedID") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version10") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version11") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.version12") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_2_0") AddVersionParent("vim.version.pciSriovExtendedID", 
"vim.version.v7_0_2_1") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0_1_0") AddVersionParent("vim.version.pciSriovExtendedID", "vim.version.v7_0") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.query.version.version4") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.query.version.version3") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.query.version.version2") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.query.version.version1") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version8") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version9") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version6") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version7") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version1") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v7_0_1_1") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version4") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version5") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version2") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.version3") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.version.version0") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.version.version1") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.version.version2") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v6_9_1") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v7_0_0_2") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v6_8_7") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.reflect.version.version1") AddVersionParent("vim.version.vmxnet3UPT", "vmodl.reflect.version.version2") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v8_0_0_0") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v7_0_3_1") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v7_0_3_2") AddVersionParent("vim.version.vmxnet3UPT", "vim.version.v7_0_3_0") 
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Each loop registers every listed
# version identifier as a parent of one child version, preserving the
# exact call order the generator originally emitted.

# Tail of the vim.version.vmxnet3UPT parent list (its head is registered
# by the preceding statements).
for _parent in (
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.vmxnet3UPT",
    "vim.version.version10", "vim.version.version11",
    "vim.version.version12", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.vmxnet3UPT", _parent)

# Parents of vim.version.version10.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.version4",
    "vim.version.version5", "vim.version.version2",
    "vim.version.version3", "vmodl.version.version0",
    "vmodl.version.version1", "vmodl.version.version2",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.version10",
):
    AddVersionParent("vim.version.version10", _parent)

# Parents of vim.version.version11.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.version4",
    "vim.version.version5", "vim.version.version2",
    "vim.version.version3", "vmodl.version.version0",
    "vmodl.version.version1", "vmodl.version.version2",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.version10", "vim.version.version11",
):
    AddVersionParent("vim.version.version11", _parent)

# Head of the vim.version.VMcrypt_IntegrityProtection parent list (its
# tail is registered by the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15",
):
    AddVersionParent("vim.version.VMcrypt_IntegrityProtection", _parent)
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Loops preserve the generator's
# original AddVersionParent call order exactly.

# Tail of the vim.version.VMcrypt_IntegrityProtection parent list.
for _parent in (
    "vim.version.version10", "vim.version.version11",
    "vim.version.VMcrypt_IntegrityProtection", "vim.version.version12",
    "vim.version.v7_0_2_0", "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0", "vim.version.v7_0",
):
    AddVersionParent("vim.version.VMcrypt_IntegrityProtection", _parent)

# Parents of vim.version.version12.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.version4",
    "vim.version.version5", "vim.version.version2",
    "vim.version.version3", "vmodl.version.version0",
    "vmodl.version.version1", "vmodl.version.version2",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.version10", "vim.version.version11",
    "vim.version.version12",
):
    AddVersionParent("vim.version.version12", _parent)

# Parents of vim.version.pciDeviceExt.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.pciDeviceExt", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.pciDeviceExt", _parent)

# Head of the vim.version.toolsOffHost parent list (its tail is
# registered by the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2",
):
    AddVersionParent("vim.version.toolsOffHost", _parent)
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Loops preserve the generator's
# original AddVersionParent call order exactly.

# Tail of the vim.version.toolsOffHost parent list.
for _parent in (
    "vim.version.v6_9_1", "vim.version.v7_0_0_2",
    "vim.version.v6_8_7", "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0", "vim.version.version13",
    "vim.version.version14", "vim.version.version15",
    "vim.version.version10", "vim.version.version11",
    "vim.version.version12", "vim.version.toolsOffHost",
    "vim.version.v7_0_2_0", "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0", "vim.version.v7_0",
):
    AddVersionParent("vim.version.toolsOffHost", _parent)

# Parents of vim.version.vHT.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.vHT", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.vHT", _parent)

# Parents of vim.version.ClusterConfigManagerV2.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.ClusterConfigManagerV2", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.ClusterConfigManagerV2", _parent)

# Head of the vim.version.smartnic_network parent list (its tail is
# registered by the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
):
    AddVersionParent("vim.version.smartnic_network", _parent)
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Loops preserve the generator's
# original AddVersionParent call order exactly.

# Tail of the vim.version.smartnic_network parent list.
for _parent in (
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.smartnic_network", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.smartnic_network", _parent)

# Parents of vim.version.hostCertificateManagement.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.hostCertificateManagement", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.hostCertificateManagement", _parent)

# Parents of vim.version.DVX.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.DVX", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.DVX", _parent)

# Head of the vim.version.vmMisc parent list (its tail is registered by
# the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0",
):
    AddVersionParent("vim.version.vmMisc", _parent)
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Loops preserve the generator's
# original AddVersionParent call order exactly.

# Tail of the vim.version.vmMisc parent list.
for _parent in (
    "vmodl.version.version1", "vmodl.version.version2",
    "vim.version.v6_9_1", "vim.version.v7_0_0_2",
    "vim.version.v6_8_7", "vmodl.reflect.version.version1",
    "vmodl.reflect.version.version2", "vim.version.v8_0_0_0",
    "vim.version.v7_0_3_1", "vim.version.v7_0_3_2",
    "vim.version.v7_0_3_0", "vim.version.version13",
    "vim.version.version14", "vim.version.version15",
    "vim.version.version10", "vim.version.version11",
    "vim.version.version12", "vim.version.vmMisc",
    "vim.version.v7_0_2_0", "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0", "vim.version.v7_0",
):
    AddVersionParent("vim.version.vmMisc", _parent)

# Head of the vim.version.VLCM_QuickLaunchPreload parent list (its tail
# is registered by the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11",
):
    AddVersionParent("vim.version.VLCM_QuickLaunchPreload", _parent)
# Generated version-compatibility registrations (see the DO-NOT-EDIT
# warning at the top of this file).  Loops preserve the generator's
# original AddVersionParent call order exactly.

# Tail of the vim.version.VLCM_QuickLaunchPreload parent list.
for _parent in (
    "vim.version.version12", "vim.version.VLCM_QuickLaunchPreload",
    "vim.version.v7_0_2_0", "vim.version.v7_0_2_1",
    "vim.version.v7_0_1_0", "vim.version.v7_0",
):
    AddVersionParent("vim.version.VLCM_QuickLaunchPreload", _parent)

# Parents of vim.version.VmcExternalStorageSupport.
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8", "vim.version.version9",
    "vim.version.version6", "vim.version.version7",
    "vim.version.version1", "vim.version.v7_0_1_1",
    "vim.version.version4", "vim.version.version5",
    "vim.version.version2", "vim.version.version3",
    "vmodl.version.version0", "vmodl.version.version1",
    "vmodl.version.version2", "vim.version.v6_9_1",
    "vim.version.v7_0_0_2", "vim.version.v6_8_7",
    "vmodl.reflect.version.version1", "vmodl.reflect.version.version2",
    "vim.version.v8_0_0_0", "vim.version.v7_0_3_1",
    "vim.version.v7_0_3_2", "vim.version.v7_0_3_0",
    "vim.version.version13", "vim.version.version14",
    "vim.version.version15", "vim.version.version10",
    "vim.version.version11", "vim.version.version12",
    "vim.version.VmcExternalStorageSupport", "vim.version.v7_0_2_0",
    "vim.version.v7_0_2_1", "vim.version.v7_0_1_0",
    "vim.version.v7_0",
):
    AddVersionParent("vim.version.VmcExternalStorageSupport", _parent)

# Head of the vim.version.LSI2PVSCSI parent list (its tail is registered
# by the statements that follow).
for _parent in (
    "vmodl.query.version.version4", "vmodl.query.version.version3",
    "vmodl.query.version.version2", "vmodl.query.version.version1",
    "vim.version.version8",
):
    AddVersionParent("vim.version.LSI2PVSCSI", _parent)
AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version9") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version6") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version7") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version1") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_1_1") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version4") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version5") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version2") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version3") AddVersionParent("vim.version.LSI2PVSCSI", "vmodl.version.version0") AddVersionParent("vim.version.LSI2PVSCSI", "vmodl.version.version1") AddVersionParent("vim.version.LSI2PVSCSI", "vmodl.version.version2") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v6_9_1") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_0_2") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v6_8_7") AddVersionParent("vim.version.LSI2PVSCSI", "vmodl.reflect.version.version1") AddVersionParent("vim.version.LSI2PVSCSI", "vmodl.reflect.version.version2") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v8_0_0_0") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_3_1") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_3_2") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_3_0") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version13") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version14") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version15") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version10") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version11") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.version12") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.LSI2PVSCSI") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_2_0") 
AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_2_1") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0_1_0") AddVersionParent("vim.version.LSI2PVSCSI", "vim.version.v7_0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.query.version.version4") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.query.version.version3") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.query.version.version2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.query.version.version1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version8") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version9") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version6") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version7") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_1_1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version4") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version5") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version3") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.version.version0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.version.version1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.version.version2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v6_9_1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_0_2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v6_8_7") AddVersionParent("vim.version.ProvisioningEventRefresh", "vmodl.reflect.version.version1") AddVersionParent("vim.version.ProvisioningEventRefresh", 
"vmodl.reflect.version.version2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v8_0_0_0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_3_1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_3_2") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_3_0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version13") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version14") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version15") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version10") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version11") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.version12") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.ProvisioningEventRefresh") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_2_0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_2_1") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0_1_0") AddVersionParent("vim.version.ProvisioningEventRefresh", "vim.version.v7_0") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.query.version.version4") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.query.version.version3") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.query.version.version2") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.query.version.version1") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version8") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version9") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version6") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version7") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version1") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_1_1") 
AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version4") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version5") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version2") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version3") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.version.version0") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.version.version1") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.version.version2") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v6_9_1") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_0_2") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v6_8_7") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.reflect.version.version1") AddVersionParent("vim.version.PodVMOnVDS", "vmodl.reflect.version.version2") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v8_0_0_0") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_3_1") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_3_2") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_3_0") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version13") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version14") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version15") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version10") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version11") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.version12") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.PodVMOnVDS") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_2_0") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_2_1") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0_1_0") AddVersionParent("vim.version.PodVMOnVDS", "vim.version.v7_0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.query.version.version4") AddVersionParent("vim.version.ClusterConfigManagerTransition", 
"vmodl.query.version.version3") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.query.version.version2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.query.version.version1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version8") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version9") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version6") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version7") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_1_1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version4") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version5") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version3") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.version.version0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.version.version1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.version.version2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v6_9_1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_0_2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v6_8_7") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.reflect.version.version1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vmodl.reflect.version.version2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v8_0_0_0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_3_1") 
AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_3_2") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_3_0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version13") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version14") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version15") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version10") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version11") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.version12") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.ClusterConfigManagerTransition") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_2_0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_2_1") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0_1_0") AddVersionParent("vim.version.ClusterConfigManagerTransition", "vim.version.v7_0") AddVersionParent("vim.version.LiveUpdate", "vmodl.query.version.version4") AddVersionParent("vim.version.LiveUpdate", "vmodl.query.version.version3") AddVersionParent("vim.version.LiveUpdate", "vmodl.query.version.version2") AddVersionParent("vim.version.LiveUpdate", "vmodl.query.version.version1") AddVersionParent("vim.version.LiveUpdate", "vim.version.version8") AddVersionParent("vim.version.LiveUpdate", "vim.version.version9") AddVersionParent("vim.version.LiveUpdate", "vim.version.version6") AddVersionParent("vim.version.LiveUpdate", "vim.version.version7") AddVersionParent("vim.version.LiveUpdate", "vim.version.version1") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_1_1") AddVersionParent("vim.version.LiveUpdate", "vim.version.version4") AddVersionParent("vim.version.LiveUpdate", "vim.version.version5") 
AddVersionParent("vim.version.LiveUpdate", "vim.version.version2") AddVersionParent("vim.version.LiveUpdate", "vim.version.version3") AddVersionParent("vim.version.LiveUpdate", "vmodl.version.version0") AddVersionParent("vim.version.LiveUpdate", "vmodl.version.version1") AddVersionParent("vim.version.LiveUpdate", "vmodl.version.version2") AddVersionParent("vim.version.LiveUpdate", "vim.version.v6_9_1") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_0_2") AddVersionParent("vim.version.LiveUpdate", "vim.version.v6_8_7") AddVersionParent("vim.version.LiveUpdate", "vmodl.reflect.version.version1") AddVersionParent("vim.version.LiveUpdate", "vmodl.reflect.version.version2") AddVersionParent("vim.version.LiveUpdate", "vim.version.v8_0_0_0") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_3_1") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_3_2") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_3_0") AddVersionParent("vim.version.LiveUpdate", "vim.version.version13") AddVersionParent("vim.version.LiveUpdate", "vim.version.version14") AddVersionParent("vim.version.LiveUpdate", "vim.version.version15") AddVersionParent("vim.version.LiveUpdate", "vim.version.version10") AddVersionParent("vim.version.LiveUpdate", "vim.version.version11") AddVersionParent("vim.version.LiveUpdate", "vim.version.version12") AddVersionParent("vim.version.LiveUpdate", "vim.version.LiveUpdate") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_2_0") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_2_1") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0_1_0") AddVersionParent("vim.version.LiveUpdate", "vim.version.v7_0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.query.version.version4") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.query.version.version3") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.query.version.version2") 
AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.query.version.version1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version8") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version9") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version6") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version7") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_1_1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version4") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version5") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version2") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version3") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.version.version0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.version.version1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.version.version2") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v6_9_1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_0_2") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v6_8_7") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.reflect.version.version1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vmodl.reflect.version.version2") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v8_0_0_0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_3_1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_3_2") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_3_0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version13") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version14") 
AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version15") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version10") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version11") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.version12") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.OVF_SINGLEDEPLOY_API") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_2_0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_2_1") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0_1_0") AddVersionParent("vim.version.OVF_SINGLEDEPLOY_API", "vim.version.v7_0") AddVersionParent("vim.version.v7_0_2_0", "vmodl.query.version.version4") AddVersionParent("vim.version.v7_0_2_0", "vmodl.query.version.version3") AddVersionParent("vim.version.v7_0_2_0", "vmodl.query.version.version2") AddVersionParent("vim.version.v7_0_2_0", "vmodl.query.version.version1") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version8") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version9") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version6") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version7") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version1") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v7_0_1_1") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version4") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version5") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version2") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version3") AddVersionParent("vim.version.v7_0_2_0", "vmodl.version.version0") AddVersionParent("vim.version.v7_0_2_0", "vmodl.version.version1") AddVersionParent("vim.version.v7_0_2_0", "vmodl.version.version2") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v6_9_1") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v7_0_0_2") 
AddVersionParent("vim.version.v7_0_2_0", "vim.version.v6_8_7") AddVersionParent("vim.version.v7_0_2_0", "vmodl.reflect.version.version1") AddVersionParent("vim.version.v7_0_2_0", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version13") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version14") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version15") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version10") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version11") AddVersionParent("vim.version.v7_0_2_0", "vim.version.version12") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v7_0_2_0") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v7_0_1_0") AddVersionParent("vim.version.v7_0_2_0", "vim.version.v7_0") AddVersionParent("vim.version.pr1803450", "vmodl.query.version.version4") AddVersionParent("vim.version.pr1803450", "vmodl.query.version.version3") AddVersionParent("vim.version.pr1803450", "vmodl.query.version.version2") AddVersionParent("vim.version.pr1803450", "vmodl.query.version.version1") AddVersionParent("vim.version.pr1803450", "vim.version.version8") AddVersionParent("vim.version.pr1803450", "vim.version.version9") AddVersionParent("vim.version.pr1803450", "vim.version.version6") AddVersionParent("vim.version.pr1803450", "vim.version.version7") AddVersionParent("vim.version.pr1803450", "vim.version.version1") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_1_1") AddVersionParent("vim.version.pr1803450", "vim.version.version4") AddVersionParent("vim.version.pr1803450", "vim.version.version5") AddVersionParent("vim.version.pr1803450", "vim.version.version2") AddVersionParent("vim.version.pr1803450", "vim.version.version3") AddVersionParent("vim.version.pr1803450", "vmodl.version.version0") AddVersionParent("vim.version.pr1803450", "vmodl.version.version1") AddVersionParent("vim.version.pr1803450", "vmodl.version.version2") 
AddVersionParent("vim.version.pr1803450", "vim.version.v6_9_1") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_0_2") AddVersionParent("vim.version.pr1803450", "vim.version.v6_8_7") AddVersionParent("vim.version.pr1803450", "vmodl.reflect.version.version1") AddVersionParent("vim.version.pr1803450", "vmodl.reflect.version.version2") AddVersionParent("vim.version.pr1803450", "vim.version.v8_0_0_0") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_3_1") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_3_2") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_3_0") AddVersionParent("vim.version.pr1803450", "vim.version.version13") AddVersionParent("vim.version.pr1803450", "vim.version.version14") AddVersionParent("vim.version.pr1803450", "vim.version.version15") AddVersionParent("vim.version.pr1803450", "vim.version.version10") AddVersionParent("vim.version.pr1803450", "vim.version.version11") AddVersionParent("vim.version.pr1803450", "vim.version.version12") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_2_0") AddVersionParent("vim.version.pr1803450", "vim.version.pr1803450") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_2_1") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0_1_0") AddVersionParent("vim.version.pr1803450", "vim.version.v7_0") AddVersionParent("vim.version.v7_0_2_1", "vmodl.query.version.version4") AddVersionParent("vim.version.v7_0_2_1", "vmodl.query.version.version3") AddVersionParent("vim.version.v7_0_2_1", "vmodl.query.version.version2") AddVersionParent("vim.version.v7_0_2_1", "vmodl.query.version.version1") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version8") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version9") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version6") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version7") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version1") AddVersionParent("vim.version.v7_0_2_1", 
"vim.version.v7_0_1_1") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version4") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version5") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version2") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version3") AddVersionParent("vim.version.v7_0_2_1", "vmodl.version.version0") AddVersionParent("vim.version.v7_0_2_1", "vmodl.version.version1") AddVersionParent("vim.version.v7_0_2_1", "vmodl.version.version2") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v6_9_1") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v7_0_0_2") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v6_8_7") AddVersionParent("vim.version.v7_0_2_1", "vmodl.reflect.version.version1") AddVersionParent("vim.version.v7_0_2_1", "vmodl.reflect.version.version2") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version13") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version14") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version15") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version10") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version11") AddVersionParent("vim.version.v7_0_2_1", "vim.version.version12") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v7_0_2_0") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v7_0_2_1") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v7_0_1_0") AddVersionParent("vim.version.v7_0_2_1", "vim.version.v7_0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.query.version.version4") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.query.version.version3") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.query.version.version2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.query.version.version1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version8") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version9") 
AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version6") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version7") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_1_1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version4") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version5") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version3") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.version.version0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.version.version1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.version.version2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v6_9_1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_0_2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v6_8_7") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.reflect.version.version1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vmodl.reflect.version.version2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v8_0_0_0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_3_1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_3_2") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_3_0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version13") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version14") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version15") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version10") 
AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version11") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.version12") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_2_0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_2_1") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.Vmfs_Unmap_Ultralow_Rate") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0_1_0") AddVersionParent("vim.version.Vmfs_Unmap_Ultralow_Rate", "vim.version.v7_0") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.query.version.version4") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.query.version.version3") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.query.version.version2") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.query.version.version1") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version8") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version9") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version6") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version7") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version1") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_1_1") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version4") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version5") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version2") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version3") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.version.version0") AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.version.version1") 
# NOTE(review): auto-generated version-graph registration (see the file banner:
# "AUTO GENERATED CODE - DO NOT EDIT"). Each AddVersionParent(child, parent)
# call (from .VmomiSupport, imported at the top of the file) registers `parent`
# in `child`'s parent-version set; as seen in every group below, each version
# also lists itself among its own parents. The exact pair sequence is generated
# data — do not hand-edit; regenerate from the VMODL definitions instead.
# Parents of vim.version.VMcrypt_OnlineVMEncryption
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.version.version2")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v6_9_1")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v6_8_7")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version13")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version14")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version15")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version10")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version11")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.VMcrypt_IntegrityProtection")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.version12")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.VMcrypt_OnlineVMEncryption")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VMcrypt_OnlineVMEncryption", "vim.version.v7_0")
# Parents of vim.version.FT_VBS_SUPPORT
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.query.version.version4")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.query.version.version3")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.query.version.version2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.query.version.version1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version8")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version9")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version6")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version7")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version4")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version5")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version3")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.version.version0")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.version.version1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.version.version2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v6_9_1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v6_8_7")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version13")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version14")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version15")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version10")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version11")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.version12")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.FT_VBS_SUPPORT")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FT_VBS_SUPPORT", "vim.version.v7_0")
# Parents of vim.version.WCP_FaultDomains
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.query.version.version4")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.query.version.version3")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.query.version.version2")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.query.version.version1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version8")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version9")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version6")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version7")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version4")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version5")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version2")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version3")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.version.version0")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.version.version1")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.version.version2")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v6_9_1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v6_8_7")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.WCP_FaultDomains", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version13")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version14")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version15")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version10")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version11")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.version12")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.WCP_FaultDomains")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.WCP_FaultDomains", "vim.version.v7_0")
# Parents of vim.version.VMcrypt3_KeyCustomAttribute
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.query.version.version4")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.query.version.version3")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.query.version.version2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.query.version.version1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version8")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version9")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version6")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version7")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version4")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version5")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version3")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.version.version0")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.version.version1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.version.version2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v6_9_1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v6_8_7")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version13")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version14")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version15")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version10")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version11")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.version12")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.VMcrypt3_KeyCustomAttribute")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VMcrypt3_KeyCustomAttribute", "vim.version.v7_0")
# Parents of vim.version.dnd
AddVersionParent("vim.version.dnd", "vmodl.query.version.version4")
AddVersionParent("vim.version.dnd", "vmodl.query.version.version3")
AddVersionParent("vim.version.dnd", "vmodl.query.version.version2")
AddVersionParent("vim.version.dnd", "vmodl.query.version.version1")
AddVersionParent("vim.version.dnd", "vim.version.version8")
AddVersionParent("vim.version.dnd", "vim.version.version9")
AddVersionParent("vim.version.dnd", "vim.version.version6")
AddVersionParent("vim.version.dnd", "vim.version.version7")
AddVersionParent("vim.version.dnd", "vim.version.version1")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.dnd", "vim.version.version4")
AddVersionParent("vim.version.dnd", "vim.version.version5")
AddVersionParent("vim.version.dnd", "vim.version.version2")
AddVersionParent("vim.version.dnd", "vim.version.version3")
AddVersionParent("vim.version.dnd", "vmodl.version.version0")
AddVersionParent("vim.version.dnd", "vmodl.version.version1")
AddVersionParent("vim.version.dnd", "vmodl.version.version2")
AddVersionParent("vim.version.dnd", "vim.version.v6_9_1")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.dnd", "vim.version.v6_8_7")
AddVersionParent("vim.version.dnd", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.dnd", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.dnd", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.dnd", "vim.version.version13")
AddVersionParent("vim.version.dnd", "vim.version.version14")
AddVersionParent("vim.version.dnd", "vim.version.version15")
AddVersionParent("vim.version.dnd", "vim.version.version10")
AddVersionParent("vim.version.dnd", "vim.version.version11")
AddVersionParent("vim.version.dnd", "vim.version.version12")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.dnd", "vim.version.dnd")
AddVersionParent("vim.version.dnd", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.dnd", "vim.version.v7_0")
# Parents of vim.version.optional_virtual_disks
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.query.version.version4")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.query.version.version3")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.query.version.version2")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.query.version.version1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version8")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version9")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version6")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version7")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version4")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version5")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version2")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version3")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.version.version0")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.version.version1")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.version.version2")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v6_9_1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v6_8_7")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.optional_virtual_disks", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version13")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version14")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version15")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version10")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version11")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.version12")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.optional_virtual_disks")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.optional_virtual_disks", "vim.version.v7_0")
# Parents of vim.version.hwh2_0
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.hwh2_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.hwh2_0", "vim.version.version8")
AddVersionParent("vim.version.hwh2_0", "vim.version.version9")
AddVersionParent("vim.version.hwh2_0", "vim.version.version6")
AddVersionParent("vim.version.hwh2_0", "vim.version.version7")
AddVersionParent("vim.version.hwh2_0", "vim.version.version1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.version4")
AddVersionParent("vim.version.hwh2_0", "vim.version.version5")
AddVersionParent("vim.version.hwh2_0", "vim.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.version3")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version0")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version1")
AddVersionParent("vim.version.hwh2_0", "vmodl.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.hwh2_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.hwh2_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.version13")
AddVersionParent("vim.version.hwh2_0", "vim.version.version14")
AddVersionParent("vim.version.hwh2_0", "vim.version.version15")
AddVersionParent("vim.version.hwh2_0", "vim.version.version10")
AddVersionParent("vim.version.hwh2_0", "vim.version.version11")
AddVersionParent("vim.version.hwh2_0", "vim.version.version12")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.hwh2_0", "vim.version.hwh2_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.hwh2_0", "vim.version.v7_0")
# Parents of vim.version.v7_0_1_0 (release version: fewer parents than
# the feature versions above — no post-7.0.1 entries)
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version8")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version9")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version6")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version7")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version4")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version5")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version3")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0_1_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version13")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version14")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version15")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version10")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version11")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.version12")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.v7_0_1_0", "vim.version.v7_0")
# Parents of vim.version.E2ENativeNVMeSupport
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.query.version.version4")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.query.version.version3")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.query.version.version2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.query.version.version1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version8")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version9")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version6")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version7")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version4")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version5")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version3")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.version.version0")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.version.version1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.version.version2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v6_9_1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v6_8_7")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version13")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version14")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version15")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version10")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version11")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.version12")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.E2ENativeNVMeSupport")
AddVersionParent("vim.version.E2ENativeNVMeSupport", "vim.version.v7_0")
# Parents of vim.version.VM_OP_NOTIFICATION_TO_APP_V1
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version4")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version3")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.query.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version8")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version9")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version6")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version7")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version4")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version5")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version3")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v6_9_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v6_8_7")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version13")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version14")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version15")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version10")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version11")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.version12")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.VM_OP_NOTIFICATION_TO_APP_V1")
AddVersionParent("vim.version.VM_OP_NOTIFICATION_TO_APP_V1", "vim.version.v7_0")
# Parents of vim.version.EventsOrdering
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version4")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version3")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vmodl.query.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version8")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version9")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version6")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version7")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version4")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version5")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version3")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version0")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vmodl.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v6_9_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v6_8_7")
AddVersionParent("vim.version.EventsOrdering", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.EventsOrdering", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version13")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version14")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version15")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version10")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version11")
AddVersionParent("vim.version.EventsOrdering", "vim.version.version12")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.EventsOrdering", "vim.version.EventsOrdering")
AddVersionParent("vim.version.EventsOrdering", "vim.version.v7_0")
# Parents of vim.version.FCD_PERFORMANCE
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.query.version.version4")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.query.version.version3")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.query.version.version2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.query.version.version1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version8")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version9")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version6")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version7")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version4")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version5")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version3")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.version.version0")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.version.version1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.version.version2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v6_9_1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v6_8_7")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version13")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version14")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version15")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version10")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version11")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.version12")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.FCD_PERFORMANCE")
AddVersionParent("vim.version.FCD_PERFORMANCE", "vim.version.v7_0")
# Parents of vim.version.PMemV2
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version4")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version3")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version2")
AddVersionParent("vim.version.PMemV2", "vmodl.query.version.version1")
AddVersionParent("vim.version.PMemV2", "vim.version.version8")
AddVersionParent("vim.version.PMemV2", "vim.version.version9")
AddVersionParent("vim.version.PMemV2", "vim.version.version6")
AddVersionParent("vim.version.PMemV2", "vim.version.version7")
AddVersionParent("vim.version.PMemV2", "vim.version.version1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.PMemV2", "vim.version.version4")
AddVersionParent("vim.version.PMemV2", "vim.version.version5")
AddVersionParent("vim.version.PMemV2", "vim.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.version3")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version0")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version1")
AddVersionParent("vim.version.PMemV2", "vmodl.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.v6_9_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.PMemV2", "vim.version.v6_8_7")
AddVersionParent("vim.version.PMemV2", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.PMemV2", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.PMemV2", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.PMemV2", "vim.version.version13")
AddVersionParent("vim.version.PMemV2", "vim.version.version14")
AddVersionParent("vim.version.PMemV2", "vim.version.version15")
AddVersionParent("vim.version.PMemV2", "vim.version.version10")
AddVersionParent("vim.version.PMemV2", "vim.version.version11")
AddVersionParent("vim.version.PMemV2", "vim.version.version12")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.PMemV2", "vim.version.PMemV2")
AddVersionParent("vim.version.PMemV2", "vim.version.v7_0")
# Parents of vim.version.v7_0 (release version: smallest parent set here)
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version4")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version3")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version2")
AddVersionParent("vim.version.v7_0", "vmodl.query.version.version1")
AddVersionParent("vim.version.v7_0", "vim.version.version8")
AddVersionParent("vim.version.v7_0", "vim.version.version9")
AddVersionParent("vim.version.v7_0", "vim.version.version6")
AddVersionParent("vim.version.v7_0", "vim.version.version7")
AddVersionParent("vim.version.v7_0", "vim.version.version1")
AddVersionParent("vim.version.v7_0", "vim.version.version4")
AddVersionParent("vim.version.v7_0", "vim.version.version5")
AddVersionParent("vim.version.v7_0", "vim.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.version3")
AddVersionParent("vim.version.v7_0", "vmodl.version.version0")
AddVersionParent("vim.version.v7_0", "vmodl.version.version1")
AddVersionParent("vim.version.v7_0", "vmodl.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.v6_9_1")
AddVersionParent("vim.version.v7_0", "vim.version.v6_8_7")
AddVersionParent("vim.version.v7_0", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.v7_0", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.v7_0", "vim.version.version13")
AddVersionParent("vim.version.v7_0", "vim.version.version14")
AddVersionParent("vim.version.v7_0", "vim.version.version15")
AddVersionParent("vim.version.v7_0", "vim.version.version10")
AddVersionParent("vim.version.v7_0", "vim.version.version11")
AddVersionParent("vim.version.v7_0", "vim.version.version12")
AddVersionParent("vim.version.v7_0", "vim.version.v7_0")
# Parents of vim.version.SPBM_DISK_PROV_VIA_POLICY
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.query.version.version4")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.query.version.version3")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.query.version.version2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.query.version.version1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version8")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version9")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version6")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version7")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_1_1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version4")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version5")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version3")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.version.version0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.version.version1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.version.version2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v6_9_1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_0_2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v6_8_7")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.reflect.version.version1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vmodl.reflect.version.version2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v8_0_0_0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_3_1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_3_2")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_3_0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version13")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version14")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version15")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version10")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version11")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.version12")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_2_0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_2_1")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0_1_0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.v7_0")
AddVersionParent("vim.version.SPBM_DISK_PROV_VIA_POLICY", "vim.version.SPBM_DISK_PROV_VIA_POLICY")
# Parents of vim.version.nsx_uens_u2 (group continues on the following lines)
AddVersionParent("vim.version.nsx_uens_u2", "vmodl.query.version.version4")
AddVersionParent("vim.version.nsx_uens_u2", "vmodl.query.version.version3")
AddVersionParent("vim.version.nsx_uens_u2", "vmodl.query.version.version2")
AddVersionParent("vim.version.nsx_uens_u2", "vmodl.query.version.version1")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version8")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version9")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version6")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version7")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version1")
AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_1_1") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version4") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version5") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version2") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version3") AddVersionParent("vim.version.nsx_uens_u2", "vmodl.version.version0") AddVersionParent("vim.version.nsx_uens_u2", "vmodl.version.version1") AddVersionParent("vim.version.nsx_uens_u2", "vmodl.version.version2") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v6_9_1") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_0_2") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v6_8_7") AddVersionParent("vim.version.nsx_uens_u2", "vmodl.reflect.version.version1") AddVersionParent("vim.version.nsx_uens_u2", "vmodl.reflect.version.version2") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v8_0_0_0") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_3_1") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_3_2") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_3_0") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version13") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version14") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version15") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version10") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version11") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.version12") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_2_0") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_2_1") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0_1_0") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.v7_0") AddVersionParent("vim.version.nsx_uens_u2", "vim.version.nsx_uens_u2") CreateDataType("hostd.DVPortgroupInfo", "HostdDVPortgroupInfo", 
"vmodl.DynamicData", "vim.version.version8", [("portgroupKey", "string", "vim.version.version8", 0), ("dvsUuid", "string", "vim.version.version8", 0), ("name", "string", "vim.version.version8", 0), ("numPorts", "int", "vim.version.version8", F_OPTIONAL), ("type", "string", "vim.version.version8", 0), ("policy", "vim.dvs.DistributedVirtualPortgroup.PortgroupPolicy", "vim.version.version8", F_OPTIONAL), ("defaultPortConfig", "vim.dvs.DistributedVirtualPort.Setting", "vim.version.version8", F_OPTIONAL), ("uplinkPortgroup", "boolean", "vim.version.version8", 0), ("keyedOpaqueDataList", "vim.dvs.KeyedOpaqueData.KeyedOpaqueDataList", "vim.version.version8", F_OPTIONAL), ("opaqueDataList", "vim.dvs.OpaqueData.OpaqueDataList", "vim.version.version8", F_OPTIONAL), ("transportZoneUuid", "string", "vim.version.v7_0", F_OPTIONAL), ("logicalSwitchUuid", "string", "vim.version.v7_0", F_OPTIONAL), ("backingType", "string", "vim.version.v7_0", F_OPTIONAL)]) CreateManagedType("hostd.DebugManager", "HostdDebugManager", "vmodl.ManagedObject", "vim.version.version1", None, [("redirectStdoutToFile", "RedirectStdoutToFile", "vim.version.version1", (("filePath", "string", "vim.version.version1", 0, None),("redirectStderr", "boolean", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("redirectStderrToFile", "RedirectStderrToFile", "vim.version.version1", (("filePath", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("throwException", "ThrowException", "vim.version.version1", (("message", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("clobberAddress", "ClobberAddress", "vim.version.version1", (("addr", "long", "vim.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, None), ("useTooManyFds", "UseTooManyFds", "vim.version.version1", (("numFds", "int", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("coldQuit", "ColdQuit", "vim.version.version1", (), (0, "void", 
"void"), None, None), ("updateDispatcherOption", "UpdateDispatcherOption", "vim.version.version1", (("option", "vim.option.OptionValue", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("forceDiskPathSanityCheck", "ForceDiskPathSanityCheck", "vim.version.version1", (), (0, "void", "void"), None, None), ("injectDiskLayoutError", "InjectDiskLayoutError", "vim.version.version1", (("set", "boolean", "vim.version.version1", 0, None),("value", "boolean", "vim.version.version1", 0, None),), (0, "boolean", "boolean"), None, None), ("testRealpathError", "TestRealpathError", "vim.version.version1", (("enable", "boolean", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("simulateInaccessibleStorage", "SimulateInaccessibleStorage", "vim.version.version1", (("enable", "boolean", "vim.version.version1", F_OPTIONAL, None),), (0, "boolean", "boolean"), None, None), ("injectRemoveResourceEntryError", "InjectRemoveResourceEntryError", "vim.version.version1", (("set", "boolean", "vim.version.version1", 0, None),("value", "boolean", "vim.version.version1", 0, None),), (0, "boolean", "boolean"), None, None), ("setMaxRegisteredVmsLimit", "SetMaxRegisteredVmsLimit", "vim.version.version1", (("newMaxVMLimit", "int", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("getMaxRegisteredVmsLimit", "GetMaxRegisteredVmsLimit", "vim.version.version1", (), (0, "int", "int"), None, None), ("injectProductLockerError", "InjectProductLockerError", "vim.version.version1", (("errorId", "string", "vim.version.version1", 0, None),("set", "boolean", "vim.version.version1", 0, None),("value", "boolean", "vim.version.version1", 0, None),), (0, "boolean", "boolean"), None, None), ("getRegisteredManagedObjects", "GetRegisteredManagedObjects", "vim.version.version1", (), (0, "vmodl.ManagedObject[]", "vmodl.ManagedObject[]"), None, None), ("getPreferenceOption", "GetPreferenceOption", "vim.version.version1", (("prefKey", "string", 
"vim.version.version1", 0, None),), (F_OPTIONAL, "string", "string"), None, None), ("setPreferenceOption", "SetPreferenceOption", "vim.version.version1", (("prefKey", "string", "vim.version.version1", 0, None),("value", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("setTesterOption", "SetTesterOption", "vim.version.version1", (("path", "string", "vim.version.version1", 0, None),("value", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("setIntegerConfigOption", "SetIntegerConfigOption", "vim.version.version1", (("configPath", "string", "vim.version.version1", 0, None),("configValue", "int", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("setStringConfigOption", "SetStringConfigOption", "vim.version.version1", (("configPath", "string", "vim.version.version1", 0, None),("configValue", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("setBooleanConfigOption", "SetBooleanConfigOption", "vim.version.version1", (("configPath", "string", "vim.version.version1", 0, None),("configValue", "boolean", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("blockingSerializedTask", "BlockingSerializedTask_Task", "vim.version.version1", (), (0, "vim.Task", "void"), None, None), ("startModeOperation", "StartModeOperation", "vim.version.version1", (("opType", "string", "vim.version.version1", F_OPTIONAL, None),), (0, "string", "string"), None, None), ("endModeOperation", "EndModeOperation", "vim.version.version1", (("operationId", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("MessWithHaTaskManagerSettings", "MessWithHaTaskManagerSettings", "vim.version.version1", (("settings", "hostd.HaTaskManagerSettings", "vim.version.version1", F_OPTIONAL, None),), (0, "hostd.HaTaskManagerSettings", "hostd.HaTaskManagerSettings"), None, None), ("addSmartNic", "AddSmartNic", "vim.version.version1", (("mockupSmartNic", 
"hostd.MockupSmartNic", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removeSmartNic", "RemoveSmartNic", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "boolean", "boolean"), None, None), ("changeSmartNicAvailability", "ChangeSmartNicAvailability", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),("available", "boolean", "vim.version.version1", 0, None),), (0, "void", "void"), None, None)]) CreateDataType("hostd.HaTaskManagerSettings", "HostdHaTaskManagerSettings", "vmodl.DynamicData", "vim.version.version1", [("maxRetainedTasks", "int", "vim.version.version1", 0), ("taskCleanupInterval", "int", "vim.version.version1", 0), ("taskTimedoutPeriod", "int", "vim.version.version1", 0), ("taskRetentionPeriod", "int", "vim.version.version1", 0)]) CreateManagedType("hostd.HbrInternalSystem", "HostdHbrInternalSystem", "vmodl.ManagedObject", "vim.version.version7", None, [("abortVmInstance", "HbrAbortVmInstance", "vim.version.version7", (("vm", "vim.VirtualMachine", "vim.version.version7", 0, "VirtualMachine.Hbr.ReplicaManagement"),), (0, "void", "void"), "Host.Hbr.HbrManagement", ["vim.fault.ReplicationVmFault", "vim.fault.ReplicationVmConfigFault", ]), ("getGroupState", "HbrGetGroupState", "vim.version.version7", (("vm", "vim.VirtualMachine", "vim.version.version7", 0, "VirtualMachine.Hbr.ReplicaManagement"),), (0, "string", "string"), "Host.Hbr.HbrManagement", ["vim.fault.ReplicationVmFault", "vim.fault.ReplicationVmConfigFault", ])]) CreateManagedType("hostd.HostsvcManager", "HostdHostsvcManager", "vmodl.ManagedObject", "vim.version.version1", None, [("refreshDatastores", "RefreshDatastores", "vim.version.version1", (), (0, "void", "void"), None, None), ("refreshVsanDatastore", "RefreshVsanDatastore", "vim.version.version1", (("vsanProperty", "anyType", "vim.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, None), ("refreshPhysicalNic", "RefreshPhysicalNic", 
"vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("refreshDefaultGateway", "RefreshDefaultGateway", "vim.version.version1", (), (0, "void", "void"), None, None), ("refreshServiceConsoleNic", "RefreshServiceConsoleNic", "vim.version.version1", (), (0, "void", "void"), None, None), ("refreshNetworkConfig", "RefreshNetworkConfig", "vim.version.version1", (), (0, "void", "void"), None, None), ("updateVsanProperty", "UpdateVsanProperty", "vim.version.version1", (("propertyPath", "string", "vim.version.version1", 0, None),("property", "anyType", "vim.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, None), ("listModeOperations", "ListModeOperations", "vim.version.version1", (), (F_OPTIONAL, "string[]", "string[]"), None, None)]) CreateManagedType("hostd.LogsvcManager", "HostdLogsvcManager", "vmodl.ManagedObject", "vim.version.version1", None, [("setLogLevel", "SetLogLevel", "vim.version.version1", (("name", "string", "vim.version.version1", 0, None),("level", "string", "vim.version.version1", 0, None),("prefix", "boolean", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("getLoggers", "GetLoggers", "vim.version.version1", (("prefix", "string", "vim.version.version1", 0, None),), (F_OPTIONAL, "hostd.LogsvcManager.LogInfo[]", "hostd.LogsvcManager.LogInfo[]"), None, None)]) CreateDataType("hostd.LogsvcManager.LogInfo", "HostdLogsvcManagerLogInfo", "vmodl.DynamicData", "vim.version.version1", [("name", "string", "vim.version.version1", 0), ("logLevel", "string", "vim.version.version1", 0)]) CreateDataType("hostd.MockupSmartNic", "HostdMockupSmartNic", "vmodl.DynamicData", "vim.version.version1", [("id", "string", "vim.version.version1", 0), ("model", "string", "vim.version.version1", 0), ("pciAddress", "string", "vim.version.version1", 0), ("transportAddress", "string", "vim.version.version1", 0)]) CreateManagedType("hostd.MockupSystem", "HostdMockupSystem", 
"vmodl.ManagedObject", "vim.version.version1", None, [("addPhysicalNic", "AddPhysicalNic", "vim.version.version1", (("pnic", "vim.host.PhysicalNic", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removePhysicalNic", "RemovePhysicalNic", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("setPhysicalNicActualLinkSpeed", "SetPhysicalNicActualLinkSpeed", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),("linkSpeed", "vim.host.PhysicalNic.LinkSpeedDuplex", "vim.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, None), ("clearNetworkSystem", "ClearNetworkSystem", "vim.version.version1", (), (0, "void", "void"), None, None), ("addScsiLun", "AddScsiLun", "vim.version.version1", (("lun", "vim.host.ScsiLun", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removeScsiLun", "RemoveScsiLun", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("addScsiDisk", "AddScsiDisk", "vim.version.version1", (("disk", "vim.host.ScsiDisk", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removeScsiDisk", "RemoveScsiDisk", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("addHostBusAdapter", "AddHostBusAdapter", "vim.version.version1", (("hba", "vim.host.HostBusAdapter", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removeHostBusAdapter", "RemoveHostBusAdapter", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("addTopology", "AddTopology", "vim.version.version1", (("adapter", "string", "vim.version.version1", 0, None),("target", "int", "vim.version.version1", 0, None),("lun", "int", "vim.version.version1", 0, None),("devname", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), 
("removeTopology", "RemoveTopology", "vim.version.version1", (("adapter", "string", "vim.version.version1", 0, None),("target", "int", "vim.version.version1", 0, None),("lun", "int", "vim.version.version1", 0, None),("devname", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("clearStorageSystem", "ClearStorageSystem", "vim.version.version1", (), (0, "void", "void"), None, None), ("addPCIDevice", "AddPCIDevice", "vim.version.version1", (("pci", "vim.host.PciDevice", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("removePCIDevice", "RemovePCIDevice", "vim.version.version1", (("id", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("updateDiskPartitions", "updateDiskPartitionsMs", "vim.version.version1", (("devName", "string", "vim.version.version1", 0, None),("spec", "vim.host.DiskPartitionInfo.Specification", "vim.version.version1", 0, None),), (0, "void", "void"), None, None), ("activateMultipathPath", "ActivateMultipathPath", "vim.version.version1", (("pathId", "string", "vim.version.version1", 0, None),), (0, "void", "void"), None, None)]) CreateEnumType("hostd.OpType", "HostdOpType", "vim.version.version1", ["VM_OP", "NFC_OP"]) CreateManagedType("hostd.PerfCounterManager", "HostdPerfCounterManager", "vmodl.ManagedObject", "vim.version.version1", [("registry", "hostd.perfCounter.Registry", "vim.version.version1", 0, None)], [("queryScoreboard", "QueryScoreboard", "vim.version.version1", (), (0, "string", "string"), None, None)]) CreateManagedType("hostd.ServiceInstance", "HostdServiceInstance", "vmodl.ManagedObject", "vim.version.version1", [("debugManager", "hostd.DebugManager", "vim.version.version1", 0, None), ("hostsvcManager", "hostd.HostsvcManager", "vim.version.version1", 0, None), ("perfCounterManager", "hostd.PerfCounterManager", "vim.version.version1", 0, None), ("logsvcManager", "hostd.LogsvcManager", "vim.version.version1", 0, None), ("vprobesManager", 
"hostd.VprobesManager", "vim.version.version7", 0, None)], [("refresh", "HostdServiceInstance_Refresh", "vim.version.version1", (("name", "string", "vim.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, None), ("shutdown", "Shutdown", "vim.version.version1", (), (0, "void", "void"), None, None)]) CreateManagedType("hostd.VprobesManager", "HostdVprobesManager", "vmodl.ManagedObject", "vim.version.version1", None, [("resetVprobes", "resetVmDomainVprobes_Task", "vim.version.version1", (("vm", "vim.VirtualMachine", "vim.version.version1", 0, None),("instanceId", "string", "vim.version.version1", 0, None),), (0, "vim.Task", "void"), "Global.VCServer", ["vim.fault.TaskInProgress", "vim.fault.InvalidState", "vim.fault.NotFound", ]), ("loadVprobes", "loadVmDomainVprobes_Task", "vim.version.version1", (("vm", "vim.VirtualMachine", "vim.version.version1", 0, None),("script", "string", "vim.version.version1", 0, None),), (0, "vim.Task", "string"), "Global.VCServer", ["vim.fault.TaskInProgress", "vim.fault.InvalidState", "vim.fault.NotFound", ]), ("listVprobesStaticProbes", "listVmDomainVprobes_Task", "vim.version.version1", (("vm", "vim.VirtualMachine", "vim.version.version1", 0, None),), (0, "vim.Task", "string"), "Global.VCServer", ["vim.fault.TaskInProgress", "vim.fault.InvalidState", "vim.fault.NotFound", ]), ("listVprobesGlobals", "listVmDomainVprobesGlobals_Task", "vim.version.version1", (("vm", "vim.VirtualMachine", "vim.version.version1", 0, None),), (0, "vim.Task", "string"), "Global.VCServer", ["vim.fault.TaskInProgress", "vim.fault.InvalidState", "vim.fault.NotFound", ]), ("getVprobesVersion", "getVmDomainVprobesVersion_Task", "vim.version.version1", (("vm", "vim.VirtualMachine", "vim.version.version1", 0, None),), (0, "vim.Task", "string"), "Global.VCServer", ["vim.fault.TaskInProgress", "vim.fault.InvalidState", "vim.fault.NotFound", ])]) CreateDataType("hostd.host.FileSystemBackingInfo", "HostdHostFileSystemBackingInfo", "vmodl.DynamicData", 
"vim.version.version1", [("id", "string", "vim.version.version1", 0), ("name", "string", "vim.version.version1", 0), ("path", "string", "vim.version.version1", 0), ("capacity", "long", "vim.version.version1", 0), ("freeSpace", "long", "vim.version.version1", 0), ("type", "string", "vim.version.version1", 0), ("accessible", "boolean", "vim.version.version1", 0), ("mounted", "boolean", "vim.version.version1", 0)]) CreateEnumType("hostd.host.FileSystemBackingInfo.FileSystemType", "HostdHostFileSystemBackingInfoFileSystemType", "vim.version.version1", ["local", "nfs", "cifs", "nfsv41"]) CreateDataType("hostd.host.FileSystemVolume", "HostdHostFileSystemVolume", "vmodl.DynamicData", "vim.version.version1", [("accessMode", "string", "vim.version.version1", 0), ("type", "string", "vim.version.version1", 0), ("otherType", "string", "vim.version.version1", F_OPTIONAL), ("path", "string", "vim.version.version1", 0), ("pathAlias", "string[]", "vim.version.version1", 0), ("id", "string", "vim.version.version1", 0), ("name", "string", "vim.version.version1", 0), ("capacity", "long", "vim.version.version1", F_OPTIONAL), ("available", "long", "vim.version.version1", F_OPTIONAL), ("lastRefresh", "vmodl.DateTime", "vim.version.version1", 0), ("accessible", "boolean", "vim.version.version1", 0), ("inaccessibleReason", "string", "vim.version.version1", F_OPTIONAL), ("mounted", "boolean", "vim.version.version1", 0), ("vStorageSupport", "string", "vim.version.version1", F_OPTIONAL), ("nativeCloneCapable", "boolean", "vim.version.version1", F_OPTIONAL), ("containerId", "string", "vim.version.version13", F_OPTIONAL), ("aliasOf", "string", "vim.version.version13", F_OPTIONAL), ("clusteredVmdkSupported", "boolean", "vim.version.v7_0", F_OPTIONAL)]) CreateEnumType("hostd.host.FileSystemVolume.AccessMode", "HostdHostFileSystemVolumeAccessMode", "vim.version.version1", ["readWrite", "readOnly"]) CreateDataType("hostd.host.FileSystemVolumeInfo", "HostdHostFileSystemVolumeInfo", 
"vmodl.DynamicData", "vim.version.version1", [("volumeTypes", "string[]", "vim.version.version1", F_OPTIONAL), ("volume", "hostd.host.FileSystemVolume[]", "vim.version.version1", F_OPTIONAL)]) CreateDataType("hostd.host.LocalDatastoreEntry", "HostdHostLocalDatastoreEntry", "vmodl.DynamicData", "vim.version.version1", [("name", "string", "vim.version.version1", 0), ("path", "string", "vim.version.version1", 0), ("id", "string", "vim.version.version1", F_OPTIONAL)]) CreateDataType("hostd.host.LocalFileSystemVolume", "HostdHostLocalFileSystemVolume", "hostd.host.FileSystemVolume", "vim.version.version1", [("device", "string", "vim.version.version1", 0)]) CreateDataType("hostd.host.NasDatastoreEntry", "HostdHostNasDatastoreEntry", "vmodl.DynamicData", "vim.version.version1", [("name", "string", "vim.version.version1", 0), ("path", "string", "vim.version.version1", 0), ("type", "string", "vim.version.version1", 0), ("remoteHost", "string", "vim.version.version1", 0), ("remotePath", "string", "vim.version.version1", 0), ("id", "string", "vim.version.version1", F_OPTIONAL), ("userName", "string", "vim.version.version1", F_OPTIONAL), ("remoteHostNames", "string[]", "vim.version.disabled", F_OPTIONAL)]) CreateDataType("hostd.host.NasVolume", "HostdHostNasVolume", "hostd.host.FileSystemVolume", "vim.version.version1", [("remoteHost", "string", "vim.version.version1", 0), ("remotePath", "string", "vim.version.version1", 0), ("remoteHostNames", "string[]", "vim.version.disabled", F_OPTIONAL), ("securityType", "string", "vim.version.version10", F_OPTIONAL), ("protocolEndpoint", "boolean", "vim.version.version10", F_OPTIONAL), ("vmknicName", "string", "vim.version.NFS_VMKPORTBIND", F_OPTIONAL), ("vmknicActive", "boolean", "vim.version.NFS_VMKPORTBIND", F_OPTIONAL), ("mountFailedReason", "string", "vim.version.VMC_NFS_SUPPORT", F_OPTIONAL)]) CreateDataType("hostd.host.NasVolumeInfo", "HostdHostNasVolumeInfo", "hostd.host.FileSystemBackingInfo", "vim.version.version1", 
[("accessMode", "string", "vim.version.version1", 0), ("remoteHost", "string", "vim.version.version1", 0), ("remotePath", "string", "vim.version.version1", 0), ("userName", "string", "vim.version.version1", F_OPTIONAL), ("password", "string", "vim.version.version1", F_OPTIONAL), ("remoteHostNames", "string[]", "vim.version.disabled", F_OPTIONAL)]) CreateEnumType("hostd.host.NasVolumeInfo.AccessMode", "HostdHostNasVolumeInfoAccessMode", "vim.version.version1", ["readWrite", "readOnly"]) CreateDataType("hostd.host.PMemVolume", "HostdHostPMemVolume", "hostd.host.FileSystemVolume", "vim.version.version12", [("version", "string", "vim.version.version12", 0)]) CreateDataType("hostd.host.VfatVolume", "HostdHostVfatVolume", "hostd.host.FileSystemVolume", "vim.version.version1", None) CreateDataType("hostd.host.Vffs", "HostdHostVffs", "hostd.host.FileSystemVolume", "vim.version.version9", [("majorVersion", "int", "vim.version.version9", 0), ("version", "string", "vim.version.version9", 0), ("uuid", "string", "vim.version.version9", 0), ("extent", "vim.host.ScsiDisk.Partition[]", "vim.version.version9", 0)]) CreateDataType("hostd.host.Vmfs", "HostdHostVmfs", "hostd.host.FileSystemVolume", "vim.version.version1", [("blockSize", "int", "vim.version.version1", 0), ("maxBlocks", "int", "vim.version.version1", 0), ("majorVersion", "int", "vim.version.version1", 0), ("version", "string", "vim.version.version1", 0), ("uuid", "string", "vim.version.version1", 0), ("extent", "vim.host.ScsiDisk.Partition[]", "vim.version.version1", 0), ("forceMountedInfo", "vim.host.ForceMountedInfo", "vim.version.version5", F_OPTIONAL), ("ssd", "boolean", "vim.version.version1", F_OPTIONAL), ("local", "boolean", "vim.version.version9", F_OPTIONAL), ("unmapGranularity", "int", "vim.version.version12", F_OPTIONAL), ("unmapPriority", "string", "vim.version.version12", F_OPTIONAL), ("unmapBandwidthSpec", "vim.host.VmfsVolume.UnmapBandwidthSpec", "vim.version.version12", F_OPTIONAL)]) 
CreateDataType("hostd.host.VvolContainer", "HostdHostVvolContainer", "hostd.host.FileSystemVolume", "vim.version.disabled", [("uuid", "string", "vim.version.disabled", 0), ("hostPE", "vim.host.VvolVolume.HostProtocolEndpoint[]", "vim.version.disabled", F_OPTIONAL), ("vasaProviderInfo", "vim.VimVasaProviderInfo[]", "vim.version.disabled", F_OPTIONAL), ("storageArray", "vim.VasaStorageArray[]", "vim.version.disabled", F_OPTIONAL), ("consolePath", "string", "vim.version.disabled", 0), ("protocolEndpointType", "string", "vim.version.disabled", F_OPTIONAL)]) CreateDataType("hostd.mockup.StatsAugmentationPolicy", "HostdMockupStatsAugmentationPolicy", "vmodl.DynamicData", "vim.version.version1", [("rule", "hostd.mockup.StatsAugmentationRule[]", "vim.version.version1", F_OPTIONAL)]) CreateDataType("hostd.mockup.StatsAugmentationRule", "HostdMockupStatsAugmentationRule", "vmodl.DynamicData", "vim.version.version1", [("pattern", "string", "vim.version.version1", 0), ("invert", "boolean", "vim.version.version1", 0), ("spec", "hostd.mockup.StatsAugmentationSpec[]", "vim.version.version1", F_OPTIONAL)]) CreateDataType("hostd.mockup.StatsAugmentationSpec", "HostdMockupStatsAugmentationSpec", "vmodl.DynamicData", "vim.version.version1", [("counterId", "int[]", "vim.version.version1", F_OPTIONAL), ("instance", "string[]", "vim.version.version1", F_OPTIONAL), ("sequence", "long[]", "vim.version.version1", F_OPTIONAL)]) CreateEnumType("hostd.perfCounter.CounterType", "HostdPerfCounterCounterType", "vim.version.version1", ["counter64", "gauge64"]) CreateDataType("hostd.perfCounter.Metadata", "HostdPerfCounterMetadata", "vmodl.DynamicData", "vim.version.version1", [("name", "string", "vim.version.version1", 0), ("type", "hostd.perfCounter.CounterType", "vim.version.version1", 0), ("minValue", "long", "vim.version.version1", F_OPTIONAL), ("maxValue", "long", "vim.version.version1", F_OPTIONAL)]) CreateManagedType("hostd.perfCounter.Query", "HostdPerfCounterQuery", 
"vmodl.ManagedObject", "vim.version.version1", [("info", "hostd.perfCounter.Query.Info", "vim.version.version1", 0, None)], [("execute", "Execute", "vim.version.version1", (), (F_OPTIONAL, "hostd.perfCounter.QueryResult[]", "hostd.perfCounter.QueryResult[]"), None, None), ("destroy", "HostdPerfCounterQuery_Destroy", "vim.version.version1", (), (0, "void", "void"), None, None)]) CreateDataType("hostd.perfCounter.Query.Info", "HostdPerfCounterQueryInfo", "vmodl.DynamicData", "vim.version.version1", None) CreateDataType("hostd.perfCounter.Query.NamesInfo", "HostdPerfCounterQueryNamesInfo", "hostd.perfCounter.Query.Info", "vim.version.version1", [("names", "string[]", "vim.version.version1", F_OPTIONAL)]) CreateDataType("hostd.perfCounter.Query.RegexInfo", "HostdPerfCounterQueryRegexInfo", "hostd.perfCounter.Query.Info", "vim.version.version1", [("regex", "string", "vim.version.version1", 0), ("caseSensitive", "boolean", "vim.version.version1", 0)]) CreateDataType("hostd.perfCounter.QueryResult", "HostdPerfCounterQueryResult", "vmodl.DynamicData", "vim.version.version1", [("metadata", "hostd.perfCounter.Metadata", "vim.version.version1", 0), ("value", "long", "vim.version.version1", F_OPTIONAL)]) CreateManagedType("hostd.perfCounter.Registry", "HostdPerfCounterRegistry", "vmodl.ManagedObject", "vim.version.version1", [("query", "hostd.perfCounter.Query[]", "vim.version.version1", 0, None)], [("enumeratePath", "EnumeratePath", "vim.version.version1", (("path", "string", "vim.version.version1", 0, None),), (F_OPTIONAL, "string[]", "string[]"), None, None), ("createNamesQuery", "CreateNamesQuery", "vim.version.version1", (("names", "string[]", "vim.version.version1", F_OPTIONAL, None),), (0, "hostd.perfCounter.Query", "hostd.perfCounter.Query"), None, None), ("createRegexQuery", "CreateRegexQuery", "vim.version.version1", (("regex", "string", "vim.version.version1", 0, None),("caseSensitive", "boolean", "vim.version.version1", 0, None),), (0, "hostd.perfCounter.Query", 
"hostd.perfCounter.Query"), None, None)])
py
b411301eb8fc95f708d27620eea830de492a5be0
"""Defines the PseudoVoigt1D model using an astropy.modeling.FittableModel.""" import math import numpy as np from astropy.modeling import Fittable1DModel, Parameter __author__ = "Jeffrey Simpson" __copyright__ = "Copyright 2020, Jeffrey Simpson" __credits__ = ["Jeffrey Simpson"] __license__ = "MIT" __version__ = "0.0.1" __maintainer__ = "Jeffrey Simpson" __email__ = "[email protected]" __status__ = "Development" def tie_ca_lines_1(model): """Tie the second CaT line wavelength to the first.""" mean = 8542.09 - 8498.03 + model.x_0_0 return mean def tie_ca_lines_2(model): """Tie the third CaT line wavelength to the first.""" mean = 8662.14 - 8498.03 + model.x_0_0 return mean FLOAT_EPSILON = float(np.finfo(np.float32).tiny) class PseudoVoigt1D(Fittable1DModel): """ One dimensional Pseudo-Voigt model. Parameters ---------- amplitude : float Amplitude of the Pseudo-Voigt. x_0 : float Mean of the Pseudo-Voigt. gamma_L : float Standard deviation of the Lorentzian. gamma_G : float Standard deviation of the Gaussian. Notes ----- Using function has defined by Thompson et al (1987) DOI: 10.1107/S0021889887087090 """ x_0 = Parameter(default=0) # Ensure gamma_X makes sense if their bounds are not explicitly set. # gamma_X must be non-zero and positive. 
gamma_L = Parameter(default=1, bounds=(FLOAT_EPSILON, None)) gamma_G = Parameter(default=1, bounds=(FLOAT_EPSILON, None)) amplitude = Parameter(default=1, bounds=(FLOAT_EPSILON, None)) @staticmethod def evaluate(x, x_0, gamma_L, gamma_G, amplitude): """Calculate the pseudo-Voigt function.""" Γ_G = 2*math.sqrt(math.log1p(2))*gamma_G Γ_L = 2*gamma_L Γ_int = (Γ_G**5 + 2.69269 * Γ_G**4 * Γ_L**1 + 2.42843 * Γ_G**3 * Γ_L**2 + 4.47163 * Γ_G**2 * Γ_L**3 + 0.07842 * Γ_G**1 * Γ_L**4 + Γ_L**5) Γ = np.power(Γ_int, 1/5) η = (1.36603 * (Γ_L/Γ) - 0.47719 * (Γ_L/Γ)**2 + 0.11116 * (Γ_L/Γ)**3) G_x = ((1/(math.sqrt(np.pi)*gamma_G)) * np.exp((-1*np.power(x-x_0, 2)) / (gamma_G**2))) L_x = gamma_L / (np.pi * (np.power(x-x_0, 2) + gamma_L**2)) return amplitude*(η*L_x + (1-η)*G_x) class ThreePseudoVoigts(PseudoVoigt1D + PseudoVoigt1D + PseudoVoigt1D): """Evaluates the sum of three PseudoVoigt1D."""
py
b41131d883d94b71a55386851dab619883c10641
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This sample listens to a directory, moves each new file, and prints it,
using Docker containers.
The following operators are being used: DockerOperator, BashOperator &
ShortCircuitOperator.
TODO: Review the workflow, change it according to your environment, and
enable the code.
""" # # from airflow import DAG # import airflow # from datetime import timedelta # from airflow.operators import BashOperator # from airflow.operators import ShortCircuitOperator # from airflow.operators.docker_operator import DockerOperator # # default_args = { # 'owner': 'Airflow', # 'depends_on_past': False, # 'start_date': airflow.utils.dates.days_ago(2), # 'email': ['[email protected]'], # 'email_on_failure': False, # 'email_on_retry': False, # 'retries': 1, # 'retry_delay': timedelta(minutes=5), # } # # dag = DAG( # 'docker_sample_copy_data', default_args= # default_args, schedule_interval=timedelta(minutes=10)) # # locate_file_cmd = """ # sleep 10 # find {{params.source_location}} -type f -printf "%f\n" | head -1 # """ # # t_view = BashOperator( # task_id='view_file', # bash_command=locate_file_cmd, # do_xcom_push=True, # params={'source_location': '/your/input_dir/path'}, # dag=dag) # # # def is_data_available(*args, **kwargs): # ti = kwargs['ti'] # data = ti.xcom_pull(key=None, task_ids='view_file') # return not data == '' # # # t_is_data_available = ShortCircuitOperator( # task_id='check_if_data_available', # python_callable=is_data_available, # dag=dag) # # t_move = DockerOperator( # api_version='1.19', # docker_url='tcp://localhost:2375', # replace it with swarm/docker endpoint # image='centos:latest', # network_mode='bridge', # volumes=['/your/host/input_dir/path:/your/input_dir/path', # '/your/host/output_dir/path:/your/output_dir/path'], # command=[ # "/bin/bash", # "-c", # "/bin/sleep 30; " # "/bin/mv {{params.source_location}}/{{ ti.xcom_pull('view_file') }} {{params.target_location}};" # "/bin/echo '{{params.target_location}}/{{ ti.xcom_pull('view_file') }}';" # ], # task_id='move_data', # do_xcom_push=True, # params={'source_location': '/your/input_dir/path', # 'target_location': '/your/output_dir/path'}, # dag=dag) # # print_templated_cmd = """ # cat {{ ti.xcom_pull('move_data') }} # """ # # t_print = DockerOperator( # api_version='1.19', # 
docker_url='tcp://localhost:2375', # image='centos:latest', # volumes=['/your/host/output_dir/path:/your/output_dir/path'], # command=print_templated_cmd, # task_id='print', # dag=dag) # # t_view.set_downstream(t_is_data_available) # t_is_data_available.set_downstream(t_move) # t_move.set_downstream(t_print)
py
b41132275940a5a4715b633272df9c8feef2db7c
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Retry handling: build retry checkers/actions from a retry config."""
import functools
import logging
import random
from binascii import crc32

from botocore.exceptions import (
    ChecksumError,
    ConnectionClosedError,
    ConnectionError,
    EndpointConnectionError,
    ReadTimeoutError,
)

logger = logging.getLogger(__name__)
# The only supported error for now is GENERAL_CONNECTION_ERROR
# which maps to requests generic ConnectionError. If we're able
# to get more specific exceptions from requests we can update
# this mapping with more specific exceptions.
EXCEPTION_MAP = {
    'GENERAL_CONNECTION_ERROR': [
        ConnectionError, ConnectionClosedError, ReadTimeoutError,
        EndpointConnectionError
    ],
}


def delay_exponential(base, growth_factor, attempts):
    """Calculate time to sleep based on exponential function.

    The format is::

        base * growth_factor ^ (attempts - 1)

    If ``base`` is set to 'rand' then a random number between
    0 and 1 will be used as the base.
    Base must be greater than 0, otherwise a ValueError will be
    raised.

    """
    if base == 'rand':
        base = random.random()
    elif base <= 0:
        raise ValueError("The 'base' param must be greater than 0, "
                         "got: %s" % base)
    time_to_sleep = base * (growth_factor ** (attempts - 1))
    return time_to_sleep


def create_exponential_delay_function(base, growth_factor):
    """Create an exponential delay function based on the attempts.

    This is used so that you only have to pass it the attempts
    parameter to calculate the delay.

    """
    return functools.partial(
        delay_exponential, base=base, growth_factor=growth_factor)


def create_retry_handler(config, operation_name=None):
    """Build a :class:`RetryHandler` from a retry config dict."""
    checker = create_checker_from_retry_config(
        config, operation_name=operation_name)
    action = create_retry_action_from_config(
        config, operation_name=operation_name)
    return RetryHandler(checker=checker, action=action)


def create_retry_action_from_config(config, operation_name=None):
    """Create the delay callable used when a retry is needed.

    Returns ``None`` if the delay type is not 'exponential'.
    """
    # The spec has the possibility of supporting per policy
    # actions, but right now, we assume this comes from the
    # default section, which means that delay functions apply
    # for every policy in the retry config (per service).
    delay_config = config['__default__']['delay']
    if delay_config['type'] == 'exponential':
        return create_exponential_delay_function(
            base=delay_config['base'],
            growth_factor=delay_config['growth_factor'])


def create_checker_from_retry_config(config, operation_name=None):
    """Create a retry checker (wrapped in MaxAttemptsDecorator).

    Combines the ``__default__`` policies with any per-operation policies.
    """
    checkers = []
    max_attempts = None
    retryable_exceptions = []
    if '__default__' in config:
        policies = config['__default__'].get('policies', [])
        max_attempts = config['__default__']['max_attempts']
        for key in policies:
            current_config = policies[key]
            checkers.append(_create_single_checker(current_config))
            retry_exception = _extract_retryable_exception(current_config)
            if retry_exception is not None:
                retryable_exceptions.extend(retry_exception)
    if operation_name is not None and config.get(operation_name) is not None:
        operation_policies = config[operation_name]['policies']
        for key in operation_policies:
            checkers.append(_create_single_checker(operation_policies[key]))
            retry_exception = _extract_retryable_exception(
                operation_policies[key])
            if retry_exception is not None:
                retryable_exceptions.extend(retry_exception)
    if len(checkers) == 1:
        # Don't need to use a MultiChecker
        return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts)
    else:
        multi_checker = MultiChecker(checkers)
        return MaxAttemptsDecorator(
            multi_checker, max_attempts=max_attempts,
            retryable_exceptions=tuple(retryable_exceptions))


def _create_single_checker(config):
    """Create one checker from a single policy's ``applies_when`` clause."""
    if 'response' in config['applies_when']:
        return _create_single_response_checker(
            config['applies_when']['response'])
    elif 'socket_errors' in config['applies_when']:
        return ExceptionRaiser()


def _create_single_response_checker(response):
    """Map a response-based policy clause to the matching checker class."""
    if 'service_error_code' in response:
        checker = ServiceErrorCodeChecker(
            status_code=response['http_status_code'],
            error_code=response['service_error_code'])
    elif 'http_status_code' in response:
        checker = HTTPStatusCodeChecker(
            status_code=response['http_status_code'])
    elif 'crc32body' in response:
        checker = CRC32Checker(header=response['crc32body'])
    else:
        # TODO: send a signal.
        raise ValueError("Unknown retry policy")
    return checker


def _extract_retryable_exception(config):
    """Return the exception classes a policy should catch, or ``None``."""
    applies_when = config['applies_when']
    if 'crc32body' in applies_when.get('response', {}):
        return [ChecksumError]
    elif 'socket_errors' in applies_when:
        exceptions = []
        for name in applies_when['socket_errors']:
            exceptions.extend(EXCEPTION_MAP[name])
        return exceptions


class RetryHandler(object):
    """Retry handler.

    The retry handler takes two params, ``checker`` object
    and an ``action`` object.

    The ``checker`` object must be a callable object and based on a response
    and an attempt number, determines whether or not sufficient criteria for
    a retry has been met.  If this is the case then the ``action`` object
    (which also is a callable) determines what needs to happen in the event
    of a retry.

    """

    def __init__(self, checker, action):
        self._checker = checker
        self._action = action

    def __call__(self, attempts, response, caught_exception, **kwargs):
        """Handler for a retry.

        Intended to be hooked up to an event handler (hence the **kwargs),
        this will process retries appropriately.

        """
        checker_kwargs = {
            'attempt_number': attempts,
            'response': response,
            'caught_exception': caught_exception
        }
        if isinstance(self._checker, MaxAttemptsDecorator):
            retries_context = kwargs['request_dict']['context'].get('retries')
            checker_kwargs.update({'retries_context': retries_context})

        if self._checker(**checker_kwargs):
            result = self._action(attempts=attempts)
            logger.debug("Retry needed, action of: %s", result)
            return result
        logger.debug("No retry needed.")


class BaseChecker(object):
    """Base class for retry checkers.

    Each class is responsible for checking a single criteria that determines
    whether or not a retry should not happen.

    """

    def __call__(self, attempt_number, response, caught_exception):
        """Determine if retry criteria matches.

        Note that either ``response`` is not None and ``caught_exception`` is
        None or ``response`` is None and ``caught_exception`` is not None.

        :type attempt_number: int
        :param attempt_number: The total number of times we've attempted to
            send the request.

        :param response: The HTTP response (if one was received).

        :type caught_exception: Exception
        :param caught_exception: Any exception that was caught while trying to
            send the HTTP response.

        :return: True, if the retry criteria matches (and therefore a retry
            should occur.  False if the criteria does not match.

        """
        # The default implementation allows subclasses to not have to check
        # whether or not response is None or not.
        if response is not None:
            return self._check_response(attempt_number, response)
        elif caught_exception is not None:
            return self._check_caught_exception(
                attempt_number, caught_exception)
        else:
            raise ValueError("Both response and caught_exception are None.")

    def _check_response(self, attempt_number, response):
        pass

    def _check_caught_exception(self, attempt_number, caught_exception):
        pass


class MaxAttemptsDecorator(BaseChecker):
    """Allow retries up to a maximum number of attempts.

    This will pass through calls to the decorated retry checker, provided
    that the number of attempts does not exceed max_attempts.  It will
    also catch any retryable_exceptions passed in.  Once max_attempts has
    been exceeded, then False will be returned or the retryable_exceptions
    that was previously being caught will be raised.

    """

    def __init__(self, checker, max_attempts, retryable_exceptions=None):
        self._checker = checker
        self._max_attempts = max_attempts
        self._retryable_exceptions = retryable_exceptions

    def __call__(self, attempt_number, response, caught_exception,
                 retries_context):
        if retries_context:
            retries_context['max'] = max(
                retries_context.get('max', 0),
                self._max_attempts
            )

        should_retry = self._should_retry(attempt_number, response,
                                          caught_exception)
        if should_retry:
            if attempt_number >= self._max_attempts:
                # explicitly set MaxAttemptsReached
                if response is not None and 'ResponseMetadata' in response[1]:
                    response[1]['ResponseMetadata']['MaxAttemptsReached'] \
                        = True
                logger.debug("Reached the maximum number of retry "
                             "attempts: %s", attempt_number)
                return False
            else:
                return should_retry
        else:
            return False

    def _should_retry(self, attempt_number, response, caught_exception):
        if self._retryable_exceptions and \
                attempt_number < self._max_attempts:
            try:
                return self._checker(attempt_number, response,
                                     caught_exception)
            except self._retryable_exceptions as e:
                logger.debug("retry needed, retryable exception caught: %s",
                             e, exc_info=True)
                return True
        else:
            # If we've exceeded the max attempts we just let the exception
            # propagate if one has occurred.
            return self._checker(attempt_number, response, caught_exception)


class HTTPStatusCodeChecker(BaseChecker):
    """Retry when the HTTP status code matches a configured value."""

    def __init__(self, status_code):
        self._status_code = status_code

    def _check_response(self, attempt_number, response):
        if response[0].status_code == self._status_code:
            logger.debug(
                "retry needed: retryable HTTP status code received: %s",
                self._status_code)
            return True
        else:
            return False


class ServiceErrorCodeChecker(BaseChecker):
    """Retry when both HTTP status code and service error code match."""

    def __init__(self, status_code, error_code):
        self._status_code = status_code
        self._error_code = error_code

    def _check_response(self, attempt_number, response):
        if response[0].status_code == self._status_code:
            actual_error_code = response[1].get('Error', {}).get('Code')
            if actual_error_code == self._error_code:
                logger.debug(
                    "retry needed: matching HTTP status and error code seen: "
                    "%s, %s", self._status_code, self._error_code)
                return True
        return False


class MultiChecker(BaseChecker):
    """Combine several checkers; the first truthy checker result wins."""

    def __init__(self, checkers):
        self._checkers = checkers

    def __call__(self, attempt_number, response, caught_exception):
        for checker in self._checkers:
            checker_response = checker(attempt_number, response,
                                       caught_exception)
            if checker_response:
                return checker_response
        return False


class CRC32Checker(BaseChecker):
    """Verify the response body against a CRC32 checksum header."""

    def __init__(self, header):
        # The header where the expected crc32 is located.
        self._header_name = header

    def _check_response(self, attempt_number, response):
        http_response = response[0]
        expected_crc = http_response.headers.get(self._header_name)
        if expected_crc is None:
            logger.debug("crc32 check skipped, the %s header is not "
                         "in the http response.", self._header_name)
        else:
            # Mask to an unsigned 32-bit value; crc32 can be signed on py2.
            actual_crc32 = crc32(response[0].content) & 0xffffffff
            expected = int(expected_crc)
            if actual_crc32 != expected:
                logger.debug(
                    "retry needed: crc32 check failed, expected != actual: "
                    "%s != %s", expected, actual_crc32)
                raise ChecksumError(checksum_type='crc32',
                                    expected_checksum=expected,
                                    actual_checksum=actual_crc32)


class ExceptionRaiser(BaseChecker):
    """Raise any caught exceptions.

    This class will raise any non None ``caught_exception``.

    """

    def _check_caught_exception(self, attempt_number, caught_exception):
        # This is implementation specific, but this class is useful by
        # coordinating with the MaxAttemptsDecorator.
        # The MaxAttemptsDecorator has a list of exceptions it should catch
        # and retry, but something needs to come along and actually raise the
        # caught_exception.  That's what this class is being used for.  If
        # the MaxAttemptsDecorator is not interested in retrying the exception
        # then this exception just propagates out past the retry code.
        raise caught_exception
py
b411322f29284800512f85a18b1f2866fd6526ab
"""Packaging configuration for the customer-match-translator CLI."""
from setuptools import find_packages, setup

# The PyPI long description is sourced from the repository's DESCRIPTION.txt.
with open("DESCRIPTION.txt") as description_file:
    long_description = description_file.read()

# Runtime dependencies installed alongside the package.
REQUIREMENTS = [
    "click",
    "pandas",
    "uszipcode",
    "tqdm",
    "phonenumbers",
    "country_converter",
]

# Trove classifiers describing project maturity and intended audience.
CLASSIFIERS = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
]

setup(
    name="customer-match-translator",
    version="1.0.2",
    description="A simple command line program to translate a CSV containing customer contact information to Google's Customer Match format.",
    long_description=long_description,
    url="https://github.com/Esquire-Digital/customer-match-tool",
    author="Ryan Hartman",
    author_email="[email protected]",
    license="MIT",
    packages=find_packages(),
    entry_points={
        'console_scripts': ['customer-match-translator=customer_match.cli:main']
    },
    classifiers=CLASSIFIERS,
    install_requires=REQUIREMENTS,
    keywords="google customer match csv",
    zip_safe=False,
)
py
b411331cdc7ca469815b3032c056b8fc63c1f119
"""Application configuration module.""" # pylint: disable=no-self-argument,no-self-use # pydantic validators from typing import Optional, List, Union from pathlib import Path from datetime import timedelta from pydantic import ( BaseSettings, BaseConfig, AnyHttpUrl, AnyUrl, validator ) class Settings(BaseSettings): """Application settings.""" SECRET_KEY: str CORS_ORIGINS: List[AnyHttpUrl] ACCESS_TOKEN_EXPIRE_DAYS: timedelta @validator('ACCESS_TOKEN_EXPIRE_DAYS', pre=True) def expire_days_to_timedelta(cls, value: Optional[str]) -> timedelta: """Converts value of `ACCESS_TOKEN_EXPIRE_DAYS` to `timedelta`.""" if isinstance(value, str): return timedelta(int(value)) return value DB_HOST: str DB_PORT: int DB_NAME: str DB_USERNAME: str DB_PASSWORD: str DB_URI: Optional[AnyUrl] = None @validator('DB_URI', pre=True) def assemble_db_uri(cls, value: Optional[str], values: dict) -> str: """Assebles database URI.""" if isinstance(value, str): return value return 'mongodb://{username}:{password}@{host}:{port}/{name}?authSource=admin'.format( username=values.get('DB_USERNAME'), password=values.get('DB_PASSWORD'), host=values.get('DB_HOST'), port=values.get('DB_PORT'), name=values.get('DB_NAME'), ) USER_COLLECTION_NAME: Optional[str] = 'users' TABLE_COLLECTION_NAME: Optional[str] = 'tables' MESSAGE_COLLECTION_NAME: Optional[str] = 'messages' LOGGING_FILENAME: Optional[str] = 'log.json' LOGGING_PATH: Path @validator('LOGGING_PATH', pre=True) def assemble_logging_path(cls, value: Path, values: dict) -> Path: """"Adds filename to the path.""" return Path(value) / values.get('LOGGING_FILENAME') LOGGING_FORMAT: Optional[str] = ( '<level>{level: >8}</level> | ' '<dim>{time:YYYY-MM-DD HH:mm:ss}</dim> | ' '<normal>{message}</normal>' ) LOGGING_LEVEL: Optional[Union[int, str]] = 20 LOGGING_ROTATION: Optional[str] = '1 day' LOGGING_RETENTION: Optional[str] = '1 month' LOGGING_COMPRESSION: Optional[str] = 'tar.gz' class Config(BaseConfig): """Settings config class.""" case_sensitive = True 
settings = Settings()
py
b411338d210eed0870ad6651637586ace86e3452
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Colab-specific shell customizations.""" import re import textwrap from IPython.utils import coloransi from google.colab import _ipython as ipython from google.colab._import_hooks._cv2 import DisabledFunctionError _GREEN = coloransi.TermColors.Green _RED = coloransi.TermColors.Red _NORMAL = coloransi.TermColors.Normal _SEP = _RED + '-' * 75 # Set of modules that have snippets explaining how they can be installed. Any # ImportErrors for modules in this set will show a custom error message pointing # to the snippet. SNIPPET_MODULES = set([ 'cartopy', 'libarchive', 'pydot', ]) def initialize(): ip = ipython.get_ipython() if ip: _CustomErrorHandlers(ip) class ColabTraceback(object): def __init__(self, stb, error_details): self.stb = stb self.error_details = error_details class FormattedTracebackError(Exception): def __init__(self, message, stb, details): super(FormattedTracebackError, self).__init__(message) self._colab_traceback = ColabTraceback(stb, details) def _render_traceback_(self): return self._colab_traceback class _CustomErrorHandlers(object): """Custom error handler for the IPython shell. Allows us to add custom messaging for certain error types (i.e. ImportError). """ def __init__(self, shell): # The values for this map are functions which return # (custom_message, additional error details). 
self.custom_error_handlers = { ImportError: _CustomErrorHandlers.import_message, DisabledFunctionError: _CustomErrorHandlers.disabled_message, } shell.set_custom_exc( tuple(self.custom_error_handlers.keys()), self.handle_error) def _get_error_handler(self, etype): for handled_type in self.custom_error_handlers: if issubclass(etype, handled_type): return self.custom_error_handlers[handled_type] return None def handle_error(self, shell, etype, exception, tb, tb_offset=None): """Invoked when the shell catches an error in custom_message_getters.""" handler = self._get_error_handler(etype) if not handler: return shell.showtraceback() result = handler(exception) if result: custom_message, details = result structured_traceback = shell.InteractiveTB.structured_traceback( etype, exception, tb, tb_offset=tb_offset) # Ensure a blank line appears between the standard traceback and custom # error messaging. if custom_message: structured_traceback += ['', custom_message] wrapped = FormattedTracebackError( str(exception), structured_traceback, details) return shell.showtraceback(exc_tuple=(etype, wrapped, tb)) @staticmethod def disabled_message(error): """Return a helpful message for disabled functions.""" funcname = getattr(error, 'funcname', None) if funcname: msg = '' details = { 'actions': [{ 'action': 'open_snippet', 'action_text': 'Search Snippets for {}'.format(funcname), 'snippet_filter': funcname, },], } return msg, details @staticmethod def import_message(error): """Return a helpful message for failed imports.""" # Python 3 ModuleNotFoundErrors have a "name" attribute. Preferring this # over regex matching if the attribute is available. 
module_name = getattr(error, 'name', None) if not module_name: match = re.search(r'No module named \'?(?P<name>[a-zA-Z0-9_\.]+)\'?', str(error)) module_name = match.groupdict()['name'].split('.')[0] if match else None if module_name in SNIPPET_MODULES: msg = textwrap.dedent("""\ {sep}{green} NOTE: If your import is failing due to a missing package, you can manually install dependencies using either !pip or !apt. To install {snippet}, click the button below. {sep}{normal}\n""".format( sep=_SEP, green=_GREEN, normal=_NORMAL, snippet=module_name)) details = { 'actions': [ { 'action': 'open_snippet', 'action_text': 'Install {}'.format(module_name), # Snippets for installing a custom library always end with # an import of the library itself. 'snippet_filter': 'import {}'.format(module_name), }, ], } return msg, details msg = textwrap.dedent("""\ {sep}{green} NOTE: If your import is failing due to a missing package, you can manually install dependencies using either !pip or !apt. To view examples of installing some common dependencies, click the "Open Examples" button below. {sep}{normal}\n""".format(sep=_SEP, green=_GREEN, normal=_NORMAL)) details = { 'actions': [{ 'action': 'open_url', 'action_text': 'Open Examples', 'url': '/notebooks/snippets/importing_libraries.ipynb', },], } return msg, details def compute_completion_metadata(shell, matches, inspect_magics=True): """Computes completion item metadata. Args: shell: IPython shell matches: List of string completion matches. inspect_magics: (optional, default: True) If unset, don't call object_inspect on any symbols starting with %. Returns: Metadata for each of the matches. """ # We want to temporarily change the default level of detail returned by the # inspector, to avoid slow completions (cf b/112153563). 
old_str_detail_level = shell.inspector.str_detail_level shell.inspector.str_detail_level = 1 try: infos = [] for match in matches: info = {} if '#' in match: # Runtime type information added by customization._add_type_information. info['type_name'] = match.split('#')[1] elif match.startswith('%') and not inspect_magics: # Inspecting magics may involve loading slow-to-import modules, so we # avoid it until the user requests additional information. info['type_name'] = 'Magic function' else: inspect_results = shell.object_inspect(match) # Use object_inspect to find the type and filter to only what is needed # since there can be a lot of completions to send. info['type_name'] = inspect_results['type_name'] if inspect_results.get('definition', ''): info['definition'] = inspect_results['definition'] elif inspect_results.get('init_definition', ''): info['definition'] = inspect_results['init_definition'] infos.append(info) return infos finally: shell.inspector.str_detail_level = old_str_detail_level
py
b41133d9428ad2e02033ee80593f13cf986a3d87
# Copyright 2011-2014 James McCauley # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Some of POX's core API and functionality is here, largely in the POXCore class (an instance of which is available as pox.core.core). This includes things like component rendezvous, logging, system status (up and down events), etc. """ from __future__ import print_function # Set up initial log state import logging import inspect import time import os import signal _path = inspect.stack()[0][1] _ext_path = _path[0:_path.rindex(os.sep)] _ext_path = os.path.dirname(_ext_path) + os.sep _path = os.path.dirname(_path) + os.sep SQUELCH_TIME = 5 _squelch = '' _squelchTime = 0 _squelchCount = 0 def getLogger (name=None, moreFrames=0): """ In general, you don't need to call this directly, and will use core.getLogger() instead. 
""" if name is None: s = inspect.stack()[1+moreFrames] name = s[1] if name.endswith('.py'): name = name[0:-3] elif name.endswith('.pyc'): name = name[0:-4] if name.startswith(_path): name = name[len(_path):] elif name.startswith(_ext_path): name = name[len(_ext_path):] name = name.replace('/', '.').replace('\\', '.') #FIXME: use os.path or whatever # Remove double names ("topology.topology" -> "topology") if name.find('.') != -1: n = name.split('.') if len(n) >= 2: if n[-1] == n[-2]: del n[-1] name = '.'.join(n) if name.startswith("ext."): name = name.split("ext.",1)[1] if name.endswith(".__init__"): name = name.rsplit(".__init__",1)[0] l = logging.getLogger(name) g=globals() if not hasattr(l, "print"): def printmsg (*args, **kw): #squelch = kw.get('squelch', True) msg = ' '.join((str(s) for s in args)) s = inspect.stack()[1] o = '[' if 'self' in s[0].f_locals: o += s[0].f_locals['self'].__class__.__name__ + '.' o += s[3] + ':' + str(s[2]) + '] ' o += msg if o == _squelch: if time.time() >= _squelchTime: l.debug("[Previous message repeated %i more times]" % (g['_squelchCount']+1,)) g['_squelchCount'] = 0 g['_squelchTime'] = time.time() + SQUELCH_TIME else: g['_squelchCount'] += 1 else: g['_squelch'] = o if g['_squelchCount'] > 0: l.debug("[Previous message repeated %i more times]" % (g['_squelchCount'],)) g['_squelchCount'] = 0 g['_squelchTime'] = time.time() + SQUELCH_TIME l.debug(o) setattr(l, "print", printmsg) setattr(l, "msg", printmsg) return l # Working around something (don't remember what) log = (lambda : getLogger())() from pox.lib.revent import * # Now use revent's exception hook to put exceptions in event handlers into # the log... 
def _revent_exception_hook (source, event, args, kw, exc_info): try: c = source t = event if hasattr(c, "__class__"): c = c.__class__.__name__ if isinstance(t, Event): t = t.__class__.__name__ elif issubclass(t, Event): t = t.__name__ except: pass log.exception("Exception while handling %s!%s...\n" % (c,t)) import pox.lib.revent.revent pox.lib.revent.revent.handleEventException = _revent_exception_hook class GoingUpEvent (Event): """ Fired when system is going up. """ pass class GoingDownEvent (Event): """ Fired when system is going down. """ pass class UpEvent (Event): """ Fired when system is up. """ pass class DownEvent (Event): """ Fired when system is down. """ pass class ComponentRegistered (Event): """ This is raised by core whenever a new component is registered. By watching this, a component can monitor whether other components it depends on are available. """ def __init__ (self, name, component): self.name = name self.component = component class RereadConfiguration (Event): """ Fired when modules should reread their configuration files. """ pass import pox.lib.recoco as recoco class POXCore (EventMixin): """ A nexus of of the POX API. pox.core.core is a reference to an instance of this class. This class serves a number of functions. An important one is that it can serve as a rendezvous point for components. A component can register objects on core, and they can then be accessed on the core object (e.g., if you register foo, then there will then be a pox.core.core.foo). In many cases, this means you won't need to import a module. Another purpose to the central registration is that it decouples functionality from a specific module. If myL2Switch and yourL2Switch both register as "switch" and both provide the same API, then it doesn't matter. Doing this with imports is a pain. Additionally, a number of commmon API functions are vailable here. 
""" _eventMixin_events = set([ UpEvent, DownEvent, GoingUpEvent, GoingDownEvent, ComponentRegistered, RereadConfiguration, ]) version = (0,5,0) version_name = "eel" def __init__ (self, threaded_selecthub=True, epoll_selecthub=False, handle_signals=True): self.debug = False self.running = True self.starting_up = True self.components = {'core':self} self._openflow_wanted = False self._handle_signals = handle_signals import threading self.quit_condition = threading.Condition() print(self.banner) self.scheduler = recoco.Scheduler(daemon=True, threaded_selecthub=threaded_selecthub, use_epoll=epoll_selecthub) self._waiters = [] # List of waiting components @property def banner (self): return "{0} / Copyright 2011-2014 James McCauley, et al.".format( self.version_string) @property def version_string (self): return "POX %s (%s)" % ('.'.join(map(str,self.version)),self.version_name) def callDelayed (_self, _seconds, _func, *args, **kw): """ Calls the function at a later time. This is just a wrapper around a recoco timer. """ t = recoco.Timer(_seconds, _func, args=args, kw=kw, scheduler = _self.scheduler) return t def callLater (_self, _func, *args, **kw): # first arg is `_self` rather than `self` in case the user wants # to specify self as a keyword argument """ Call the given function with the given arguments within the context of the co-operative threading environment. It actually calls it sooner rather than later. ;) Much of POX is written without locks because it's all thread-safe with respect to itself, as it's written using the recoco co-operative threading library. If you have a real thread outside of the co-operative thread context, you need to be careful about calling things within it. This function provides a rather simple way that works for most situations: you give it a callable (like a method) and some arguments, and it will call that callable with those arguments from within the co-operative threader, taking care of synchronization for you. 
""" _self.scheduler.callLater(_func, *args, **kw) def raiseLater (_self, _obj, *args, **kw): # first arg is `_self` rather than `self` in case the user wants # to specify self as a keyword argument """ This is similar to callLater(), but provides an easy way to raise a revent event from outide the co-operative context. Rather than foo.raiseEvent(BarEvent, baz, spam), you just do core.raiseLater(foo, BarEvent, baz, spam). """ _self.scheduler.callLater(_obj.raiseEvent, *args, **kw) def getLogger (self, *args, **kw): """ Returns a logger. Pass it the name you want if you'd like to specify one (e.g., core.getLogger("foo")). If you don't specify a name, it will make one up based on the module name it is called from. """ return getLogger(moreFrames=1,*args, **kw) def quit (self): """ Shut down POX. """ import threading if (self.starting_up or threading.current_thread() is self.scheduler._thread): t = threading.Thread(target=self._quit) t.daemon = True t.start() else: self._quit() def _quit (self): # Should probably do locking here if not self.running: return if self.starting_up: # Try again later self.quit() return self.running = False log.info("Going down...") import gc gc.collect() self.raiseEvent(GoingDownEvent()) self.callLater(self.scheduler.quit) for i in range(50): if self.scheduler._hasQuit: break gc.collect() time.sleep(.1) if not self.scheduler._allDone: log.warning("Scheduler didn't quit in time") self.raiseEvent(DownEvent()) log.info("Down.") #logging.shutdown() self.quit_condition.acquire() self.quit_condition.notifyAll() core.quit_condition.release() def _get_python_version (self): try: import platform return "{impl} ({vers}/{build})".format( impl=platform.python_implementation(), vers=platform.python_version(), build=platform.python_build()[1].replace(" "," ")) except: return "Unknown Python" def _get_platform_info (self): try: import platform return platform.platform().split("\n")[0] except: return "Unknown Platform" def _add_signal_handlers (self): if 
not self._handle_signals: return import threading # Note, python 3.4 will have threading.main_thread() # http://bugs.python.org/issue18882 if not isinstance(threading.current_thread(), threading._MainThread): raise RuntimeError("add_signal_handers must be called from MainThread") try: previous = signal.getsignal(signal.SIGHUP) signal.signal(signal.SIGHUP, self._signal_handler_SIGHUP) if previous != signal.SIG_DFL: log.warn('Redefined signal handler for SIGHUP') except (AttributeError, ValueError): # SIGHUP is not supported on some systems (e.g., Windows) log.debug("Didn't install handler for SIGHUP") def _signal_handler_SIGHUP (self, signal, frame): self.raiseLater(core, RereadConfiguration) def goUp (self): log.debug(self.version_string + " going up...") log.debug("Running on " + self._get_python_version()) log.debug("Platform is " + self._get_platform_info()) try: import platform vers = '.'.join(platform.python_version().split(".")[:2]) except: vers = 'an unknown version' if vers != "2.7": l = logging.getLogger("version") if not l.isEnabledFor(logging.WARNING): l.setLevel(logging.WARNING) l.warn("POX requires Python 2.7. 
You're running %s.", vers) l.warn("If you run into problems, try using Python 2.7 or PyPy.") self.starting_up = False self.raiseEvent(GoingUpEvent()) self._add_signal_handlers() self.raiseEvent(UpEvent()) self._waiter_notify() if self.running: log.info(self.version_string + " is up.") def _waiter_notify (self): if len(self._waiters): waiting_for = set() for entry in self._waiters: _, name, components, _, _ = entry components = [c for c in components if not self.hasComponent(c)] waiting_for.update(components) log.debug("%s still waiting for: %s" % (name, " ".join(components))) names = set([n for _,n,_,_,_ in self._waiters]) #log.info("%i things still waiting on %i components" # % (names, waiting_for)) log.warn("Still waiting on %i component(s)" % (len(waiting_for),)) def hasComponent (self, name): """ Returns True if a component with the given name has been registered. """ if name in ('openflow', 'OpenFlowConnectionArbiter'): self._openflow_wanted = True return name in self.components def registerNew (self, __componentClass, *args, **kw): """ Give it a class (and optional __init__ arguments), and it will create an instance and register it using the class name. If the instance has a _core_name property, it will use that instead. It returns the new instance. core.registerNew(FooClass, arg) is roughly equivalent to core.register("FooClass", FooClass(arg)). """ name = __componentClass.__name__ obj = __componentClass(*args, **kw) if hasattr(obj, '_core_name'): # Default overridden name = obj._core_name self.register(name, obj) return obj def register (self, name, component=None): """ Makes the object "component" available as pox.core.core.name. If only one argument is specified, the given argument is registered using its class name as the name. """ #TODO: weak references? 
if component is None: component = name name = component.__class__.__name__ if hasattr(component, '_core_name'): # Default overridden name = component._core_name if name in self.components: log.warn("Warning: Registered '%s' multipled times" % (name,)) self.components[name] = component self.raiseEventNoErrors(ComponentRegistered, name, component) self._try_waiters() def call_when_ready (self, callback, components=[], name=None, args=(), kw={}): """ Calls a callback when components are ready. """ if callback is None: callback = lambda:None callback.__name__ = "<None>" if isinstance(components, basestring): components = [components] elif isinstance(components, set): components = list(components) else: try: _ = components[0] components = list(components) except: components = [components] if name is None: #TODO: Use inspect here instead name = getattr(callback, 'func_name') if name is None: name = str(callback) else: name += "()" if hasattr(callback, 'im_class'): name = getattr(callback.__self__.__class__,'__name__','')+'.'+name if hasattr(callback, '__module__'): # Is this a good idea? If not here, we should do it in the # exception printing in try_waiter(). name += " in " + callback.__module__ entry = (callback, name, components, args, kw) self._waiters.append(entry) self._try_waiter(entry) def _try_waiter (self, entry): """ Tries a waiting callback. Calls the callback, removes from _waiters, and returns True if all are satisfied. 
""" if entry not in self._waiters: # Already handled return callback, name, components, args_, kw_ = entry for c in components: if not self.hasComponent(c): return False self._waiters.remove(entry) try: if callback is not None: callback(*args_,**kw_) except: import traceback msg = "Exception while trying to notify " + name import inspect try: msg += " at " + inspect.getfile(callback) msg += ":" + str(inspect.getsourcelines(callback)[1]) except: pass log.exception(msg) return True def _try_waiters (self): """ Tries to satisfy all component-waiting callbacks """ changed = True while changed: changed = False for entry in list(self._waiters): if self._try_waiter(entry): changed = True def listen_to_dependencies (self, sink, components=None, attrs=True, short_attrs=False, listen_args={}): """ Look through *sink* for handlers named like _handle_component_event. Use that to build a list of components, and append any components explicitly specified by *components*. listen_args is a dict of "component_name"={"arg_name":"arg_value",...}, allowing you to specify additional arguments to addListeners(). When all the referenced components are registered, do the following: 1) Set up all the event listeners 2) Call "_all_dependencies_met" on *sink* if it exists 3) If attrs=True, set attributes on *sink* for each component (e.g, sink._openflow_ would be set to core.openflow) For example, if topology is a dependency, a handler for topology's SwitchJoin event must be defined as so: def _handle_topology_SwitchJoin (self, ...): *NOTE*: The semantics of this function changed somewhat in the Summer 2012 milestone, though its intention remains the same. 
""" if components is None: components = set() elif isinstance(components, basestring): components = set([components]) else: components = set(components) for c in dir(sink): if not c.startswith("_handle_"): continue if c.count("_") < 3: continue c = '_'.join(c.split("_")[2:-1]) components.add(c) if None in listen_args: # This means add it to all... args = listen_args.pop(None) for k,v in args.iteritems(): for c in components: if c not in listen_args: listen_args[c] = {} if k not in listen_args[c]: listen_args[c][k] = v if set(listen_args).difference(components): log.error("Specified listen_args for missing component(s): %s" % (" ".join(set(listen_args).difference(components)),)) def done (sink, components, attrs, short_attrs): if attrs or short_attrs: for c in components: if short_attrs: attrname = c else: attrname = '_%s_' % (c,) setattr(sink, attrname, getattr(self, c)) for c in components: if hasattr(getattr(self, c), "_eventMixin_events"): kwargs = {"prefix":c} kwargs.update(listen_args.get(c, {})) getattr(self, c).addListeners(sink, **kwargs) getattr(sink, "_all_dependencies_met", lambda : None)() self.call_when_ready(done, components, name=sink.__class__.__name__, args=(sink,components,attrs,short_attrs)) if not self.starting_up: self._waiter_notify() def __getattr__ (self, name): if name in ('openflow', 'OpenFlowConnectionArbiter'): self._openflow_wanted = True c = self.components.get(name) if c is not None: return c raise AttributeError("'%s' not registered" % (name,)) core = None def initialize (threaded_selecthub=True, epoll_selecthub=False, handle_signals=True): global core core = POXCore(threaded_selecthub=threaded_selecthub, epoll_selecthub=epoll_selecthub, handle_signals=handle_signals) return core # The below is a big hack to make tests and doc tools work. # We should do something better. 
def _maybe_initialize (): import sys if 'unittest' in sys.modules or 'nose' in sys.modules: initialize() return import __main__ mod = getattr(__main__, '__file__', '') if 'pydoc' in mod or 'pdoc' in mod: initialize() return _maybe_initialize()
py
b41133e38cee5a8ebd3623d7a1b0dac0fb8b9236
num=int(input('digite um numero: ')) print('o numero escolhido foi{}'.format(num))
py
b41134227a7821b1b09fee594355a4286b1a5db0
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Plot clusters of data points and a graph of connections
"""

from vispy import app, scene, color
import numpy as np

# Initialize arrays for position, color, edges, and types for each point in
# the graph.
npts = 400
nedges = 900
ngroups = 7
np.random.seed(127396)
pos = np.empty((npts, 2), dtype='float32')
colors = np.empty((npts, 3), dtype='float32')
edges = np.empty((nedges, 2), dtype='uint32')
types = np.empty(npts, dtype=int)

# Assign random starting positions
pos[:] = np.random.normal(size=pos.shape, scale=4.)

# Assign each point to a group (groups have randomized sizes around grpsize)
grpsize = npts // ngroups
ptr = 0
typ = 0
while ptr < npts:
    size = np.random.random() * grpsize + grpsize // 2
    types[int(ptr):int(ptr + size)] = typ
    typ += 1
    ptr = ptr + size

# Randomly select connections, with higher connection probability between
# points in the same group
conn = []
connset = set()
while len(conn) < nedges:
    i, j = np.random.randint(npts, size=2)
    if i == j:
        continue
    p = 0.7 if types[i] == types[j] else 0.01
    if np.random.random() < p:
        if (i, j) in connset:
            continue
        connset.add((i, j))
        connset.add((j, i))
        conn.append([i, j])
edges[:] = conn

# Assign colors to each point based on its type
cmap = color.get_colormap('cubehelix')
typ_colors = np.array([cmap.map(x)[0, :3]
                       for x in np.linspace(0.2, 0.8, typ)])
colors[:] = typ_colors[types]

# Add some RGB noise and clip
colors *= 1.1 ** np.random.normal(size=colors.shape)
colors = np.clip(colors, 0, 1)

# Display the data
canvas = scene.SceneCanvas(keys='interactive', show=True)
view = canvas.central_widget.add_view()
view.camera = 'panzoom'
view.camera.aspect = 1

lines = scene.Line(pos=pos, connect=edges, antialias=False, method='gl',
                   color=(1, 1, 1, 0.2), parent=view.scene)
markers = scene.Markers(pos=pos, face_color=colors, symbol='o',
                        parent=view.scene)

view.camera.set_range()

i = 1


def update(ev):
    """One step of a simple force-directed layout, then redraw."""
    global pos, edges, lines, markers, view, force, dist, i

    # Pairwise displacement vectors and distances between all points
    dx = np.empty((npts, npts, 2), dtype='float32')
    dx[:] = pos[:, np.newaxis, :]
    dx -= pos[np.newaxis, :, :]

    dist = (dx**2).sum(axis=2)**0.5
    dist[dist == 0] = 1.
    ndx = dx / dist[..., np.newaxis]

    force = np.zeros((npts, npts, 2), dtype='float32')

    # all points push away from each other
    force -= 0.1 * ndx / dist[..., np.newaxis]**2

    # connected points pull toward each other
    # pulsed force helps to settle faster:
    s = 0.1
    # s = 0.05 * 5 ** (np.sin(i/20.) / (i/100.))
    # s = 0.05 + 1 * 0.99 ** i
    mask = np.zeros((npts, npts, 1), dtype='float32')
    mask[edges[:, 0], edges[:, 1]] = s
    mask[edges[:, 1], edges[:, 0]] = s
    force += dx * dist[..., np.newaxis] * mask

    # points do not exert force on themselves
    force[np.arange(npts), np.arange(npts)] = 0
    force = force.sum(axis=0)
    pos += np.clip(force, -3, 3) * 0.09

    lines.set_data(pos=pos)
    markers.set_data(pos=pos, face_color=colors)
    i += 1


timer = app.Timer(interval=0, connect=update, start=True)

if __name__ == '__main__':
    app.run()
py
b4113469e04b693b9f6cf95b56b310ec6244ddf2
#!/usr/bin/env python

##
# Massimiliano Patacchiola, Plymouth University 2016
#
# This is an example of head pose estimation with solvePnP and dlib face
# detector.  It uses the dlib library and openCV.
# To use this example you have to provide an input video file
# and an output path:
# python ex_pnp_pose_estimation_video.py /home/video.mpg ./output.avi
#

import numpy
import cv2
import sys
import os
from deepgaze.face_landmark_detection import faceLandmarkDetection

# For the frontal face detector
import dlib

# Antropometric constant values of the human head.
# Found on wikipedia and on:
# "Head-and-Face Anthropometric Survey of U.S. Respirator Users"
#
# X-Y-Z with X pointing forward and Y on the left.
# The X-Y-Z coordinates used are like the standard
# coordinates of ROS (robotic operative system)
P3D_RIGHT_SIDE = numpy.float32([-100.0, -77.5, -5.0])  # 0
P3D_GONION_RIGHT = numpy.float32([-110.0, -77.5, -85.0])  # 4
P3D_MENTON = numpy.float32([0.0, 0.0, -122.7])  # 8
P3D_GONION_LEFT = numpy.float32([-110.0, 77.5, -85.0])  # 12
P3D_LEFT_SIDE = numpy.float32([-100.0, 77.5, -5.0])  # 16
P3D_FRONTAL_BREADTH_RIGHT = numpy.float32([-20.0, -56.1, 10.0])  # 17
P3D_FRONTAL_BREADTH_LEFT = numpy.float32([-20.0, 56.1, 10.0])  # 26
P3D_SELLION = numpy.float32([0.0, 0.0, 0.0])  # 27
P3D_NOSE = numpy.float32([21.1, 0.0, -48.0])  # 30
P3D_SUB_NOSE = numpy.float32([5.0, 0.0, -52.0])  # 33
P3D_RIGHT_EYE = numpy.float32([-20.0, -65.5, -5.0])  # 36
P3D_RIGHT_TEAR = numpy.float32([-10.0, -40.5, -5.0])  # 39
P3D_LEFT_TEAR = numpy.float32([-10.0, 40.5, -5.0])  # 42
P3D_LEFT_EYE = numpy.float32([-20.0, 65.5, -5.0])  # 45
# P3D_LIP_RIGHT = numpy.float32([-20.0, 65.5,-5.0]) #48
# P3D_LIP_LEFT = numpy.float32([-20.0, 65.5,-5.0]) #54
P3D_STOMION = numpy.float32([10.0, 0.0, -75.0])  # 62

# The points to track
# These points are the ones used by PnP
# to estimate the 3D pose of the face
TRACKED_POINTS = (0, 4, 8, 12, 16, 17, 26, 27, 30, 33, 36, 39, 42, 45, 62)
ALL_POINTS = list(range(0, 68))  # Used for debug only


def main():
    # Check if some arguments have been passed
    # pass the path of a video
    if len(sys.argv) > 2:
        file_path = sys.argv[1]
        if os.path.isfile(file_path) == False:
            print("ex_pnp_head_pose_estimation: the file specified does not exist.")
            return
        else:
            # Open the video file
            video_capture = cv2.VideoCapture(file_path)
            if video_capture.isOpened() == True:
                print("ex_pnp_head_pose_estimation: the video source has been opened correctly...")
            # Define the codec and create VideoWriter object
            output_path = sys.argv[2]
            fourcc = cv2.VideoWriter_fourcc(*'XVID')
            out = cv2.VideoWriter(output_path, fourcc, 20.0, (1280, 720))
    else:
        print("You have to pass as argument the path to a video file and the path to the output file to produce, for example: \n python ex_pnp_pose_estimation_video.py /home/video.mpg ./output.avi")
        return

    # Create the main window and move it
    cv2.namedWindow('Video')
    cv2.moveWindow('Video', 20, 20)

    # Obtaining the CAM dimension
    cam_w = int(video_capture.get(3))
    cam_h = int(video_capture.get(4))

    # Defining the camera matrix.
    # To have better result it is necessary to find the focal
    # lenght of the camera. fx/fy are the focal lengths (in pixels)
    # and cx/cy are the optical centres. These values can be obtained
    # roughly by approximation, for example in a 640x480 camera:
    # cx = 640/2 = 320
    # cy = 480/2 = 240
    # fx = fy = cx/tan(60/2 * pi / 180) = 554.26
    c_x = cam_w / 2
    c_y = cam_h / 2
    f_x = c_x / numpy.tan(60/2 * numpy.pi / 180)
    f_y = f_x

    # Estimated camera matrix values.
    camera_matrix = numpy.float32([[f_x, 0.0, c_x],
                                   [0.0, f_y, c_y],
                                   [0.0, 0.0, 1.0]])
    print("Estimated camera matrix: \n" + str(camera_matrix) + "\n")

    # Distortion coefficients (assume an undistorted lens)
    camera_distortion = numpy.float32([0.0, 0.0, 0.0, 0.0, 0.0])

    # This matrix contains the 3D points of the
    # landmarks we want to find. It has been
    # obtained from antrophometric measurement
    # on the human head.
    landmarks_3D = numpy.float32([P3D_RIGHT_SIDE,
                                  P3D_GONION_RIGHT,
                                  P3D_MENTON,
                                  P3D_GONION_LEFT,
                                  P3D_LEFT_SIDE,
                                  P3D_FRONTAL_BREADTH_RIGHT,
                                  P3D_FRONTAL_BREADTH_LEFT,
                                  P3D_SELLION,
                                  P3D_NOSE,
                                  P3D_SUB_NOSE,
                                  P3D_RIGHT_EYE,
                                  P3D_RIGHT_TEAR,
                                  P3D_LEFT_TEAR,
                                  P3D_LEFT_EYE,
                                  P3D_STOMION])

    # Declaring the landmark detector and the dlib face detector
    my_detector = faceLandmarkDetection('./etc/shape_predictor_68_face_landmarks.dat')
    my_face_detector = dlib.get_frontal_face_detector()

    while True:
        # Capture frame-by-frame
        ret, frame = video_capture.read()
        # Fixed: the original loop never checked `ret`, so reaching the end
        # of the video passed a None frame to the detector and crashed.
        if ret == False:
            break

        faces_array = my_face_detector(frame, 1)
        print("Total Faces: " + str(len(faces_array)))
        for i, pos in enumerate(faces_array):

            face_x1 = pos.left()
            face_y1 = pos.top()
            face_x2 = pos.right()
            face_y2 = pos.bottom()
            text_x1 = face_x1
            text_y1 = face_y1 - 3

            cv2.putText(frame, "FACE " + str(i+1), (text_x1, text_y1),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
            cv2.rectangle(frame,
                          (face_x1, face_y1),
                          (face_x2, face_y2),
                          (0, 255, 0), 2)

            landmarks_2D = my_detector.returnLandmarks(frame,
                                                       face_x1, face_y1,
                                                       face_x2, face_y2,
                                                       points_to_return=TRACKED_POINTS)

            for point in landmarks_2D:
                cv2.circle(frame, (point[0], point[1]), 2, (0, 0, 255), -1)

            # Applying the PnP solver to find the 3D pose
            # of the head from the 2D position of the landmarks.
            # retval - bool
            # rvec - Output rotation vector that, together with tvec, brings
            #   points from the model coordinate system to the camera
            #   coordinate system.
            # tvec - Output translation vector.
            retval, rvec, tvec = cv2.solvePnP(landmarks_3D,
                                              landmarks_2D,
                                              camera_matrix,
                                              camera_distortion)

            # Now we project the 3D points into the image plane.
            # Creating a 3-axis to be used as reference in the image.
            axis = numpy.float32([[50, 0, 0],
                                  [0, 50, 0],
                                  [0, 0, 50]])
            imgpts, jac = cv2.projectPoints(axis, rvec, tvec,
                                            camera_matrix, camera_distortion)

            # Drawing the three axis on the image frame.
            # The opencv colors are defined as BGR colors such as:
            # (a, b, c) >> Blue = a, Green = b and Red = c
            # Our axis/color convention is X=R, Y=G, Z=B
            sellion_xy = (landmarks_2D[7][0], landmarks_2D[7][1])
            cv2.line(frame, sellion_xy, tuple(imgpts[1].ravel()), (0, 255, 0), 3)  # GREEN
            cv2.line(frame, sellion_xy, tuple(imgpts[2].ravel()), (255, 0, 0), 3)  # BLUE
            cv2.line(frame, sellion_xy, tuple(imgpts[0].ravel()), (0, 0, 255), 3)  # RED

        # Writing in the output file
        out.write(frame)

        # Showing the frame and waiting for the exit command
        cv2.imshow('Video', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    # Release the camera and the writer.
    # Fixed: the original never released `out`, which can leave the output
    # video unflushed/corrupt, and never destroyed the window.
    video_capture.release()
    out.release()
    cv2.destroyAllWindows()
    print("Bye...")


if __name__ == "__main__":
    main()
py
b41135d6a5c68fc85a566dee3bc1869b1fd93a97
# This code is part of Qiskit. # # (C) Copyright IBM 2019. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ .. currentmodule:: qiskit.scheduler.methods Pulse scheduling methods. .. autosummary:: :toctree: ../stubs/ basic """ from qiskit.scheduler.methods.basic import as_soon_as_possible, as_late_as_possible
py
b411363d87889a74edf84a2c30c0c6f4a437d72e
""" Copyright 2013 Steven Diamond Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from .cbc_conif import CBC from .cplex_conif import CPLEX from .cvxopt_conif import CVXOPT from .diffcp_conif import DIFFCP from .ecos_conif import ECOS from .glpk_conif import GLPK from .glpk_mi_conif import GLPK_MI from .gurobi_conif import GUROBI from .mosek_conif import MOSEK from .scs_conif import SCS from .xpress_conif import XPRESS from .nag_conif import NAG
py
b411364e2b6a59ad14fcd8918f63adad733e2ec8
"""Unit Tests for basic info""" import pytest import datetime from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from altaudit.models import Base, Character, Class, Faction, Race import altaudit.sections.basic as Section classes = {'classes': [ {'id': 1, 'name': 'Warrior'}, {'id': 2, 'name': 'Paladin'}, {'id': 3, 'name': 'Hunter'}, {'id': 4, 'name': 'Rogue'}, {'id': 5, 'name': 'Priest'}, {'id': 6, 'name': 'Death Knight'}, {'id': 7, 'name': 'Shaman'}, {'id': 8, 'name': 'Mage'}, {'id': 9, 'name': 'Warlock'}, {'id': 10, 'name': 'Monk'}, {'id': 11, 'name': 'Druid'}, {'id': 12, 'name': 'Demon Hunter'}]} factions = ['Alliance', 'Horde', 'Neutral'] races = [ {'id': 1, 'name': 'Human', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 2, 'name': 'Orc', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 3, 'name': 'Dwarf', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 4, 'name': 'Night Elf', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 5, 'name': 'Undead', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 6, 'name': 'Tauren', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 7, 'name': 'Gnome', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 8, 'name': 'Troll', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 9, 'name': 'Goblin', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 10, 'name': 'Blood Elf', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 11, 'name': 'Draenei', 'faction': {'type': 
'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 22, 'name': 'Worgen', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 24, 'name': 'Pandaren', 'faction': {'type': 'NEUTRAL', 'name': 'Neutral'}, 'is_selectable': True, 'is_allied_race': False}, {'id': 25, 'name': 'Pandaren', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': False, 'is_allied_race': False}, {'id': 26, 'name': 'Pandaren', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': False, 'is_allied_race': False}, {'id': 27, 'name': 'Nightborne', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 28, 'name': 'Highmountain Tauren', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 29, 'name': 'Void Elf', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 30, 'name': 'Lightforged Draenei', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 31, 'name': 'Zandalari Troll', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 32, 'name': 'Kul Tiran', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 34, 'name': 'Dark Iron Dwarf', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 35, 'name': 'Vulpera', 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 36, 'name': "Mag'har Orc", 'faction': {'type': 'HORDE', 'name': 'Horde'}, 'is_selectable': True, 'is_allied_race': True}, {'id': 37, 'name': 'Mechagnome', 'faction': {'type': 'ALLIANCE', 'name': 'Alliance'}, 'is_selectable': True, 'is_allied_race': True}] @pytest.fixture(scope='module') def db(): engine = 
# NOTE(review): this excerpt begins mid-way through a truncated pytest fixture
# (presumably ``def db():`` with ``engine = create_engine(...)``); the imports
# and the ``factions``/``classes``/``races`` data it uses are defined above
# this excerpt -- confirm against the full file.
create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(engine)()
# Seed lookup tables: factions, classes (fixed ids), and races linked to
# their faction rows.
session.add_all([Faction(f) for f in factions])
session.add_all([Class(c['name'], id=c['id']) for c in classes['classes']])
session.add_all([Race(r['name'],
                      id=r['id'],
                      faction=session.query(Faction).filter_by(name=r['faction']['name']).first())
                 for r in races])
session.commit()
session.close()
yield engine
Base.metadata.drop_all(engine)


@pytest.fixture
def db_session(db):
    # A fresh session bound to the shared seeded engine, closed after the test.
    session = sessionmaker(db)()
    yield session
    session.close()


@pytest.fixture
def fake_response_maker():
    # Factory fixture: builds a dict shaped like a character-profile API
    # response; every field can be overridden through keyword arguments.
    def _maker(
            name='toon1',
            gender='Male',
            race_name='Undead',
            class_name='Warlock',
            mainspec='Destruction',
            realm='realm1',
            level=120,
            # NOTE(review): default evaluated once at definition time, so all
            # calls that omit ``timestamp`` share the same value -- confirm
            # this is intended.
            timestamp=int(datetime.datetime.now().timestamp())*1000,
            avatar='avatar_url',
            bust='bust_url',
            render='render_url'):
        # Resolve ids/faction from the same static data used to seed the db.
        class_id = next(c['id'] for c in classes['classes'] if c['name'] == class_name)
        race = next(r for r in races if r['name'] == race_name)
        race_id = race['id']
        faction = race['faction']['name']
        # NOTE(review): bare expression has no effect -- presumably meant to
        # be ``assert gender in ('Male', 'Female')``.
        gender in ('Male', 'Female')
        return {
            'summary': {
                'name': name,
                'gender': {'type': gender.upper(), 'name': gender},
                'faction': {'type': faction.upper(), 'name': faction},
                'race': {'name': race_name, 'id': race_id},
                'character_class': {'name': class_name, 'id': class_id},
                'active_spec': {'name': mainspec},
                'realm': {'name': realm},
                'level': level,
                'last_login_timestamp': timestamp},
            'media': {
                'assets': [
                    {'key': 'avatar', 'value': avatar},
                    {'key': 'inset', 'value': bust},
                    {'key': 'main', 'value': render}]}
        }
    return _maker


# Each test below feeds a fabricated API response through ``Section.basic``
# and checks that exactly one attribute of the Character was populated.

def test_basic_info_name(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(name='Jack')
    Section.basic(jack, response, db_session)
    assert jack.name_api == 'Jack'


def test_basic_info_gender(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(gender='Female')
    Section.basic(jack, response, db_session)
    assert jack.gender == 'Female'


def test_basic_info_faction(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker()
    Section.basic(jack, response, db_session)
    # Default race is Undead, whose faction is Horde in the seed data.
    assert type(jack.faction) == Faction
    assert jack.faction.name == 'Horde'


def test_basic_info_race(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(race_name="Undead")
    Section.basic(jack, response, db_session)
    assert type(jack.race) == Race
    assert jack.race.name == "Undead"
    assert jack.race.id == 5


def test_basic_info_class(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(class_name='Monk')
    Section.basic(jack, response, db_session)
    assert type(jack.character_class) == Class
    assert jack.character_class.name == 'Monk'
    assert jack.character_class.id == 10


def test_basic_info_mainspec(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(mainspec='Shadow')
    Section.basic(jack, response, db_session)
    assert jack.mainspec == 'Shadow'


def test_basic_info_mainspec_not_present(fake_response_maker, db_session):
    # A response without an 'active_spec' key must leave mainspec unset.
    jack = Character('jack')
    response = fake_response_maker()
    del response['summary']['active_spec']
    Section.basic(jack, response, db_session)
    assert jack.mainspec == None


def test_basic_info_realm(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(realm="Zin'azshara")
    Section.basic(jack, response, db_session)
    assert jack.realm_name == "Zin'azshara"


def test_basic_info_level(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(level=45)
    Section.basic(jack, response, db_session)
    assert jack.level == 45


def test_basic_info_timestamp(fake_response_maker, db_session):
    jack = Character('jack')
    now = datetime.datetime.now().timestamp()*1000
    response = fake_response_maker(timestamp=now)
    Section.basic(jack, response, db_session)
    assert jack.lastmodified == now


def test_basic_info_avatar(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(avatar='realm1/96/184987488-avatar.jpg')
    Section.basic(jack, response, db_session)
    assert jack.avatar == 'realm1/96/184987488-avatar.jpg'


def test_basic_info_bust(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(bust='realm1/96/184987488-inset.jpg')
    Section.basic(jack, response, db_session)
    assert jack.bust == 'realm1/96/184987488-inset.jpg'


def test_basic_info_render(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(render='realm1/96/184987488-main.jpg')
    Section.basic(jack, response, db_session)
    assert jack.render == 'realm1/96/184987488-main.jpg'


# The *_old variants replace the assets-list media payload with the legacy
# flat ``{avatar_url, bust_url, render_url}`` shape.

def test_basic_info_avatar_old(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(avatar='realm1/96/184987488-avatar.jpg')
    response['media'] = {
        'avatar_url': 'realm1/96/184987488-avatar.jpg',
        'bust_url': 'bust_url',
        'render_url': 'render_url'}
    Section.basic(jack, response, db_session)
    assert jack.avatar == 'realm1/96/184987488-avatar.jpg'


def test_basic_info_bust_old(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(bust='realm1/96/184987488-inset.jpg')
    response['media'] = {
        'avatar_url': 'avatar_url',
        'bust_url': 'realm1/96/184987488-inset.jpg',
        'render_url': 'render_url'}
    Section.basic(jack, response, db_session)
    assert jack.bust == 'realm1/96/184987488-inset.jpg'


def test_basic_info_render_old(fake_response_maker, db_session):
    jack = Character('jack')
    response = fake_response_maker(render='realm1/96/184987488-main.jpg')
    response['media'] = {
        'avatar_url': 'avatar_url',
        'bust_url': 'bust_url',
        'render_url': 'realm1/96/184987488-main.jpg'}
    Section.basic(jack, response, db_session)
    assert jack.render == 'realm1/96/184987488-main.jpg'


def test_basic_info_no_media(fake_response_maker, db_session):
    # Missing media block must not crash; all media attributes stay unset.
    jack = Character('jack')
    response = fake_response_maker()
    response['media'] = None
    Section.basic(jack, response, db_session)
    assert jack.avatar == None
    assert jack.bust == None
    assert jack.render == None
py
b411366ee400d9dd5f769bade5830b5643945638
#!/usr/bin/python3


def add(a, b):
    """Return the sum of ``a`` and ``b``.

    Works for any operands supporting ``+`` (numbers, strings, lists, ...).
    """
    # The original copied the arguments into temporaries (i, j) before adding
    # and used the non-idiomatic ``return(i + j)``; the direct expression is
    # equivalent and clearer.
    return a + b
py
b411370bd76133cc6e39cf675355e9954142d0af
# Copyright (c) OpenMMLab. All rights reserved.
import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import (ConvModule, build_conv_layer, build_norm_layer,
                      constant_init)
from mmcv.cnn.bricks import DropPath
from mmcv.utils.parrots_wrapper import _BatchNorm

from ..builder import BACKBONES
from .base_backbone import BaseBackbone

# Drop-path (stochastic depth) is only enabled when the configured rate is
# meaningfully above zero.
eps = 1.0e-5


class BasicBlock(nn.Module):
    """BasicBlock for ResNet.

    Args:
        in_channels (int): Input channels of this block.
        out_channels (int): Output channels of this block.
        expansion (int): The ratio of ``out_channels/mid_channels`` where
            ``mid_channels`` is the output channels of conv1. This is a
            reserved argument in BasicBlock and should always be 1. Default: 1.
        stride (int): stride of the block. Default: 1
        dilation (int): dilation of convolution. Default: 1
        downsample (nn.Module, optional): downsample operation on identity
            branch. Default: None.
        style (str): `pytorch` or `caffe`. It is unused and reserved for
            unified API with Bottleneck.
        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
            memory while slowing down the training speed.
        conv_cfg (dict, optional): dictionary to construct and config conv
            layer. Default: None
        norm_cfg (dict): dictionary to construct and config norm layer.
            Default: dict(type='BN')
        drop_path_rate (float): Stochastic depth rate. Default: 0.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 expansion=1,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 drop_path_rate=0.0):
        super(BasicBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.expansion = expansion
        assert self.expansion == 1
        assert out_channels % expansion == 0
        self.mid_channels = out_channels // expansion
        self.stride = stride
        self.dilation = dilation
        self.style = style
        self.with_cp = with_cp
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg

        self.norm1_name, norm1 = build_norm_layer(
            norm_cfg, self.mid_channels, postfix=1)
        self.norm2_name, norm2 = build_norm_layer(
            norm_cfg, out_channels, postfix=2)

        self.conv1 = build_conv_layer(
            conv_cfg,
            in_channels,
            self.mid_channels,
            3,
            stride=stride,
            padding=dilation,
            dilation=dilation,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        self.conv2 = build_conv_layer(
            conv_cfg,
            self.mid_channels,
            out_channels,
            3,
            padding=1,
            bias=False)
        self.add_module(self.norm2_name, norm2)

        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.drop_path = DropPath(drop_prob=drop_path_rate
                                  ) if drop_path_rate > eps else nn.Identity()

    @property
    def norm1(self):
        """nn.Module: the normalization layer named by ``norm1_name``."""
        return getattr(self, self.norm1_name)

    @property
    def norm2(self):
        """nn.Module: the normalization layer named by ``norm2_name``."""
        return getattr(self, self.norm2_name)

    def forward(self, x):
        """Forward with optional gradient checkpointing."""

        def _inner_forward(x):
            identity = x

            out = self.conv1(x)
            out = self.norm1(out)
            out = self.relu(out)

            out = self.conv2(out)
            out = self.norm2(out)

            if self.downsample is not None:
                identity = self.downsample(x)

            # Drop-path is applied to the residual branch only, before the
            # identity addition.
            out = self.drop_path(out)
            out += identity

            return out

        if self.with_cp and x.requires_grad:
            out = cp.checkpoint(_inner_forward, x)
        else:
            out = _inner_forward(x)

        out = self.relu(out)

        return out


class Bottleneck(nn.Module):
    """Bottleneck block for ResNet.

    Args:
        in_channels (int): Input channels of this block.
        out_channels (int): Output channels of this block.
        expansion (int): The ratio of ``out_channels/mid_channels`` where
            ``mid_channels`` is the input/output channels of conv2. Default: 4.
        stride (int): stride of the block. Default: 1
        dilation (int): dilation of convolution. Default: 1
        downsample (nn.Module, optional): downsample operation on identity
            branch. Default: None.
        style (str): ``"pytorch"`` or ``"caffe"``. If set to "pytorch", the
            stride-two layer is the 3x3 conv layer, otherwise the stride-two
            layer is the first 1x1 conv layer. Default: "pytorch".
        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
            memory while slowing down the training speed.
        conv_cfg (dict, optional): dictionary to construct and config conv
            layer. Default: None
        norm_cfg (dict): dictionary to construct and config norm layer.
            Default: dict(type='BN')
        drop_path_rate (float): Stochastic depth rate. Default: 0.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 expansion=4,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 drop_path_rate=0.0):
        super(Bottleneck, self).__init__()
        assert style in ['pytorch', 'caffe']

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.expansion = expansion
        assert out_channels % expansion == 0
        self.mid_channels = out_channels // expansion
        self.stride = stride
        self.dilation = dilation
        self.style = style
        self.with_cp = with_cp
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg

        # 'pytorch' style puts the stride on the 3x3 conv, 'caffe' on the
        # first 1x1 conv.
        if self.style == 'pytorch':
            self.conv1_stride = 1
            self.conv2_stride = stride
        else:
            self.conv1_stride = stride
            self.conv2_stride = 1

        self.norm1_name, norm1 = build_norm_layer(
            norm_cfg, self.mid_channels, postfix=1)
        self.norm2_name, norm2 = build_norm_layer(
            norm_cfg, self.mid_channels, postfix=2)
        self.norm3_name, norm3 = build_norm_layer(
            norm_cfg, out_channels, postfix=3)

        self.conv1 = build_conv_layer(
            conv_cfg,
            in_channels,
            self.mid_channels,
            kernel_size=1,
            stride=self.conv1_stride,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        self.conv2 = build_conv_layer(
            conv_cfg,
            self.mid_channels,
            self.mid_channels,
            kernel_size=3,
            stride=self.conv2_stride,
            padding=dilation,
            dilation=dilation,
            bias=False)
        self.add_module(self.norm2_name, norm2)
        self.conv3 = build_conv_layer(
            conv_cfg,
            self.mid_channels,
            out_channels,
            kernel_size=1,
            bias=False)
        self.add_module(self.norm3_name, norm3)

        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.drop_path = DropPath(drop_prob=drop_path_rate
                                  ) if drop_path_rate > eps else nn.Identity()

    @property
    def norm1(self):
        """nn.Module: the normalization layer named by ``norm1_name``."""
        return getattr(self, self.norm1_name)

    @property
    def norm2(self):
        """nn.Module: the normalization layer named by ``norm2_name``."""
        return getattr(self, self.norm2_name)

    @property
    def norm3(self):
        """nn.Module: the normalization layer named by ``norm3_name``."""
        return getattr(self, self.norm3_name)

    def forward(self, x):
        """Forward with optional gradient checkpointing."""

        def _inner_forward(x):
            identity = x

            out = self.conv1(x)
            out = self.norm1(out)
            out = self.relu(out)

            out = self.conv2(out)
            out = self.norm2(out)
            out = self.relu(out)

            out = self.conv3(out)
            out = self.norm3(out)

            if self.downsample is not None:
                identity = self.downsample(x)

            # Drop-path on the residual branch only, before the addition.
            out = self.drop_path(out)
            out += identity

            return out

        if self.with_cp and x.requires_grad:
            out = cp.checkpoint(_inner_forward, x)
        else:
            out = _inner_forward(x)

        out = self.relu(out)

        return out


def get_expansion(block, expansion=None):
    """Get the expansion of a residual block.

    The block expansion will be obtained by the following order:

    1. If ``expansion`` is given, just return it.
    2. If ``block`` has the attribute ``expansion``, then return
       ``block.expansion``.
    3. Return the default value according the the block type:
       1 for ``BasicBlock`` and 4 for ``Bottleneck``.

    Args:
        block (class): The block class.
        expansion (int | None): The given expansion ratio.

    Returns:
        int: The expansion of the block.
    """
    if isinstance(expansion, int):
        assert expansion > 0
    elif expansion is None:
        if hasattr(block, 'expansion'):
            expansion = block.expansion
        elif issubclass(block, BasicBlock):
            expansion = 1
        elif issubclass(block, Bottleneck):
            expansion = 4
        else:
            raise TypeError(f'expansion is not specified for {block.__name__}')
    else:
        raise TypeError('expansion must be an integer or None')

    return expansion


class ResLayer(nn.Sequential):
    """ResLayer to build ResNet style backbone.

    Args:
        block (nn.Module): Residual block used to build ResLayer.
        num_blocks (int): Number of blocks.
        in_channels (int): Input channels of this block.
        out_channels (int): Output channels of this block.
        expansion (int, optional): The expansion for BasicBlock/Bottleneck.
            If not specified, it will firstly be obtained via
            ``block.expansion``. If the block has no attribute "expansion",
            the following default values will be used: 1 for BasicBlock and
            4 for Bottleneck. Default: None.
        stride (int): stride of the first block. Default: 1.
        avg_down (bool): Use AvgPool instead of stride conv when
            downsampling in the bottleneck. Default: False
        conv_cfg (dict, optional): dictionary to construct and config conv
            layer. Default: None
        norm_cfg (dict): dictionary to construct and config norm layer.
            Default: dict(type='BN')
    """

    def __init__(self,
                 block,
                 num_blocks,
                 in_channels,
                 out_channels,
                 expansion=None,
                 stride=1,
                 avg_down=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 **kwargs):
        self.block = block
        self.expansion = get_expansion(block, expansion)

        # Downsample the identity branch when spatial size or channel count
        # changes, optionally via an AvgPool (ResNet-D trick) instead of a
        # strided 1x1 conv.
        downsample = None
        if stride != 1 or in_channels != out_channels:
            downsample = []
            conv_stride = stride
            if avg_down and stride != 1:
                conv_stride = 1
                downsample.append(
                    nn.AvgPool2d(
                        kernel_size=stride,
                        stride=stride,
                        ceil_mode=True,
                        count_include_pad=False))
            downsample.extend([
                build_conv_layer(
                    conv_cfg,
                    in_channels,
                    out_channels,
                    kernel_size=1,
                    stride=conv_stride,
                    bias=False),
                build_norm_layer(norm_cfg, out_channels)[1]
            ])
            downsample = nn.Sequential(*downsample)

        layers = []
        # Only the first block carries the stride and the downsample module.
        layers.append(
            block(
                in_channels=in_channels,
                out_channels=out_channels,
                expansion=self.expansion,
                stride=stride,
                downsample=downsample,
                conv_cfg=conv_cfg,
                norm_cfg=norm_cfg,
                **kwargs))
        in_channels = out_channels
        for i in range(1, num_blocks):
            layers.append(
                block(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    expansion=self.expansion,
                    stride=1,
                    conv_cfg=conv_cfg,
                    norm_cfg=norm_cfg,
                    **kwargs))
        super(ResLayer, self).__init__(*layers)


@BACKBONES.register_module()
class _JackNet(BaseBackbone):
    """ResNet backbone.

    Please refer to the `paper <https://arxiv.org/abs/1512.03385>`__ for
    details.

    Args:
        depth (int): Network depth, from {18, 34, 50, 101, 152}.
        in_channels (int): Number of input image channels. Default: 3.
        stem_channels (int): Output channels of the stem layer. Default: 64.
        base_channels (int): Middle channels of the first stage. Default: 64.
        num_stages (int): Stages of the network. Default: 4.
        strides (Sequence[int]): Strides of the first block of each stage.
            Default: ``(1, 2, 2, 2)``.
        dilations (Sequence[int]): Dilation of each stage.
            Default: ``(1, 1, 1, 1)``.
        out_indices (Sequence[int]): Output from which stages.
            Default: ``(3, )``.
        style (str): `pytorch` or `caffe`. If set to "pytorch", the
            stride-two layer is the 3x3 conv layer, otherwise the stride-two
            layer is the first 1x1 conv layer.
        deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv.
            Default: False.
        avg_down (bool): Use AvgPool instead of stride conv when
            downsampling in the bottleneck. Default: False.
        frozen_stages (int): Stages to be frozen (stop grad and set eval
            mode). -1 means not freezing any parameters. Default: -1.
        conv_cfg (dict | None): The config dict for conv layers.
            Default: None.
        norm_cfg (dict): The config dict for norm layers.
        norm_eval (bool): Whether to set norm layers to eval mode, namely,
            freeze running stats (mean and var). Note: Effect on Batch Norm
            and its variants only. Default: False.
        with_cp (bool): Use checkpoint or not. Using checkpoint will save
            some memory while slowing down the training speed. Default: False.
        zero_init_residual (bool): Whether to use zero init for last norm
            layer in resblocks to let them behave as identity. Default: True.

    Example:
        >>> from mmcls.models import ResNet
        >>> import torch
        >>> self = ResNet(depth=18)
        >>> self.eval()
        >>> inputs = torch.rand(1, 3, 32, 32)
        >>> level_outputs = self.forward(inputs)
        >>> for level_out in level_outputs:
        ...     print(tuple(level_out.shape))
        (1, 64, 8, 8)
        (1, 128, 4, 4)
        (1, 256, 2, 2)
        (1, 512, 1, 1)
    """

    arch_settings = {
        18: (BasicBlock, (2, 2, 2, 2)),
        34: (BasicBlock, (3, 4, 6, 3)),
        50: (Bottleneck, (3, 4, 6, 3)),
        101: (Bottleneck, (3, 4, 23, 3)),
        152: (Bottleneck, (3, 8, 36, 3))
    }

    def __init__(self,
                 depth,
                 in_channels=3,
                 stem_channels=64,
                 base_channels=64,
                 expansion=None,
                 num_stages=4,
                 strides=(1, 2, 2, 2),
                 dilations=(1, 1, 1, 1),
                 out_indices=(3, ),
                 style='pytorch',
                 deep_stem=False,
                 avg_down=False,
                 frozen_stages=-1,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN', requires_grad=True),
                 norm_eval=False,
                 with_cp=False,
                 zero_init_residual=True,
                 init_cfg=[
                     dict(type='Kaiming', layer=['Conv2d']),
                     dict(
                         type='Constant',
                         val=1,
                         layer=['_BatchNorm', 'GroupNorm'])
                 ],
                 drop_path_rate=0.0):
        super(_JackNet, self).__init__(init_cfg)
        if depth not in self.arch_settings:
            raise KeyError(f'invalid depth {depth} for resnet')
        self.depth = depth
        self.stem_channels = stem_channels
        self.base_channels = base_channels
        self.num_stages = num_stages
        assert num_stages >= 1 and num_stages <= 4
        self.strides = strides
        self.dilations = dilations
        assert len(strides) == len(dilations) == num_stages
        self.out_indices = out_indices
        assert max(out_indices) < num_stages
        self.style = style
        self.deep_stem = deep_stem
        self.avg_down = avg_down
        self.frozen_stages = frozen_stages
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.with_cp = with_cp
        self.norm_eval = norm_eval
        self.zero_init_residual = zero_init_residual
        self.block, stage_blocks = self.arch_settings[depth]
        self.stage_blocks = stage_blocks[:num_stages]
        self.expansion = get_expansion(self.block, expansion)

        self._make_stem_layer(in_channels, stem_channels)

        self.res_layers = []
        _in_channels = stem_channels
        _out_channels = base_channels * self.expansion
        for i, num_blocks in enumerate(self.stage_blocks):
            stride = strides[i]
            dilation = dilations[i]
            res_layer = self.make_res_layer(
                block=self.block,
                num_blocks=num_blocks,
                in_channels=_in_channels,
                out_channels=_out_channels,
                expansion=self.expansion,
                stride=stride,
                dilation=dilation,
                style=self.style,
                avg_down=self.avg_down,
                with_cp=with_cp,
                conv_cfg=conv_cfg,
                norm_cfg=norm_cfg,
                drop_path_rate=drop_path_rate)
            _in_channels = _out_channels
            _out_channels *= 2
            layer_name = f'layer{i + 1}'
            self.add_module(layer_name, res_layer)
            self.res_layers.append(layer_name)

        self._freeze_stages()

        self.feat_dim = res_layer[-1].out_channels

    def make_res_layer(self, **kwargs):
        """Build a :class:`ResLayer`; overridable by subclasses."""
        return ResLayer(**kwargs)

    @property
    def norm1(self):
        """nn.Module: the stem normalization layer (non-deep-stem only)."""
        return getattr(self, self.norm1_name)

    def _make_stem_layer(self, in_channels, stem_channels):
        """Build the stem: three 3x3 convs if ``deep_stem`` else one 7x7."""
        if self.deep_stem:
            self.stem = nn.Sequential(
                ConvModule(
                    in_channels,
                    stem_channels // 2,
                    kernel_size=3,
                    stride=2,
                    padding=1,
                    conv_cfg=self.conv_cfg,
                    norm_cfg=self.norm_cfg,
                    inplace=True),
                ConvModule(
                    stem_channels // 2,
                    stem_channels // 2,
                    kernel_size=3,
                    stride=1,
                    padding=1,
                    conv_cfg=self.conv_cfg,
                    norm_cfg=self.norm_cfg,
                    inplace=True),
                ConvModule(
                    stem_channels // 2,
                    stem_channels,
                    kernel_size=3,
                    stride=1,
                    padding=1,
                    conv_cfg=self.conv_cfg,
                    norm_cfg=self.norm_cfg,
                    inplace=True))
        else:
            self.conv1 = build_conv_layer(
                self.conv_cfg,
                in_channels,
                stem_channels,
                kernel_size=7,
                stride=2,
                padding=3,
                bias=False)
            self.norm1_name, norm1 = build_norm_layer(
                self.norm_cfg, stem_channels, postfix=1)
            self.add_module(self.norm1_name, norm1)
            self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

    def _freeze_stages(self):
        """Freeze stem and the first ``frozen_stages`` res layers."""
        if self.frozen_stages >= 0:
            if self.deep_stem:
                self.stem.eval()
                for param in self.stem.parameters():
                    param.requires_grad = False
            else:
                self.norm1.eval()
                for m in [self.conv1, self.norm1]:
                    for param in m.parameters():
                        param.requires_grad = False

        for i in range(1, self.frozen_stages + 1):
            m = getattr(self, f'layer{i}')
            m.eval()
            for param in m.parameters():
                param.requires_grad = False

    def init_weights(self):
        """Initialize weights; zero-init residual norms unless pretrained."""
        # BUG FIX: the original called ``super(ResNet, self)`` but no name
        # ``ResNet`` exists in this module (the class is ``_JackNet``), which
        # raised ``NameError`` at runtime.
        super(_JackNet, self).init_weights()

        if (isinstance(self.init_cfg, dict)
                and self.init_cfg['type'] == 'Pretrained'):
            # Suppress zero_init_residual if use pretrained model.
            return

        if self.zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    constant_init(m.norm3, 0)
                elif isinstance(m, BasicBlock):
                    constant_init(m.norm2, 0)

    def forward(self, x):
        """Run the stem and all res layers; return the selected stage outputs."""
        if self.deep_stem:
            x = self.stem(x)
        else:
            x = self.conv1(x)
            x = self.norm1(x)
            x = self.relu(x)
        x = self.maxpool(x)
        outs = []
        for i, layer_name in enumerate(self.res_layers):
            res_layer = getattr(self, layer_name)
            x = res_layer(x)
            if i in self.out_indices:
                outs.append(x)
        return tuple(outs)

    def train(self, mode=True):
        """Switch train/eval mode, re-freezing stages and (optionally) norms."""
        # BUG FIX: ``super(ResNet, self)`` -> ``super(_JackNet, self)``; see
        # the same fix in ``init_weights``.
        super(_JackNet, self).train(mode)
        self._freeze_stages()
        if mode and self.norm_eval:
            for m in self.modules():
                # trick: eval have effect on BatchNorm only
                if isinstance(m, _BatchNorm):
                    m.eval()


@BACKBONES.register_module()
class JackNet(_JackNet):
    """ResNetV1d backbone.

    This variant is described in `Bag of Tricks.
    <https://arxiv.org/pdf/1812.01187.pdf>`_.

    Compared with default ResNet(ResNetV1b), ResNetV1d replaces the 7x7 conv
    in the input stem with three 3x3 convs. And in the downsampling block, a
    2x2 avg_pool with stride 2 is added before conv, whose stride is changed
    to 1.
    """

    def __init__(self, **kwargs):
        super(JackNet, self).__init__(
            deep_stem=True, avg_down=True, **kwargs)
py
b411375b333ec4ab39f21feb2a40df278d9aa0c4
"""Test code for broadcasting operators.""" import numpy as np import tvm import topi def verify_expand_dims(in_shape, out_shape, axis, num_newaxis): A = tvm.placeholder(shape=in_shape, name="A") B = topi.cpp.expand_dims(A, axis, num_newaxis) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [B]) else: s = topi.cpp.cuda.schedule_injective(target, [B]) foo = tvm.build(s, [A, B], device, name="expand_dims") data_npy = np.random.uniform(size=in_shape).astype(A.dtype) out_npy = data_npy.reshape(out_shape) data_nd = tvm.nd.array(data_npy, ctx) out_nd = tvm.nd.array(np.empty(out_shape).astype(B.dtype), ctx) foo(data_nd, out_nd) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_tranpose(in_shape, axes): A = tvm.placeholder(shape=in_shape, name="A") B = topi.cpp.transpose(A, axes) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [B]) else: s = topi.cpp.cuda.schedule_injective(target, [B]) ctx = tvm.context(device, 0) foo = tvm.build(s, [A, B], device, name="tranpose") data_npy = np.arange(np.prod(in_shape)).reshape(in_shape).astype(A.dtype) out_npy = data_npy.transpose(axes) data_nd = tvm.nd.array(data_npy, ctx) out_nd = tvm.nd.empty(out_npy.shape, ctx=ctx, dtype=B.dtype) foo(data_nd, out_nd) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_reshape(src_shape, dst_shape): A = tvm.placeholder(shape=src_shape, 
name="A") B = topi.cpp.reshape(A, dst_shape) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [B]) else: s = topi.cpp.cuda.schedule_injective(target, [B]) foo = tvm.build(s, [A, B], device, name="reshape") data_npy = np.random.normal(size=src_shape).astype(A.dtype) out_npy = np.reshape(data_npy, newshape=dst_shape) data_nd = tvm.nd.array(data_npy, ctx) out_nd = tvm.nd.empty(dst_shape, ctx=ctx, dtype=B.dtype) foo(data_nd, out_nd) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_squeeze(src_shape, axis): A = tvm.placeholder(shape=src_shape, name="A") B = topi.cpp.squeeze(A, axis) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [B]) else: s = topi.cpp.cuda.schedule_injective(target, [B]) foo = tvm.build(s, [A, B], device, name="squeeze") data_npy = np.random.normal(size=src_shape).astype(A.dtype) out_npy = np.squeeze(data_npy, axis=axis) data_nd = tvm.nd.array(data_npy, ctx) out_nd_shape = out_npy.shape out_nd = tvm.nd.empty(out_nd_shape, ctx=ctx, dtype=B.dtype) foo(data_nd, out_nd) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_concatenate(shapes, axis): tensor_l = [] for i, shape in enumerate(shapes): tensor_l.append(tvm.placeholder(shape, name="A" + str(i))) out_tensor = topi.cpp.concatenate(tensor_l, axis) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip 
because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [out_tensor]) else: s = topi.cpp.cuda.schedule_injective(target, [out_tensor]) foo = tvm.build(s, tensor_l + [out_tensor], device, name="concatenate") data_npys = [np.random.normal(size=shape).astype(tensor_l[0].dtype) for shape in shapes] out_npy = np.concatenate(data_npys, axis=axis) data_nds = [tvm.nd.array(data_npy, ctx) for data_npy in data_npys] out_nd = tvm.nd.empty(out_npy.shape, ctx=ctx, dtype=out_tensor.dtype) foo(*(data_nds + [out_nd])) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_split(src_shape, indices_or_sections, axis): A = tvm.placeholder(shape=src_shape, name="A") tensor_l = topi.cpp.split(A, indices_or_sections, axis) tensor_l = list(tensor_l) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, tensor_l) else: s = topi.cpp.cuda.schedule_injective(target, tensor_l) ctx = tvm.context(device, 0) foo = tvm.build(s, [A] + tensor_l, device, name="split") data_npy = np.random.normal(size=src_shape).astype(A.dtype) out_npys = np.split(data_npy, indices_or_sections, axis=axis) data_nd = tvm.nd.array(data_npy, ctx) out_nds = [tvm.nd.empty(out_npy.shape, ctx=ctx, dtype=tensor_l[0].dtype) for out_npy in out_npys] foo(*([data_nd] + out_nds)) for out_nd, out_npy in zip(out_nds, out_npys): tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_take(src_shape, indices_src, axis=None): src_dtype = "float32" indices_dtype = "int32" 
indices_src = np.array(indices_src, dtype=indices_dtype) A = tvm.placeholder(shape=src_shape, dtype=src_dtype, name="A") indices = tvm.placeholder(shape=indices_src.shape, dtype=indices_dtype, name="indices") if axis is None: out_tensor = topi.cpp.take(A, indices) else: out_tensor = topi.cpp.take(A, indices, axis) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) with tvm.target.create(device): s = topi.generic.schedule_injective(out_tensor) foo = tvm.build(s, [A] + [indices] + [out_tensor] , device, name="take") shape_size = 1 for i in range(len(src_shape)): shape_size = shape_size * src_shape[i] data_npy = np.arange(shape_size, dtype=src_dtype).reshape((src_shape)) if axis is None: out_npys = np.take(data_npy, indices_src) else: out_npys = np.take(data_npy, indices_src, axis=axis) data_nd = tvm.nd.array(data_npy, ctx) indices_nd = tvm.nd.array(indices_src, ctx) out_nd = tvm.nd.empty(out_npys.shape, ctx=ctx, dtype=src_dtype) foo(data_nd, indices_nd, out_nd) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npys) for device in ["llvm", "opencl"]: check_device(device) def verify_where(condition, x, y): dtype = "float32" if len(condition.shape) == 1: np_out = np.array([xv if c else yv for (c,xv,yv) in zip(condition,x,y)]) else: np_out = np.where(condition, x, y) A = tvm.placeholder(shape=condition.shape, dtype=dtype, name="condition") B = tvm.placeholder(shape=x.shape, dtype=dtype, name="x") C = tvm.placeholder(shape=y.shape, dtype=dtype, name="y") out_tensor = topi.cpp.where(A, B, C) def check_device(device): ctx = tvm.context(device, 0) if not ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) with tvm.target.create(device): s = topi.generic.schedule_injective(out_tensor) foo = tvm.build(s, [A, B, C, out_tensor], device, name="where") tvm_out = tvm.nd.empty(x.shape, ctx=ctx, dtype=dtype) 
foo(tvm.nd.array(condition, ctx), tvm.nd.array(x, ctx), tvm.nd.array(y, ctx), tvm_out) tvm.testing.assert_allclose(tvm_out.asnumpy(), np_out) for device in ["llvm", "nvptx", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_concatenate_split(shapes, axis, indices_or_sections): tensor_l_concatenate = [] for i, shape in enumerate(shapes): tensor_l_concatenate.append(tvm.placeholder(shape, name="A" + str(i))) out_tensor = topi.cpp.concatenate(tensor_l_concatenate, axis) tensor_l = topi.cpp.split(out_tensor, indices_or_sections, axis) tensor_l = list(tensor_l) def check_device(device): if not tvm.module.enabled(device): print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, tensor_l) else: s = topi.cpp.cuda.schedule_injective(target, tensor_l) ctx = tvm.context(device, 0) foo = tvm.build(s, tensor_l_concatenate + tensor_l, device, name="concatenate_split") data_npys = [np.random.normal(size=shape).astype(tensor_l_concatenate[0].dtype) for shape in shapes] out_npy_conc = np.concatenate(data_npys, axis=axis) out_npys_split = np.split(out_npy_conc, indices_or_sections, axis=axis) data_nds = [tvm.nd.array(data_npy, ctx) for data_npy in data_npys] out_nds = [tvm.nd.empty(out_npy.shape, ctx=ctx, dtype=tensor_l[0].dtype) for out_npy in out_npys_split] foo(*(data_nds + out_nds)) for out_nd, out_npy in zip(out_nds, out_npys_split): tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy) for device in ["llvm", "cuda", "opencl", "metal", "rocm"]: check_device(device) def verify_concatenate_broadcast(shapes, axis, rhs_shape): B = tvm.placeholder(shape=rhs_shape, name="B") tensor_l = [] for i, shape in enumerate(shapes): tensor_l.append(tvm.placeholder(shape, name="A" + str(i))) out_tensor = topi.cpp.concatenate(tensor_l, axis) C = out_tensor + B def check_device(device): ctx = tvm.context(device, 0) if not 
ctx.exist: print("Skip because %s is not enabled" % device) return print("Running on target: %s" % device) target = topi.cpp.TEST_create_target(device) if device == "llvm": s = topi.cpp.generic.schedule_injective(target, [C]) else: s = topi.cpp.cuda.schedule_injective(target, [C]) ctx = tvm.context(device, 0) foo = tvm.build(s, tensor_l + [B, C], device, name="broadcast_binary_add") data_npys = [np.random.normal(size=shape).astype(tensor_l[0].dtype) for shape in shapes] lhs_npy = np.concatenate(data_npys, axis=axis) rhs_npy = np.random.uniform(size=rhs_shape).astype(B.dtype) out_npy = lhs_npy + rhs_npy data_nds = [tvm.nd.array(data_npy, ctx) for data_npy in data_npys] rhs_nd = tvm.nd.array(rhs_npy, ctx) out_nd = tvm.nd.array(np.empty(out_npy.shape).astype(B.dtype), ctx) for _ in range(1): foo(*(data_nds + [rhs_nd] + [out_nd])) tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy, rtol=1E-4, atol=1E-4) for device in ["llvm", "cuda", "opencl", "metal", "rocm"]: check_device(device) def test_expand_dims(): verify_expand_dims((3, 10), (3, 10, 1, 1), 2, 2) verify_expand_dims((3, 10), (1, 3, 10), -3, 1) def test_tranpose(): verify_tranpose((3, 10, 2), (1, 0, 2)) verify_tranpose((3, 10, 5), (2, 0, 1)) verify_tranpose((3, 10), None) verify_tranpose((3, 10, 5), (2, -3, 1)) def test_reshape(): verify_reshape((1, 2, 3, 4), (2, 3, 4)) verify_reshape((4, 2, 3, 4), (2, 4, 12)) verify_reshape((4, 2, 3, 4), (2, 48)) verify_reshape((16, ), (2, 2, 2, 2)) def test_squeeze(): verify_squeeze((1, 2, 3, 4), 0) verify_squeeze((1, 2, 1, 4), None) verify_squeeze((1, 1, 1, 4), (1, 2)) verify_squeeze((1, 1, 1, 1), None) def test_concatenate(): verify_concatenate([(2,), (2,), (2,)], 0) verify_concatenate([(2, 3, 4), (2, 2, 4), (2, 5, 4)], 1) verify_concatenate([(1, 2, 4), (1, 2, 3), (1, 2, 7), (1, 2, 8), (1, 2, 1)], -1) verify_concatenate([(5, 6, 7, 3), (16, 6, 7, 3), (12, 6, 7, 3), (8, 6, 7, 3), (2, 6, 7, 3)], 0) def test_split(): verify_split((2, 12, 3), 3, 1) verify_split((2, 12, 3), 3, 
-1) verify_split((2, 12, 3), [2, 4], 1) verify_split((10, 12, 24), [5, 7, 9], -1) def test_take(): verify_take((4,), [1]) verify_take((4,), [[0,1,2,3]]) verify_take((3,3,3), [[11,25]]) verify_take((4,), [[0,1],[2,3]]) verify_take((4,), [1], 0) verify_take((2,2), [[[1,0],[0,1]]], 0) verify_take((2,2), [[[1,0],[0,1]]], 1) verify_take((4,3,5,6), [[2,1,0,0]], -2) def test_where(): shape = (10, 3, 7, 13) condition = np.random.uniform(low=-1, high=1, size=shape).astype("float32") x = np.random.uniform(size=shape).astype("float32") y = np.random.uniform(size=shape).astype("float32") verify_where(condition, x, y) condition = np.random.uniform(low=-1, high=1, size=(shape[0],)).astype("float32") x = np.random.uniform(size=shape).astype("float32") y = np.random.uniform(size=shape).astype("float32") verify_where(condition, x, y) def test_regression_1(): verify_concatenate_split([(2, 3, 4), (2, 2, 4), (2, 5, 4)], 1, [3, 7]) verify_concatenate_split([(3, 4), (2, 4), (3, 4)], 0, [1, 2, 3, 4]) def test_regression_2(): verify_concatenate_broadcast([(5, 1, 3), (5, 1, 3)], 1, [2, 1]) verify_concatenate_broadcast([(5, 1, 2), (5, 1, 3)], 2, [1, 5]) if __name__ == "__main__": test_concatenate() test_tranpose() test_expand_dims() test_reshape() test_squeeze() test_split() test_take() test_where() test_regression_1() test_regression_2()
py
b411387eaca1d94b0a606d99cedab0cff181f5ad
import importlib import inspect import os import re import sys import tempfile from io import StringIO from pathlib import Path from django.conf.urls import url from django.core import mail from django.core.files.uploadedfile import SimpleUploadedFile from django.db import DatabaseError, connection from django.shortcuts import render from django.template import TemplateDoesNotExist from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.utils import LoggingCaptureMixin from django.urls import reverse from django.utils.functional import SimpleLazyObject from django.utils.safestring import mark_safe from django.utils.version import PY36 from django.views.debug import ( CLEANSED_SUBSTITUTE, CallableSettingWrapper, ExceptionReporter, cleanse_setting, technical_500_response, ) from ..views import ( custom_exception_reporter_filter_view, index_page, multivalue_dict_key_error, non_sensitive_view, paranoid_view, sensitive_args_function_caller, sensitive_kwargs_function_caller, sensitive_method_view, sensitive_view, ) class User: def __str__(self): return 'jacob' class WithoutEmptyPathUrls: urlpatterns = [url(r'url/$', index_page, name='url')] class CallableSettingWrapperTests(SimpleTestCase): """ Unittests for CallableSettingWrapper """ def test_repr(self): class WrappedCallable: def __repr__(self): return "repr from the wrapped callable" def __call__(self): pass actual = repr(CallableSettingWrapper(WrappedCallable())) self.assertEqual(actual, "repr from the wrapped callable") @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') class DebugViewTests(SimpleTestCase): def test_files(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises/') self.assertEqual(response.status_code, 500) data = { 'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'), } with self.assertLogs('django.request', 'ERROR'): response = self.client.post('/raises/', data) self.assertContains(response, 'file_data.txt', 
status_code=500) self.assertNotContains(response, 'haha', status_code=500) def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) # Ensure no 403.html template exists to test the default case. @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', }]) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) # Set up a test 403.html template. @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'loaders': [ ('django.template.loaders.locmem.Loader', { '403.html': 'This is a test template for a 403 error ({{ exception }}).', }), ], }, }]) def test_403_template(self): response = self.client.get('/raises403/') self.assertContains(response, 'test template', status_code=403) self.assertContains(response, '(Insufficient Permissions).', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertEqual(response.status_code, 404) def test_raised_404(self): response = self.client.get('/views/raises404/') self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404) def test_404_not_in_urls(self): response = self.client.get('/not-in-urls') self.assertNotContains(response, "Raised by:", status_code=404) self.assertContains(response, "Django tried these URL patterns", status_code=404) self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404) # Pattern and view name of a RegexURLPattern appear. self.assertContains(response, r"^regex-post/(?P&lt;pk&gt;[0-9]+)/$", status_code=404) self.assertContains(response, "[name='regex-post']", status_code=404) # Pattern and view name of a RoutePattern appear. 
self.assertContains(response, r"path-post/&lt;int:pk&gt;/", status_code=404) self.assertContains(response, "[name='path-post']", status_code=404) @override_settings(ROOT_URLCONF=WithoutEmptyPathUrls) def test_404_empty_path_not_in_urls(self): response = self.client.get('/') self.assertContains(response, "The empty path didn't match any of these.", status_code=404) def test_technical_404(self): response = self.client.get('/views/technical404/') self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.technical404", status_code=404) def test_classbased_technical_404(self): response = self.client.get('/views/classbased404/') self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.Http404View", status_code=404) def test_non_l10ned_numeric_ids(self): """ Numeric IDs and fancy traceback context blocks line numbers shouldn't be localized. """ with self.settings(DEBUG=True, USE_L10N=True): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') # We look for a HTML fragment of the form # '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"' self.assertContains(response, '<div class="context" id="', status_code=500) match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content) self.assertIsNotNone(match) id_repr = match.group('id') self.assertFalse( re.search(b'[^c0-9]', id_repr), "Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." 
% id_repr.decode() ) def test_template_exceptions(self): with self.assertLogs('django.request', 'ERROR'): try: self.client.get(reverse('template_exception')) except Exception: raising_loc = inspect.trace()[-1][-2][0].strip() self.assertNotEqual( raising_loc.find("raise Exception('boom')"), -1, "Failed to find 'raise Exception' in last frame of " "traceback, instead found: %s" % raising_loc ) def test_template_loader_postmortem(self): """Tests for not existing file""" template_name = "notfound.html" with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile: tempdir = os.path.dirname(tmpfile.name) template_path = os.path.join(tempdir, template_name) with override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [tempdir], }]), self.assertLogs('django.request', 'ERROR'): response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name})) self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2) # Assert as HTML. self.assertContains( response, '<li><code>django.template.loaders.filesystem.Loader</code>: ' '%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'), status_code=500, html=True, ) def test_no_template_source_loaders(self): """ Make sure if you don't specify a template, the debug view doesn't blow up. """ with self.assertLogs('django.request', 'ERROR'): with self.assertRaises(TemplateDoesNotExist): self.client.get('/render_no_template/') @override_settings(ROOT_URLCONF='view_tests.default_urls') def test_default_urlconf_template(self): """ Make sure that the default URLconf template is shown shown instead of the technical 404 page, if the user has not altered their URLconf yet. """ response = self.client.get('/') self.assertContains( response, "<h2>The install worked successfully! 
Congratulations!</h2>" ) @override_settings(ROOT_URLCONF='view_tests.regression_21530_urls') def test_regression_21530(self): """ Regression test for bug #21530. If the admin app include is replaced with exactly one url pattern, then the technical 404 template should be displayed. The bug here was that an AttributeError caused a 500 response. """ response = self.client.get('/') self.assertContains( response, "Page not found <span>(404)</span>", status_code=404 ) class DebugViewQueriesAllowedTests(SimpleTestCase): # May need a query to initialize MySQL connection allow_database_queries = True def test_handle_db_exception(self): """ Ensure the debug view works when a database exception is raised by performing an invalid query and passing the exception to the debug view. """ with connection.cursor() as cursor: try: cursor.execute('INVALID SQL') except DatabaseError: exc_info = sys.exc_info() rf = RequestFactory() response = technical_500_response(rf.get('/'), *exc_info) self.assertContains(response, 'OperationalError at /', status_code=500) @override_settings( DEBUG=True, ROOT_URLCONF='view_tests.urls', # No template directories are configured, so no templates will be found. TEMPLATES=[{ 'BACKEND': 'django.template.backends.dummy.TemplateStrings', }], ) class NonDjangoTemplatesDebugViewTests(SimpleTestCase): def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertEqual(response.status_code, 404) def test_template_not_found_error(self): # Raises a TemplateDoesNotExist exception and shows the debug view. 
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"}) with self.assertLogs('django.request', 'ERROR'): response = self.client.get(url) self.assertContains(response, '<div class="context" id="', status_code=500) class ExceptionReporterTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Can&#39;t find my keys</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>jacob</p>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) self.assertIn('<p>No POST data</p>', html) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError</h1>', html) self.assertIn('<pre class="exception_value">Can&#39;t find my keys</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request 
information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_eol_support(self): """The ExceptionReporter supports Unix, Windows and Macintosh EOL markers""" LINES = ['print %d' % i for i in range(1, 6)] reporter = ExceptionReporter(None, None, None, None) for newline in ['\n', '\r\n', '\r']: fd, filename = tempfile.mkstemp(text=False) os.write(fd, (newline.join(LINES) + newline).encode()) os.close(fd) try: self.assertEqual( reporter._get_lines_from_file(filename, 3, 2), (1, LINES[1:3], LINES[3], LINES[4:]) ) finally: os.unlink(filename) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertNotIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) def test_reporting_of_nested_exceptions(self): request = self.rf.get('/test_view/') try: try: raise AttributeError(mark_safe('<p>Top level</p>')) except AttributeError as explicit: try: raise ValueError(mark_safe('<p>Second exception</p>')) from explicit except ValueError: raise IndexError(mark_safe('<p>Final exception</p>')) except Exception: # Custom exception handler, just pass it into ExceptionReporter exc_type, exc_value, tb = sys.exc_info() explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:' implicit_exc = 'During handling of the above exception ({0}), another exception occurred:' reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = 
reporter.get_traceback_html() # Both messages are twice on page -- one rendered as html, # one as plain text (for pastebin) self.assertEqual(2, html.count(explicit_exc.format('&lt;p&gt;Top level&lt;/p&gt;'))) self.assertEqual(2, html.count(implicit_exc.format('&lt;p&gt;Second exception&lt;/p&gt;'))) self.assertEqual(10, html.count('&lt;p&gt;Final exception&lt;/p&gt;')) text = reporter.get_traceback_text() self.assertIn(explicit_exc.format('<p>Top level</p>'), text) self.assertIn(implicit_exc.format('<p>Second exception</p>'), text) self.assertEqual(3, text.count('<p>Final exception</p>')) def test_reporting_frames_without_source(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" namespace = {} code = compile(source, 'generated', 'exec') exec(code, namespace) except Exception: exc_type, exc_value, tb = sys.exc_info() request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() last_frame = frames[-1] self.assertEqual(last_frame['context_line'], '<source code not available>') self.assertEqual(last_frame['filename'], 'generated') self.assertEqual(last_frame['function'], 'funcName') self.assertEqual(last_frame['lineno'], 2) html = reporter.get_traceback_html() self.assertIn('generated in funcName', html) text = reporter.get_traceback_text() self.assertIn('"generated" in funcName', text) def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">I&#39;m a little teapot</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) 
self.assertNotIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report</h1>', html) self.assertIn('<pre class="exception_value">I&#39;m a little teapot</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertNotIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_non_utf8_values_handling(self): "Non-UTF-8 exceptions/values should not make the output generation choke." try: class NonUtf8Output(Exception): def __repr__(self): return b'EXC\xe9EXC' somevar = b'VAL\xe9VAL' # NOQA raise NonUtf8Output() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('VAL\\xe9VAL', html) self.assertIn('EXC\\xe9EXC', html) def test_local_variable_escaping(self): """Safe strings in local variables are escaped.""" try: local = mark_safe('<p>Local variable</p>') raise ValueError(local) except Exception: exc_type, exc_value, tb = sys.exc_info() html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html() self.assertIn('<td class="code"><pre>&#39;&lt;p&gt;Local variable&lt;/p&gt;&#39;</pre></td>', html) def test_unprintable_values_handling(self): "Unprintable values should not make the output generation choke." 
try: class OomOutput: def __repr__(self): raise MemoryError('OOM') oomvalue = OomOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<td class="code"><pre>Error in formatting', html) def test_too_large_values_handling(self): "Large values should not create a large HTML." large = 256 * 1024 repr_of_str_adds = len(repr('')) try: class LargeOutput: def __repr__(self): return repr('A' * large) largevalue = LargeOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb self.assertIn('&lt;trimmed %d bytes string&gt;' % (large + repr_of_str_adds,), html) def test_encoding_error(self): """ A UnicodeError displays a portion of the problematic string. HTML in safe strings is escaped. """ try: mark_safe('abcdefghijkl<p>mnὀp</p>qrstuwxyz').encode('ascii') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<h2>Unicode error hint</h2>', html) self.assertIn('The string that could not be encoded/decoded was: ', html) self.assertIn('<strong>&lt;p&gt;mnὀp&lt;/p&gt;</strong>', html) def test_unfrozen_importlib(self): """ importlib is not a frozen app, but its loader thinks it's frozen which results in an ImportError. Refs #21443. 
""" try: request = self.rf.get('/test_view/') importlib.import_module('abc.def.invalid.name') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>%sError at /test_view/</h1>' % ('ModuleNotFound' if PY36 else 'Import'), html) def test_ignore_traceback_evaluation_exceptions(self): """ Don't trip over exceptions generated by crafted objects when evaluating them while cleansing (#24455). """ class BrokenEvaluation(Exception): pass def broken_setup(): raise BrokenEvaluation request = self.rf.get('/test_view/') broken_lazy = SimpleLazyObject(broken_setup) try: bool(broken_lazy) except BrokenEvaluation: exc_type, exc_value, tb = sys.exc_info() self.assertIn( "BrokenEvaluation", ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(), "Evaluation exception reason not mentioned in traceback" ) @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertIn("http://evil.com/", html) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. 
""" value = '<td>items</td><td class="code"><pre>&#39;Oops&#39;</pre></td>' # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML( '<td>items</td><td class="code"><pre>&lt;InMemoryUploadedFile: ' 'items (application/octet-stream)&gt;</pre></td>', html ) # COOKES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<td>items</td><td class="code"><pre>&#39;Oops&#39;</pre></td>', html) def test_exception_fetching_user(self): """ The error page can be rendered if the current user can't be retrieved (such as when the database is unavailable). 
""" class ExceptionUser: def __str__(self): raise Exception() request = self.rf.get('/test_view/') request.user = ExceptionUser() try: raise ValueError('Oops') except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>[unable to retrieve the current user]</p>', html) text = reporter.get_traceback_text() self.assertIn('USER: [unable to retrieve the current user]', text) class PlainTextReportTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError at /test_view/', text) self.assertIn("Can't find my keys", text) self.assertIn('Request Method:', text) self.assertIn('Request URL:', text) self.assertIn('USER: jacob', text) self.assertIn('Exception Type:', text) self.assertIn('Exception Value:', text) self.assertIn('Traceback:', text) self.assertIn('Request information:', text) self.assertNotIn('Request data not supplied', text) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError', text) self.assertIn("Can't find my keys", text) self.assertNotIn('Request Method:', text) self.assertNotIn('Request URL:', text) self.assertNotIn('USER:', text) self.assertIn('Exception Type:', 
text) self.assertIn('Exception Value:', text) self.assertIn('Traceback:', text) self.assertIn('Request data not supplied', text) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) reporter.get_traceback_text() def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(DEBUG=True) def test_template_exception(self): request = self.rf.get('/test_view/') try: render(request, 'debug/template_error.html') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() templ_path = Path(Path(__file__).parent.parent, 'templates', 'debug', 'template_error.html') self.assertIn( 'Template error:\n' 'In template %(path)s, error at line 2\n' ' \'cycle\' tag requires at least two arguments\n' ' 1 : Template with error:\n' ' 2 : {%% cycle %%} \n' ' 3 : ' % {'path': templ_path}, text ) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. 
""" # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn('items = <InMemoryUploadedFile:', text) # COOKES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("http://evil.com/", text) class ExceptionReportTestMixin: # Mixin used in the ExceptionReporterFilterTests and # AjaxResponseExceptionReporterFilter tests below breakfast_data = {'sausage-key': 'sausage-value', 'baked-beans-key': 'baked-beans-value', 'hash-brown-key': 'hash-brown-value', 'bacon-key': 'bacon-value'} def verify_unsafe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # All variables are shown. 
self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertContains(response, k, status_code=500) self.assertContains(response, v, status_code=500) def verify_safe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Non-sensitive variable's name and value are shown. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) # Sensitive variable's name is shown but not its value. self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # Non-sensitive POST parameters' values are shown. self.assertContains(response, 'baked-beans-value', status_code=500) self.assertContains(response, 'hash-brown-value', status_code=500) # Sensitive POST parameters' values are not shown. self.assertNotContains(response, 'sausage-value', status_code=500) self.assertNotContains(response, 'bacon-value', status_code=500) def verify_paranoid_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that no variables or POST parameters are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Show variable names but not their values. 
self.assertContains(response, 'cooked_eggs', status_code=500) self.assertNotContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # No POST parameters' values are shown. self.assertNotContains(response, v, status_code=500) def verify_unsafe_email(self, view, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the email report. """ with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertIn('worcestershire', body_html) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertIn(k, body_plain) self.assertIn(v, body_plain) self.assertIn(k, body_html) self.assertIn(v, body_html) def verify_safe_email(self, view, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the email report. 
""" with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertNotIn('worcestershire', body_html) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertIn(k, body_plain) # Non-sensitive POST parameters' values are shown. self.assertIn('baked-beans-value', body_plain) self.assertIn('hash-brown-value', body_plain) self.assertIn('baked-beans-value', body_html) self.assertIn('hash-brown-value', body_html) # Sensitive POST parameters' values are not shown. self.assertNotIn('sausage-value', body_plain) self.assertNotIn('bacon-value', body_plain) self.assertNotIn('sausage-value', body_html) self.assertNotIn('bacon-value', body_html) def verify_paranoid_email(self, view): """ Asserts that no variables or POST parameters are displayed in the email report. """ with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body = str(email.body) self.assertNotIn('cooked_eggs', body) self.assertNotIn('scrambled', body) self.assertNotIn('sauce', body) self.assertNotIn('worcestershire', body) for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. 
self.assertIn(k, body) # No POST parameters' values are shown. self.assertNotIn(v, body) @override_settings(ROOT_URLCONF='view_tests.urls') class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports (#14614). """ rf = RequestFactory() def test_non_sensitive_request(self): """ Everything (request info and frame variables) can bee seen in the default error reports for non-sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) def test_sensitive_request(self): """ Sensitive POST parameters and frame variables cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view) self.verify_unsafe_email(sensitive_view) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view) self.verify_safe_email(sensitive_view) def test_paranoid_request(self): """ No POST parameters and frame variables can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view) self.verify_unsafe_email(paranoid_view) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view) self.verify_paranoid_email(paranoid_view) def test_multivalue_dict_key_error(self): """ #21098 -- Sensitive POST parameters cannot be seen in the error reports for if request.POST['nonexistent_key'] throws an error. 
""" with self.settings(DEBUG=True): self.verify_unsafe_response(multivalue_dict_key_error) self.verify_unsafe_email(multivalue_dict_key_error) with self.settings(DEBUG=False): self.verify_safe_response(multivalue_dict_key_error) self.verify_safe_email(multivalue_dict_key_error) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. """ with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) def test_sensitive_method(self): """ The sensitive_variables decorator works with object methods. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False) self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_method_view, check_for_POST_params=False) self.verify_safe_email(sensitive_method_view, check_for_POST_params=False) def test_sensitive_function_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as arguments to the decorated function. 
""" with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_args_function_caller) self.verify_unsafe_email(sensitive_args_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False) def test_sensitive_function_keyword_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as keyword arguments to the decorated function. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_kwargs_function_caller) self.verify_unsafe_email(sensitive_kwargs_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False) def test_callable_settings(self): """ Callable settings should not be evaluated in the debug page (#21345). """ def callable_setting(): return "This should not be displayed" with self.settings(DEBUG=True, FOOBAR=callable_setting): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_callable_settings_forbidding_to_set_attributes(self): """ Callable settings which forbid to set attributes should not break the debug page (#23070). """ class CallableSettingWithSlots: __slots__ = [] def __call__(self): return "This should not be displayed" with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_dict_setting_with_non_str_key(self): """ A dict setting containing a non-string key should not break the debug page (#12744). 
""" with self.settings(DEBUG=True, FOOBAR={42: None}): response = self.client.get('/raises500/') self.assertContains(response, 'FOOBAR', status_code=500) def test_sensitive_settings(self): """ The debug page should not show some sensitive settings (password, secret key, ...). """ sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: with self.settings(DEBUG=True, **{setting: "should not be displayed"}): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) def test_settings_with_sensitive_keys(self): """ The debug page should filter out some sensitive information found in dict settings. """ sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: FOOBAR = { setting: "should not be displayed", 'recursive': {setting: "should not be displayed"}, } with self.settings(DEBUG=True, FOOBAR=FOOBAR): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) class AjaxResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports. Here we specifically test the plain text 500 debug-only error page served when it has been detected the request was sent by JS code. We don't check for (non)existence of frames vars in the traceback information section of the response content because we don't include them in these error pages. Refs #14614. """ rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest') def test_non_sensitive_request(self): """ Request info can bee seen in the default error reports for non-sensitive requests. 
""" with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) def test_sensitive_request(self): """ Sensitive POST parameters cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view, check_for_vars=False) def test_paranoid_request(self): """ No POST parameters can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view, check_for_vars=False) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. 
""" with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') def test_ajax_response_encoding(self): response = self.client.get('/raises500/', HTTP_X_REQUESTED_WITH='XMLHttpRequest') self.assertEqual(response['Content-Type'], 'text/plain; charset=utf-8') class HelperFunctionTests(SimpleTestCase): def test_cleanse_setting_basic(self): self.assertEqual(cleanse_setting('TEST', 'TEST'), 'TEST') self.assertEqual(cleanse_setting('PASSWORD', 'super_secret'), CLEANSED_SUBSTITUTE) def test_cleanse_setting_ignore_case(self): self.assertEqual(cleanse_setting('password', 'super_secret'), CLEANSED_SUBSTITUTE) def test_cleanse_setting_recurses_in_dictionary(self): initial = {'login': 'cooper', 'password': 'secret'} expected = {'login': 'cooper', 'password': CLEANSED_SUBSTITUTE} self.assertEqual(cleanse_setting('SETTING_NAME', initial), expected)
py
b41138a33779887be08e6b2fbf120a47ce710e90
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# */AIPND-revision/intropyproject-classify-pet-images/classify_images.py
#
# PROGRAMMER: Brahim Kellou
# DATE CREATED: 02/18/2020
# REVISED DATE:
# PURPOSE: Create a function classify_images that uses the classifier function
#          to create the classifier labels and then compares the classifier
#          labels to the pet image labels, extending each results_dic entry
#          with the classifier label (index 1) and the match flag (index 2).
##

# Imports classifier function for using CNN to classify images
from classifier import classifier


def classify_images(images_dir, results_dic, model):
    """
    Creates classifier labels with the classifier() function, compares pet
    labels to the classifier labels, and extends each results_dic entry
    with the classifier label and the label-match flag.

    Classifier labels are normalized to lowercase with surrounding
    whitespace stripped so they can be compared with the pet image labels.
    The classifier may return several comma-separated breed names for one
    image (e.g. 'maltese dog, maltese terrier, maltese'); the pet label
    counts as a match when it appears within that string.

    Parameters:
      images_dir - The (full) path to the folder of images that are to be
                   classified by the classifier function (string)
      results_dic - Results Dictionary with 'key' as image filename and
                    'value' as a list:
                      index 0 = pet image label (string)
                      --- items below are appended by this function ---
                      index 1 = classifier label (string)
                      index 2 = 1/0 (int); 1 = labels match, 0 = no match
      model - CNN model architecture used by the classifier function;
              values must be either: resnet, alexnet, vgg (string)
    Returns:
      None - results_dic is mutable, so it is updated in place.
    """
    for key in results_dic:
        try:
            # NOTE(review): assumes images_dir already ends with a path
            # separator (e.g. 'pet_images/') -- confirm with the caller.
            image_path = images_dir + key
            # Normalize the classifier output the same way the pet labels
            # were built: lowercase, no leading/trailing whitespace.
            model_label = classifier(image_path, model).lower().strip()

            # Pet image label is at index 0 of the results entry.
            pet_label = results_dic[key][0]
            # 1 when the pet label appears within the (possibly
            # comma-separated) classifier label, 0 otherwise.
            is_match = 1 if pet_label in model_label else 0

            results_dic[key].extend([model_label, is_match])
        # Catch error when the image file cannot be opened.
        except OSError as e:
            print("Error: file doesn't exist:", str(e))
py
b411390e2dda970f7b9f4526b172c69095e1f505
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( ExtractorError, extract_attributes, int_or_none, js_to_json, merge_dicts, ) class PokemonIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?pokemon\.com/[a-z]{2}(?:.*?play=(?P<id>[a-z0-9]{32})|/(?:[^/]+/)+(?P<display_id>[^/?#&]+))' _TESTS = [{ 'url': 'https://www.pokemon.com/us/pokemon-episodes/20_30-the-ol-raise-and-switch/', 'md5': '2fe8eaec69768b25ef898cda9c43062e', 'info_dict': { 'id': 'afe22e30f01c41f49d4f1d9eab5cd9a4', 'ext': 'mp4', 'title': 'The Ol’ Raise and Switch!', 'description': 'md5:7db77f7107f98ba88401d3adc80ff7af', }, 'add_id': ['LimelightMedia'], }, { # no data-video-title 'url': 'https://www.pokemon.com/fr/episodes-pokemon/films-pokemon/pokemon-lascension-de-darkrai-2008', 'info_dict': { 'id': 'dfbaf830d7e54e179837c50c0c6cc0e1', 'ext': 'mp4', 'title': "Pokémon : L'ascension de Darkrai", 'description': 'md5:d1dbc9e206070c3e14a06ff557659fb5', }, 'add_id': ['LimelightMedia'], 'params': { 'skip_download': True, }, }, { 'url': 'http://www.pokemon.com/uk/pokemon-episodes/?play=2e8b5c761f1d4a9286165d7748c1ece2', 'only_matching': True, }, { 'url': 'http://www.pokemon.com/fr/episodes-pokemon/18_09-un-hiver-inattendu/', 'only_matching': True, }, { 'url': 'http://www.pokemon.com/de/pokemon-folgen/01_20-bye-bye-smettbo/', 'only_matching': True, }] def _real_extract(self, url): video_id, display_id = self._match_valid_url(url).groups() webpage = self._download_webpage(url, video_id or display_id) video_data = extract_attributes(self._search_regex( r'(<[^>]+data-video-id="%s"[^>]*>)' % (video_id if video_id else '[a-z0-9]{32}'), webpage, 'video data element')) video_id = video_data['data-video-id'] title = video_data.get('data-video-title') or self._html_search_meta( 'pkm-title', webpage, ' title', default=None) or self._search_regex( r'<h1[^>]+\bclass=["\']us-title[^>]+>([^<]+)', webpage, 'title') return { '_type': 'url_transparent', 'id': 
video_id, 'url': 'limelight:media:%s' % video_id, 'title': title, 'description': video_data.get('data-video-summary'), 'thumbnail': video_data.get('data-video-poster'), 'series': 'Pokémon', 'season_number': int_or_none(video_data.get('data-video-season')), 'episode': title, 'episode_number': int_or_none(video_data.get('data-video-episode')), 'ie_key': 'LimelightMedia', } class PokemonWatchIE(InfoExtractor): _VALID_URL = r'https?://watch\.pokemon\.com/[a-z]{2}-[a-z]{2}/(?:#/)?player(?:\.html)?\?id=(?P<id>[a-z0-9]{32})' _API_URL = 'https://www.pokemon.com/api/pokemontv/v2/channels/{0:}' _TESTS = [{ 'url': 'https://watch.pokemon.com/en-us/player.html?id=8309a40969894a8e8d5bc1311e9c5667', 'md5': '62833938a31e61ab49ada92f524c42ff', 'info_dict': { 'id': '8309a40969894a8e8d5bc1311e9c5667', 'ext': 'mp4', 'title': 'Lillier and the Staff!', 'description': 'md5:338841b8c21b283d24bdc9b568849f04', } }, { 'url': 'https://watch.pokemon.com/en-us/#/player?id=3fe7752ba09141f0b0f7756d1981c6b2', 'only_matching': True }, { 'url': 'https://watch.pokemon.com/de-de/player.html?id=b3c402e111a4459eb47e12160ab0ba07', 'only_matching': True }] def _extract_media(self, channel_array, video_id): for channel in channel_array: for media in channel.get('media'): if media.get('id') == video_id: return media return None def _real_extract(self, url): video_id = self._match_id(url) info = { '_type': 'url', 'id': video_id, 'url': 'limelight:media:%s' % video_id, 'ie_key': 'LimelightMedia', } # API call can be avoided entirely if we are listing formats if self.get_param('listformats', False): return info webpage = self._download_webpage(url, video_id) build_vars = self._parse_json(self._search_regex( r'(?s)buildVars\s*=\s*({.*?})', webpage, 'build vars'), video_id, transform_source=js_to_json) region = build_vars.get('region') channel_array = self._download_json(self._API_URL.format(region), video_id) video_data = self._extract_media(channel_array, video_id) if video_data is None: raise ExtractorError( 
class PokemonSoundLibraryIE(InfoExtractor):
    # Matches the sound-library site root; individual songs have no
    # permalinks, so the whole site is one playlist.
    _VALID_URL = r'https?://soundlibrary\.pokemon\.co\.jp'
    _TESTS = [{
        'url': 'https://soundlibrary.pokemon.co.jp/',
        'info_dict': {
            'title': 'Pokémon Diamond and Pearl Sound Tracks',
        },
        'playlist_mincount': 149,
    }]

    def _real_extract(self, url):
        """Scrape the music-box page and return every song as one playlist."""
        musicbox_webpage = self._download_webpage(
            'https://soundlibrary.pokemon.co.jp/musicbox', None,
            'Downloading list of songs')
        # Song titles are the <span> text preceding the Japanese
        # "add to my songs" label on the music-box page.
        song_titles = [x.group(1) for x in re.finditer(r'<span>([^>]+?)</span><br/>をてもち曲に加えます。', musicbox_webpage)]
        # Skip the first four matches and take every other one.
        # NOTE(review): presumably drops page chrome / duplicate entries --
        # confirm against the live page markup.
        song_titles = song_titles[4::2]

        # each songs don't have permalink; instead we return all songs at once
        song_entries = [{
            'id': f'pokemon-soundlibrary-{song_id}',
            'url': f'https://soundlibrary.pokemon.co.jp/api/assets/signing/sounds/wav/{song_id}.wav',
            # note: the server always serves MP3 files, despite its extension of the URL above
            'ext': 'mp3',
            'acodec': 'mp3',
            'vcodec': 'none',
            'title': song_title,
            'track': song_title,
            'artist': 'Nintendo / Creatures Inc. / GAME FREAK inc.',
            'uploader': 'Pokémon',
            'release_year': 2006,
            'release_date': '20060928',
            # Track numbers are simply the 1-based position on the page.
            'track_number': song_id,
            'album': 'Pokémon Diamond and Pearl',
        } for song_id, song_title in enumerate(song_titles, 1)]

        return self.playlist_result(song_entries, playlist_title='Pokémon Diamond and Pearl Sound Tracks')
py
b41139719a133e814ff9fa8f6a238eed71f73895
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""Smoke-test script: print a BaZi chart for a fixed birth datetime."""
import os
import sys

# Base directory is the project root; append it so the project's packages
# are importable when this script is run directly.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)

import datetime
from yxf_yixue.bazi import BaziApi
from yxf_yixue.wannianli import WannianliApi  # noqa: F401 (kept: part of the file's public imports)

if __name__ == '__main__':
    api = BaziApi()
    # Demo birth moment; xingbie='男' selects the male chart.
    birth = datetime.datetime(1996, 7, 12, 12, 40)
    print(api.paipan(birth, xingbie='男'))
    print(api.print_pan())
    # api.get_lianghuafenxi()
py
b411398bb0ce7e3da44a634d50f7683fe4480746
import wandb
import torch
import torch.optim as optim
import torch.nn as nn
import numpy as np
from model import TransZero
from dataset import SUNDataLoader
from helper_func import eval_zs_gzsl

# init wandb from config file
wandb.init(project='TransZero', config='wandb_config/sun_gzsl.yaml')
config = wandb.config
print('Config file from wandb:', config)

# load dataset
dataloader = SUNDataLoader('.', config.device)

# set random seed for torch (CPU and all CUDA devices) and numpy
seed = config.random_seed
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
np.random.seed(seed)

# TransZero model
model = TransZero(config, dataloader.att, dataloader.w2v_att,
                  dataloader.seenclasses, dataloader.unseenclasses).to(config.device)
optimizer = optim.SGD(model.parameters(), lr=0.0001, weight_decay=0.0001,
                      momentum=0.9)

# main loop: niters is the total iteration count over all epochs, so
# reporting every `report_interval` iterations is once per epoch.
niters = dataloader.ntrain * config.epochs//config.batch_size
report_interval = niters//config.epochs
# best_performance holds [acc_unseen, acc_seen, H, acc_zs] at the best H seen.
best_performance = [0, 0, 0, 0]
best_performance_zsl = 0
for i in range(0, niters):
    model.train()
    optimizer.zero_grad()
    batch_label, batch_feature, batch_att = dataloader.next_batch(config.batch_size)
    out_package = model(batch_feature)

    # compute_loss needs the ground-truth labels alongside the model outputs.
    in_package = out_package
    in_package['batch_label'] = batch_label
    out_package=model.compute_loss(in_package)
    loss, loss_CE, loss_cal, loss_reg = out_package['loss'], out_package[
        'loss_CE'], out_package['loss_cal'], out_package['loss_reg']
    loss.backward()
    optimizer.step()

    # report result: evaluate once per epoch and track the best stats.
    if i % report_interval==0:
        print('-'*30)
        acc_seen, acc_novel, H, acc_zs = eval_zs_gzsl(
            dataloader, model, config.device, bias_seen=0, bias_unseen=0)
        # Keep the full stats from the evaluation with the best H so far.
        if H > best_performance[2]:
            best_performance = [acc_novel, acc_seen, H, acc_zs]
        # ZSL accuracy is tracked separately from the GZSL best.
        if acc_zs > best_performance_zsl:
            best_performance_zsl = acc_zs
        print('iter/epoch=%d/%d | loss=%.3f, loss_CE=%.3f, loss_cal=%.3f, '
              'loss_reg=%.3f | acc_unseen=%.3f, acc_seen=%.3f, H=%.3f | '
              'acc_zs=%.3f' % (
                  i, int(i//report_interval), loss.item(), loss_CE.item(),
                  loss_cal.item(), loss_reg.item(), best_performance[0],
                  best_performance[1], best_performance[2],
                  best_performance_zsl))
        wandb.log({
            'iter': i,
            'loss': loss.item(),
            'loss_CE': loss_CE.item(),
            'loss_cal': loss_cal.item(),
            'loss_reg': loss_reg.item(),
            'acc_unseen': acc_novel,
            'acc_seen': acc_seen,
            'H': H,
            'acc_zs': acc_zs,
            'best_acc_unseen': best_performance[0],
            'best_acc_seen': best_performance[1],
            'best_H': best_performance[2],
            'best_acc_zs': best_performance_zsl
        })
py
b4113b9e3890737913348708e8c3e01518aeeac8
import socket, struct

from PyQt5 import QtCore


class ChatClient(QtCore.QObject):
    """Socket-based chat client that reports events through Qt signals."""

    # Emitted with each chat message that should be shown in the UI.
    receiveMessageTrigger = QtCore.pyqtSignal(str)
    # Emitted with timestamp control messages (the host keeps them local).
    timeStampTrigger = QtCore.pyqtSignal(str)

    def __init__(self, addr, port, isHost):
        # BUG FIX: this class derives from QObject, not QThread, so the
        # base-class initializer must be QObject's (via super()).
        super().__init__()
        self.clientSocket = None
        self.addr = addr
        self.port = port
        self.isHost = isHost
        self.intializeSocket()

    def intializeSocket(self):
        """Connect to the server; return True on success, None on failure."""
        try:
            self.clientSocket = socket.socket()
            self.clientSocket.connect((self.addr, self.port))
            return True
        except OSError:
            # Connection refused, unreachable host, bad address, ...
            return None

    def receiveMessage(self):
        """Read one length-prefixed UTF-8 message; None on EOF or bad frame."""
        try:
            binMessageSize = self.clientSocket.recv(2)
            if not binMessageSize:
                return None
            messageSize = struct.unpack('h', binMessageSize)[0]
            message = self.clientSocket.recv(messageSize)
            if not message:
                return None
            return message.decode('utf-8')
        except struct.error:
            # Fewer than 2 bytes arrived for the length prefix.
            return None

    def listenForIncomingMessages(self):
        """Blocking receive loop; emits signals until the peer disconnects."""
        while True:
            message = self.receiveMessage()
            if not message:
                return None
            if message.startswith('1234joined'):
                user = message.split(':')[1]
                message = user + " has joined the chat."
            if message.startswith('2345TimeStamp'):
                # Hosts generate timestamps themselves; only clients display them.
                if not self.isHost:
                    self.timeStampTrigger.emit(message)
                continue
            self.receiveMessageTrigger.emit(message)

    def sendMessage(self, message):
        """Send a message prefixed with its 2-byte (native-order) byte length."""
        payload = message.encode('utf-8')
        # BUG FIX: the prefix must be the byte length of the encoded payload,
        # not the character count, which is smaller for non-ASCII text and
        # made the receiver truncate such messages.
        messageSize = struct.pack('h', len(payload))
        if not self.clientSocket.send(messageSize) or not self.clientSocket.send(payload):
            return None
        return True

    def __del__(self):
        # The socket is None when the connection attempt failed in __init__.
        if self.clientSocket is not None:
            self.clientSocket.close()
py
b4113bb42e3f60a3dde7c7a3a554c0e84c544fee
class FilterEqual(BaseFilter):
    """Exact-equality filter (with checkbox-to-bool coercion)."""

    name = lazy_gettext("Equal to")

    def apply(self, query, value):
        # HTML checkboxes submit "y" when checked; for boolean columns,
        # coerce that marker to a real True before querying.
        if self.datamodel.is_boolean(self.column_name) and value == "y":
            value = True
        return query.filter(**{"%s" % self.column_name: value})
class FilterRelationManyToManyEqual(FilterRelation):
    """Filter a many-to-many relation field by a related object."""

    name = lazy_gettext("Relation as Many")

    def apply(self, query, value):
        # Resolve the submitted primary-key value to the referenced document.
        rel_obj = self.datamodel.get_related_obj(self.column_name, value)
        # NOTE(review): list-membership is expressed with 'icontains' on the
        # related object here -- confirm this is the intended MongoEngine
        # "list contains document" query rather than a string operator.
        flt = {"%s__%s" % (self.column_name, "icontains"): rel_obj}
        return query.filter(**flt)
py
b4113bd02e53f6a3f616a37e4c719e22a0d48a27
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: processor.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() import kv_pb2 as kv__pb2 import storage_basic_pb2 as storage__basic__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='processor.proto', package='com.webank.ai.eggroll.api.computing.processor', syntax='proto3', serialized_options=None, serialized_pb=_b('\n\x0fprocessor.proto\x12-com.webank.ai.eggroll.api.computing.processor\x1a\x08kv.proto\x1a\x13storage-basic.proto\"#\n\x0bProcessConf\x12\x14\n\x0cnamingPolicy\x18\x01 \x01(\t\"d\n\x08TaskInfo\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x13\n\x0b\x66unction_id\x18\x02 \x01(\t\x12\x16\n\x0e\x66unction_bytes\x18\x03 \x01(\x0c\x12\x1a\n\x12isInPlaceComputing\x18\x04 \x01(\x08\"\xe3\x01\n\x0cUnaryProcess\x12\x45\n\x04info\x18\x01 \x01(\x0b\x32\x37.com.webank.ai.eggroll.api.computing.processor.TaskInfo\x12\x42\n\x07operand\x18\x02 \x01(\x0b\x32\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12H\n\x04\x63onf\x18\x03 \x01(\x0b\x32:.com.webank.ai.eggroll.api.computing.processor.ProcessConf\"\xa3\x02\n\rBinaryProcess\x12\x45\n\x04info\x18\x01 \x01(\x0b\x32\x37.com.webank.ai.eggroll.api.computing.processor.TaskInfo\x12?\n\x04left\x18\x02 \x01(\x0b\x32\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12@\n\x05right\x18\x03 \x01(\x0b\x32\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12H\n\x04\x63onf\x18\x04 
\x01(\x0b\x32:.com.webank.ai.eggroll.api.computing.processor.ProcessConf2\xd7\n\n\x0eProcessService\x12u\n\x03map\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12{\n\tmapValues\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12w\n\x04join\x12<.com.webank.ai.eggroll.api.computing.processor.BinaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12s\n\x06reduce\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a*.com.webank.ai.eggroll.api.storage.Operand0\x01\x12\x7f\n\rmapPartitions\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12v\n\x04glom\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12x\n\x06sample\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12\x80\x01\n\rsubtractByKey\x12<.com.webank.ai.eggroll.api.computing.processor.BinaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12x\n\x06\x66ilter\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12x\n\x05union\x12<.com.webank.ai.eggroll.api.computing.processor.BinaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocator\x12y\n\x07\x66latMap\x12;.com.webank.ai.eggroll.api.computing.processor.UnaryProcess\x1a\x31.com.webank.ai.eggroll.api.storage.StorageLocatorb\x06proto3') , dependencies=[kv__pb2.DESCRIPTOR,storage__basic__pb2.DESCRIPTOR,]) _PROCESSCONF = _descriptor.Descriptor( name='ProcessConf', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessConf', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='namingPolicy', 
full_name='com.webank.ai.eggroll.api.computing.processor.ProcessConf.namingPolicy', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=97, serialized_end=132, ) _TASKINFO = _descriptor.Descriptor( name='TaskInfo', full_name='com.webank.ai.eggroll.api.computing.processor.TaskInfo', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='task_id', full_name='com.webank.ai.eggroll.api.computing.processor.TaskInfo.task_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='function_id', full_name='com.webank.ai.eggroll.api.computing.processor.TaskInfo.function_id', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='function_bytes', full_name='com.webank.ai.eggroll.api.computing.processor.TaskInfo.function_bytes', index=2, number=3, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='isInPlaceComputing', full_name='com.webank.ai.eggroll.api.computing.processor.TaskInfo.isInPlaceComputing', index=3, 
number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=134, serialized_end=234, ) _UNARYPROCESS = _descriptor.Descriptor( name='UnaryProcess', full_name='com.webank.ai.eggroll.api.computing.processor.UnaryProcess', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='info', full_name='com.webank.ai.eggroll.api.computing.processor.UnaryProcess.info', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='operand', full_name='com.webank.ai.eggroll.api.computing.processor.UnaryProcess.operand', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='conf', full_name='com.webank.ai.eggroll.api.computing.processor.UnaryProcess.conf', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=237, serialized_end=464, ) _BINARYPROCESS = _descriptor.Descriptor( name='BinaryProcess', 
full_name='com.webank.ai.eggroll.api.computing.processor.BinaryProcess', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='info', full_name='com.webank.ai.eggroll.api.computing.processor.BinaryProcess.info', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='left', full_name='com.webank.ai.eggroll.api.computing.processor.BinaryProcess.left', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='right', full_name='com.webank.ai.eggroll.api.computing.processor.BinaryProcess.right', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='conf', full_name='com.webank.ai.eggroll.api.computing.processor.BinaryProcess.conf', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=467, serialized_end=758, ) _UNARYPROCESS.fields_by_name['info'].message_type = _TASKINFO _UNARYPROCESS.fields_by_name['operand'].message_type = storage__basic__pb2._STORAGELOCATOR _UNARYPROCESS.fields_by_name['conf'].message_type = _PROCESSCONF 
_BINARYPROCESS.fields_by_name['info'].message_type = _TASKINFO _BINARYPROCESS.fields_by_name['left'].message_type = storage__basic__pb2._STORAGELOCATOR _BINARYPROCESS.fields_by_name['right'].message_type = storage__basic__pb2._STORAGELOCATOR _BINARYPROCESS.fields_by_name['conf'].message_type = _PROCESSCONF DESCRIPTOR.message_types_by_name['ProcessConf'] = _PROCESSCONF DESCRIPTOR.message_types_by_name['TaskInfo'] = _TASKINFO DESCRIPTOR.message_types_by_name['UnaryProcess'] = _UNARYPROCESS DESCRIPTOR.message_types_by_name['BinaryProcess'] = _BINARYPROCESS _sym_db.RegisterFileDescriptor(DESCRIPTOR) ProcessConf = _reflection.GeneratedProtocolMessageType('ProcessConf', (_message.Message,), dict( DESCRIPTOR = _PROCESSCONF, __module__ = 'processor_pb2' # @@protoc_insertion_point(class_scope:com.webank.ai.eggroll.api.computing.processor.ProcessConf) )) _sym_db.RegisterMessage(ProcessConf) TaskInfo = _reflection.GeneratedProtocolMessageType('TaskInfo', (_message.Message,), dict( DESCRIPTOR = _TASKINFO, __module__ = 'processor_pb2' # @@protoc_insertion_point(class_scope:com.webank.ai.eggroll.api.computing.processor.TaskInfo) )) _sym_db.RegisterMessage(TaskInfo) UnaryProcess = _reflection.GeneratedProtocolMessageType('UnaryProcess', (_message.Message,), dict( DESCRIPTOR = _UNARYPROCESS, __module__ = 'processor_pb2' # @@protoc_insertion_point(class_scope:com.webank.ai.eggroll.api.computing.processor.UnaryProcess) )) _sym_db.RegisterMessage(UnaryProcess) BinaryProcess = _reflection.GeneratedProtocolMessageType('BinaryProcess', (_message.Message,), dict( DESCRIPTOR = _BINARYPROCESS, __module__ = 'processor_pb2' # @@protoc_insertion_point(class_scope:com.webank.ai.eggroll.api.computing.processor.BinaryProcess) )) _sym_db.RegisterMessage(BinaryProcess) _PROCESSSERVICE = _descriptor.ServiceDescriptor( name='ProcessService', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService', file=DESCRIPTOR, index=0, serialized_options=None, serialized_start=761, 
serialized_end=2128, methods=[ _descriptor.MethodDescriptor( name='map', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.map', index=0, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='mapValues', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.mapValues', index=1, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='join', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.join', index=2, containing_service=None, input_type=_BINARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='reduce', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.reduce', index=3, containing_service=None, input_type=_UNARYPROCESS, output_type=kv__pb2._OPERAND, serialized_options=None, ), _descriptor.MethodDescriptor( name='mapPartitions', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.mapPartitions', index=4, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='glom', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.glom', index=5, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='sample', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.sample', index=6, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='subtractByKey', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.subtractByKey', 
index=7, containing_service=None, input_type=_BINARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='filter', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.filter', index=8, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='union', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.union', index=9, containing_service=None, input_type=_BINARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), _descriptor.MethodDescriptor( name='flatMap', full_name='com.webank.ai.eggroll.api.computing.processor.ProcessService.flatMap', index=10, containing_service=None, input_type=_UNARYPROCESS, output_type=storage__basic__pb2._STORAGELOCATOR, serialized_options=None, ), ]) _sym_db.RegisterServiceDescriptor(_PROCESSSERVICE) DESCRIPTOR.services_by_name['ProcessService'] = _PROCESSSERVICE # @@protoc_insertion_point(module_scope)
py
b4113c6dd4f728dd3570b5ffd57061bf7f19dc70
#!/usr/bin/env python3
# encoding: utf-8
"""Demonstrate functools.singledispatch: a single generic entry point whose
implementation is selected by the runtime type of the first argument."""

from functools import singledispatch


@singledispatch
def foo(x):
    # Fallback implementation, used when no registered overload matches type(x).
    return 'x is something'


@foo.register
def foo_int(x: int):
    # Dispatched for int arguments (registered via the parameter annotation).
    return 'x is an int'


@foo.register
def foo_string(x: str):
    # Dispatched for str arguments; echoes the string back unchanged.
    return x
py
b4113cb5e8827889bdc9d095143b7610c26ceb95
import cv2

from CameraLib import baseCamera, faceTracking
from IotLib.log import Log
from IotLib.iotNode import IotNode
from IotLib.pyUtils import startThread


def startVideoStream(camera, config, debug=False):
    """Start video streaming on the HTTP port read from config (blocks the caller)."""
    port = config.getOrAddInt('video.httpVideoPort', 8000)
    streamer = VideoStream('video', parent=None, camera=camera, config=config, debug=debug)
    streamer.startUp()
    streamer.runVideoStreaming(port)


def startVideoStreamAsync(camera, config, debug=False):
    """Start video streaming in a dedicated thread and return immediately."""
    port = config.getOrAddInt('video.httpVideoPort', 8000)
    streamer = VideoStream('video', parent=None, camera=camera, config=config, debug=debug)
    streamer.startUp()
    startThread('VideoStream', target=streamer.runVideoStreaming, front=True, args=(port,))


class VideoStream(IotNode):
    """Video streaming node with optional face tracking."""

    def __init__(self, name, parent, camera, config, debug=False):
        """Construct with a camera (derived from baseCamera.BaseCamera) and a config object."""
        super(VideoStream, self).__init__(name, parent)
        self.camera = camera
        self.config = config
        self.debug = debug

    def startUp(self):
        """Prepare for streaming: regenerate index.html with the camera resolution
        and, if enabled in config, build the face tracker."""
        width, height = self.camera.resolution()
        # Best effort: rewrite index.html so the <img> matches the camera resolution.
        # Streaming still works with a stale template, so failures are logged, not fatal
        # (the original silently swallowed every exception here).
        try:
            indexHtmlFile = self.config.getOrAdd('video.indexHtml', '/home/pi/src/VideoLib/templates/index.html')
            with open(indexHtmlFile, "w", encoding="utf-8") as f:
                url = "{{ url_for('video_feed') }}"
                html = '<html> <head> <title>Video Streaming</title> </head> <body> <img src="%s" width="%i" height="%i"> </body></html>' % (url, width, height)
                f.writelines('%s\n' % (html))
        except Exception as ex:  # keep best-effort semantics, but record the reason
            Log.info('Cannot update index.html: %s' % ex)

        self.faceTracker = None
        # NOTE(review): the default is the string 'true', not the bool True — presumably
        # getOrAddBool parses it; confirm against the config API before changing.
        enableFaceTracking = self.config.getOrAddBool('video.enableFaceTracking', 'true')
        if enableFaceTracking:
            filePath = self.config.getOrAdd('video.classifier', '/home/pi/src/data/haarcascade_frontalface_alt.xml')
            self.classifier = cv2.CascadeClassifier(filePath)
            self.faceTracker = faceTracking.FaceTracker(self.classifier, debug=self.debug)
            Log.info('Streaming camera (%i x %i) with classifier: %s' % (width, height, filePath))
        else:
            Log.info('Streaming camera (%i x %i)' % (width, height))

    def runVideoStreaming(self, port):
        """Run video streaming (flask app) as a web. Blocks; call from a dedicated thread."""
        Log.info('starting httpVideoStreaming on port %d' % port)
        runVideoStreaming(port, self.camera, tracker=self.faceTracker, debug=self.debug, threaded=True)


# Flask is imported here (not at the top) to preserve the original module import order.
from flask import Flask, render_template, Response

_app = Flask(__name__)
# the camera object (derived from BaseCamera) for video capture
_streamingCamera = None
# face tracking object (FaceTracker)
_faceTracker = None


def runVideoStreaming(port, camera, classifier=None, tracker=None, debug=False, threaded=True):
    """Run video streaming (flask app) as a web.

    calling parameters:
        port: the port number for the http web
        camera: a camera instance that is derived from baseCamera.BaseCamera
        classifier: face tracking with FaceTracker using the specified classifier
        tracker: face tracking object (FaceTracker or instance of derived class)
        debug: whether to run the flask app under debug
        threaded: whether to run flask app threaded
    """
    global _streamingCamera, _faceTracker
    _streamingCamera = camera
    if tracker is not None:
        _faceTracker = tracker
    elif classifier is not None:
        # BUG FIX: FaceTracker lives in the faceTracking module; the bare name
        # FaceTracker raised NameError whenever only a classifier was supplied.
        _faceTracker = faceTracking.FaceTracker(classifier, debug=debug)
    _app.run(host='0.0.0.0', port=port, debug=debug, threaded=threaded, use_reloader=False)


@_app.route('/')
def index():
    """Video streaming home page."""
    return render_template('index.html')


def gen(camera):
    """Video streaming generator function."""
    while True:
        tracking = _faceTracker is not None  # and opencv_mode != 0
        img = camera.get_frame(tracking)
        # encode as a jpeg image and return it
        frame = cv2.imencode('.jpg', img)[1].tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')


@_app.route('/video_feed')
def video_feed():
    """ Video streaming route. """
    _streamingCamera.start(_faceTracker)
    return Response(gen(_streamingCamera),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
py
b4113cb9b38a972640add1fd7296436a4e9022b5
def calculate_charge(unit):
    """Return the electricity charge for ``unit`` consumed units.

    Slab tariff (as in the original script):
      * first 100 units : free
      * 101-200         : Rs 1 per unit above 100
      * 201-300         : Rs 100 flat + Rs 2 per unit above 200
      * above 300       : flat Rs 3 for EVERY unit (no slab credit)

    Raises:
        ValueError: if ``unit`` is negative.
    """
    if unit < 0:
        raise ValueError("units cannot be negative")
    if unit <= 100:
        return 0
    if unit <= 200:
        return (unit - 100) * 1
    if unit <= 300:
        return 100 + (unit - 200) * 2
    return unit * 3


def main():
    """Prompt for the customer details and print the invoice."""
    name = input("Enter Customer Name : ")
    billno = input("Enter Bill Number : ")
    unit = int(input("Enter Units : "))
    charge = calculate_charge(unit)
    print("***** Invoice *****")
    print("Customer Name : ", name)
    print("Bill No : ", billno)
    print("Total Unit : ", unit)
    print("Total Amount : ", charge)


if __name__ == "__main__":
    main()
py
b4113d060642825619fa9e7741a252e66ef43e8b
# -*- coding: utf-8 -*-
'''
This module implements :class:`IrregularlySampledSignal`, an array of analog
signals with samples taken at arbitrary time points.

:class:`IrregularlySampledSignal` inherits from :class:`basesignal.BaseSignal` which
derives from :class:`BaseNeo`, from :module:`neo.core.baseneo`, and from
:class:`quantities.Quantity`, which in turn inherits from :class:`numpy.ndarray`.

Inheritance from :class:`numpy.array` is explained here:
http://docs.scipy.org/doc/numpy/user/basics.subclassing.html

In brief:
* Initialization of a new object from constructor happens in :meth:`__new__`.
  This is where user-specified attributes are set.

* :meth:`__array_finalize__` is called for all new objects, including those
  created by slicing. This is where attributes are copied over from
  the old object.
'''

# needed for Python 3 compatibility
from __future__ import absolute_import, division, print_function

from copy import deepcopy, copy

import numpy as np
import quantities as pq

from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
from neo.core.basesignal import BaseSignal
from neo.core.channelindex import ChannelIndex
from neo.core.dataobject import DataObject


def _new_IrregularlySampledSignal(cls, times, signal, units=None, time_units=None, dtype=None,
                                  copy=True, name=None, file_origin=None, description=None,
                                  array_annotations=None, annotations=None, segment=None,
                                  channel_index=None):
    '''
    A function to map IrregularlySampledSignal.__new__ to a function that
    does not do the unit checking. This is needed for pickle to work.
    '''
    iss = cls(times=times, signal=signal, units=units, time_units=time_units, dtype=dtype,
              copy=copy, name=name, file_origin=file_origin, description=description,
              array_annotations=array_annotations, **annotations)
    # Parent links are restored here because __new__/__init__ reset them to None.
    iss.segment = segment
    iss.channel_index = channel_index
    return iss


class IrregularlySampledSignal(BaseSignal):
    '''
    An array of one or more analog signals with samples taken at arbitrary time points.

    A representation of one or more continuous, analog signals acquired at time
    :attr:`t_start` with a varying sampling interval. Each channel is sampled
    at the same time points.

    Inherits from :class:`quantities.Quantity`, which in turn inherits from
    :class:`numpy.ndarray`.

    *Usage*::

        >>> from neo.core import IrregularlySampledSignal
        >>> from quantities import s, nA
        >>>
        >>> irsig0 = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
        ...                                   units='mV', time_units='ms')
        >>> irsig1 = IrregularlySampledSignal([0.01, 0.03, 0.12]*s,
        ...                                   [[4, 5], [5, 4], [6, 3]]*nA)

    *Required attributes/properties*:
        :times: (quantity array 1D, numpy array 1D, or list)
            The time of each data point. Must have the same size as :attr:`signal`.
        :signal: (quantity array 2D, numpy array 2D, or list (data, channel))
            The data itself.
        :units: (quantity units)
            Required if the signal is a list or NumPy array, not if it is
            a :class:`Quantity`.
        :time_units: (quantity units) Required if :attr:`times` is a list or
            NumPy array, not if it is a :class:`Quantity`.

    *Recommended attributes/properties*:.
        :name: (str) A label for the dataset
        :description: (str) Text description.
        :file_origin: (str) Filesystem path or URL of the original data file.

    *Optional attributes/properties*:
        :dtype: (numpy dtype or str) Override the dtype of the signal array.
            (times are always floats).
        :copy: (bool) True by default.
        :array_annotations: (dict) Dict mapping strings to numpy arrays containing annotations \
                                   for all data points

    Note: Any other additional arguments are assumed to be user-specific
    metadata and stored in :attr:`annotations`.

    *Properties available on this object*:
        :sampling_intervals: (quantity array 1D) Interval between each adjacent
            pair of samples.
            (``times[1:] - times[:-1]``)
        :duration: (quantity scalar) Signal duration, read-only.
            (``times[-1] - times[0]``)
        :t_start: (quantity scalar) Time when signal begins, read-only.
            (``times[0]``)
        :t_stop: (quantity scalar) Time when signal ends, read-only.
            (``times[-1]``)

    *Slicing*:
        :class:`IrregularlySampledSignal` objects can be sliced. When this
        occurs, a new :class:`IrregularlySampledSignal` (actually a view) is
        returned, with the same metadata, except that :attr:`times` is also
        sliced in the same way.

    *Operations available on this object*:
        == != + * /
    '''

    _single_parent_objects = ('Segment', 'ChannelIndex')
    _single_parent_attrs = ('segment', 'channel_index')
    _quantity_attr = 'signal'
    _necessary_attrs = (('times', pq.Quantity, 1), ('signal', pq.Quantity, 2))

    def __new__(cls, times, signal, units=None, time_units=None, dtype=None, copy=True,
                name=None, file_origin=None, description=None, array_annotations=None,
                **annotations):
        '''
        Construct a new :class:`IrregularlySampledSignal` instance.

        This is called whenever a new :class:`IrregularlySampledSignal` is created from
        the constructor, but not when slicing.
        '''
        signal = cls._rescale(signal, units=units)
        if time_units is None:
            if hasattr(times, "units"):
                time_units = times.units
            else:
                raise ValueError("Time units must be specified")
        elif isinstance(times, pq.Quantity):
            # could improve this test, what if units is a string?
            if time_units != times.units:
                times = times.rescale(time_units)
        # should check time units have correct dimensions
        obj = pq.Quantity.__new__(cls, signal, units=units, dtype=dtype, copy=copy)
        # A 1D signal is promoted to a single-channel 2D (samples, 1) array.
        if obj.ndim == 1:
            obj = obj.reshape(-1, 1)
        if len(times) != obj.shape[0]:
            raise ValueError("times array and signal array must " "have same length")
        # times are always stored as floats, independent of the signal dtype
        obj.times = pq.Quantity(times, units=time_units, dtype=float, copy=copy)
        obj.segment = None
        obj.channel_index = None

        return obj

    def __init__(self, times, signal, units=None, time_units=None, dtype=None, copy=True,
                 name=None, file_origin=None, description=None, array_annotations=None,
                 **annotations):
        '''
        Initializes a newly constructed :class:`IrregularlySampledSignal` instance.

        The array itself was already built in :meth:`__new__`; this only sets the
        BaseNeo-level metadata (name, description, annotations, ...).
        '''
        DataObject.__init__(self, name=name, file_origin=file_origin, description=description,
                            array_annotations=array_annotations, **annotations)

    def __reduce__(self):
        '''
        Map the __new__ function onto _new_IrregularlySampledSignal, so that pickle
        works
        '''
        return _new_IrregularlySampledSignal, (self.__class__, self.times, np.array(self),
                                               self.units, self.times.units, self.dtype, True,
                                               self.name, self.file_origin, self.description,
                                               self.array_annotations, self.annotations,
                                               self.segment, self.channel_index)

    def _array_finalize_spec(self, obj):
        '''
        Set default values for attributes specific to :class:`IrregularlySampledSignal`.

        Common attributes are defined in :meth:`__array_finalize__` in
        :class:`basesignal.BaseSignal`), which is called every time a new signal is
        created and calls this method.
        '''
        self.times = getattr(obj, 'times', None)
        return obj

    def __repr__(self):
        '''
        Returns a string representing the :class:`IrregularlySampledSignal`.
        '''
        return '<%s(%s at times %s)>' % (self.__class__.__name__,
                                         super(IrregularlySampledSignal, self).__repr__(),
                                         self.times)

    def __getitem__(self, i):
        '''
        Get the item or slice :attr:`i`.
        '''
        if isinstance(i, (int, np.integer)):
            # a single point in time across all channels
            obj = super(IrregularlySampledSignal, self).__getitem__(i)
            obj = pq.Quantity(obj.magnitude, units=obj.units)
        elif isinstance(i, tuple):
            obj = super(IrregularlySampledSignal, self).__getitem__(i)
            j, k = i
            if isinstance(j, (int, np.integer)):
                # a single point in time across some channels
                obj = pq.Quantity(obj.magnitude, units=obj.units)
            else:
                if isinstance(j, slice):
                    obj.times = self.times.__getitem__(j)
                elif isinstance(j, np.ndarray):
                    raise NotImplementedError("Arrays not yet supported")
                else:
                    raise TypeError("%s not supported" % type(j))
                if isinstance(k, (int, np.integer)):
                    obj = obj.reshape(-1, 1)
                # add if channel_index
                obj.array_annotations = deepcopy(self.array_annotations_at_index(k))
        elif isinstance(i, slice):
            obj = super(IrregularlySampledSignal, self).__getitem__(i)
            obj.times = self.times.__getitem__(i)
            obj.array_annotations = deepcopy(self.array_annotations)
        elif isinstance(i, np.ndarray):
            # Indexing of an IrregularlySampledSignal is only consistent if the resulting
            # number of samples is the same for each trace. The time axis for these samples is not
            # guaranteed to be continuous, so returning a Quantity instead of an
            # IrregularlySampledSignal here.
            new_time_dims = np.sum(i, axis=0)
            if len(new_time_dims) and all(new_time_dims == new_time_dims[0]):
                obj = np.asarray(self).T.__getitem__(i.T)
                obj = obj.T.reshape(self.shape[1], -1).T
                obj = pq.Quantity(obj, units=self.units)
            else:
                raise IndexError("indexing of an IrregularlySampledSignal needs to keep the same "
                                 "number of sample for each trace contained")
        else:
            raise IndexError("index should be an integer, tuple, slice or boolean numpy array")
        return obj

    @property
    def duration(self):
        '''
        Signal duration.

        (:attr:`times`[-1] - :attr:`times`[0])
        '''
        return self.times[-1] - self.times[0]

    @property
    def t_start(self):
        '''
        Time when signal begins.

        (:attr:`times`[0])
        '''
        return self.times[0]

    @property
    def t_stop(self):
        '''
        Time when signal ends.

        (:attr:`times`[-1])
        '''
        return self.times[-1]

    def __eq__(self, other):
        '''
        Equality test (==)
        '''
        # Two IrregularlySampledSignals with different time axes are never equal;
        # everything else is delegated to the element-wise Quantity comparison.
        if (isinstance(other, IrregularlySampledSignal) and
                not (self.times == other.times).all()):
            return False
        return super(IrregularlySampledSignal, self).__eq__(other)

    def _check_consistency(self, other):
        '''
        Check if the attributes of another :class:`IrregularlySampledSignal`
        are compatible with this one.
        '''
        # if not an array, then allow the calculation
        if not hasattr(other, 'ndim'):
            return
        # if a scalar array, then allow the calculation
        if not other.ndim:
            return
        # dimensionality should match
        if self.ndim != other.ndim:
            raise ValueError('Dimensionality does not match: %s vs %s' % (self.ndim, other.ndim))
        # if if the other array does not have a times property,
        # then it should be okay to add it directly
        if not hasattr(other, 'times'):
            return
        # if there is a times property, the times need to be the same
        if not (self.times == other.times).all():
            raise ValueError('Times do not match: %s vs %s' % (self.times, other.times))

    def __rsub__(self, other, *args):
        '''
        Backwards subtraction (other-self)
        '''
        return self.__mul__(-1) + other

    def _repr_pretty_(self, pp, cycle):
        '''
        Handle pretty-printing the :class:`IrregularlySampledSignal`.
        '''
        pp.text("{cls} with {channels} channels of length {length}; "
                "units {units}; datatype {dtype} ".format(cls=self.__class__.__name__,
                                                          channels=self.shape[1],
                                                          length=self.shape[0],
                                                          units=self.units.dimensionality.string,
                                                          dtype=self.dtype))
        if self._has_repr_pretty_attrs_():
            pp.breakable()
            self._repr_pretty_attrs_(pp, cycle)

        def _pp(line):
            # Emit one indented, breakable line in the pretty-print output.
            pp.breakable()
            with pp.group(indent=1):
                pp.text(line)

        for line in ["sample times: {0}".format(self.times)]:
            _pp(line)

    @property
    def sampling_intervals(self):
        '''
        Interval between each adjacent pair of samples.

        (:attr:`times[1:]` - :attr:`times`[:-1])
        '''
        return self.times[1:] - self.times[:-1]

    def mean(self, interpolation=None):
        '''
        Calculates the mean, optionally using interpolation between sampling times.

        If :attr:`interpolation` is None, we assume that values change
        stepwise at sampling times.
        '''
        if interpolation is None:
            # time-weighted mean: each sample is held constant over its interval
            return (self[:-1] * self.sampling_intervals.reshape(-1, 1)).sum() / self.duration
        else:
            raise NotImplementedError

    def resample(self, at=None, interpolation=None):
        '''
        Resample the signal, returning either an :class:`AnalogSignal` object
        or another :class:`IrregularlySampledSignal` object.

        Arguments:
            :at: either a :class:`Quantity` array containing the times at
                 which samples should be created (times must be within the
                 signal duration, there is no extrapolation), a sampling rate
                 with dimensions (1/Time) or a sampling interval
                 with dimensions (Time).
            :interpolation: one of: None, 'linear'
        '''
        # further interpolation methods could be added
        raise NotImplementedError

    def time_slice(self, t_start, t_stop):
        '''
        Creates a new :class:`IrregularlySampledSignal` corresponding to the time slice of
        the original :class:`IrregularlySampledSignal` between times
        `t_start` and `t_stop`. Either parameter can also be None
        to use infinite endpoints for the time interval.
        '''
        _t_start = t_start
        _t_stop = t_stop
        if t_start is None:
            _t_start = -np.inf
        if t_stop is None:
            _t_stop = np.inf

        indices = (self.times >= _t_start) & (self.times <= _t_stop)

        # Find the first contiguous run of True values; samples are then taken
        # from that run only (assumes times are monotonically ordered).
        count = 0
        id_start = None
        id_stop = None
        for i in indices:
            if id_start is None:
                if i:
                    id_start = count
            else:
                if not i:
                    id_stop = count
                    break
            count += 1

        # Time slicing should create a deep copy of the object
        new_st = deepcopy(self[id_start:id_stop])

        return new_st

    def time_shift(self, t_shift):
        """
        Shifts a :class:`IrregularlySampledSignal` to start at a new time.

        Parameters:
        -----------
        t_shift: Quantity (time)
            Amount of time by which to shift the :class:`IrregularlySampledSignal`.

        Returns:
        --------
        new_sig: :class:`SpikeTrain`
            New instance of a :class:`IrregularlySampledSignal` object
            starting at t_shift later than the original
            :class:`IrregularlySampledSignal` (the original
            :class:`IrregularlySampledSignal` is not modified).
        """
        new_sig = deepcopy(self)
        new_sig.times += t_shift

        return new_sig

    def merge(self, other):
        '''
        Merge another signal into this one.

        The signal objects are concatenated horizontally
        (column-wise, :func:`np.hstack`).

        If the attributes of the two signals are not
        compatible, an Exception is raised.

        Required attributes of the signal are used.
        '''
        if not np.array_equal(self.times, other.times):
            raise MergeError("Cannot merge these two signals as the sample times differ.")
        if self.segment != other.segment:
            raise MergeError(
                "Cannot merge these two signals as they belong to different segments.")
        if hasattr(self, "lazy_shape"):
            if hasattr(other, "lazy_shape"):
                if self.lazy_shape[0] != other.lazy_shape[0]:
                    raise MergeError("Cannot merge signals of different length.")
                merged_lazy_shape = (self.lazy_shape[0], self.lazy_shape[1] + other.lazy_shape[1])
            else:
                raise MergeError("Cannot merge a lazy object with a real object.")
        if other.units != self.units:
            other = other.rescale(self.units)
        stack = np.hstack((self.magnitude, other.magnitude))
        kwargs = {}
        # Keep metadata that agrees; record both values where it differs.
        for name in ("name", "description", "file_origin"):
            attr_self = getattr(self, name)
            attr_other = getattr(other, name)
            if attr_self == attr_other:
                kwargs[name] = attr_self
            else:
                kwargs[name] = "merge(%s, %s)" % (attr_self, attr_other)
        merged_annotations = merge_annotations(self.annotations, other.annotations)
        kwargs.update(merged_annotations)

        signal = self.__class__(self.times, stack, units=self.units, dtype=self.dtype,
                                copy=False, **kwargs)
        signal.segment = self.segment
        signal.array_annotate(**self._merge_array_annotations(other))

        if hasattr(self, "lazy_shape"):
            signal.lazy_shape = merged_lazy_shape

        # merge channel_index (move to ChannelIndex.merge()?)
        if self.channel_index and other.channel_index:
            signal.channel_index = ChannelIndex(index=np.arange(signal.shape[1]),
                                                channel_ids=np.hstack(
                                                    [self.channel_index.channel_ids,
                                                     other.channel_index.channel_ids]),
                                                channel_names=np.hstack(
                                                    [self.channel_index.channel_names,
                                                     other.channel_index.channel_names]))
        else:
            signal.channel_index = ChannelIndex(index=np.arange(signal.shape[1]))

        return signal
py
b4113d877836b692ae40f68a2ca442a1fb183dea
""" These the test the public routines exposed in types/common.py related to inference and not otherwise tested in types/test_common.py """ import collections from collections import namedtuple from datetime import ( date, datetime, time, timedelta, ) from decimal import Decimal from fractions import Fraction from io import StringIO import itertools from numbers import Number import re import numpy as np import pytest import pytz from pandas._libs import ( lib, missing as libmissing, ops as libops, ) import pandas.util._test_decorators as td from pandas.core.dtypes import inference from pandas.core.dtypes.common import ( ensure_int32, is_bool, is_complex, is_datetime64_any_dtype, is_datetime64_dtype, is_datetime64_ns_dtype, is_datetime64tz_dtype, is_float, is_integer, is_number, is_scalar, is_scipy_sparse, is_timedelta64_dtype, is_timedelta64_ns_dtype, ) import pandas as pd from pandas import ( Categorical, DataFrame, DateOffset, DatetimeIndex, Index, Interval, Period, PeriodIndex, Series, Timedelta, TimedeltaIndex, Timestamp, ) import pandas._testing as tm from pandas.core.arrays import ( BooleanArray, FloatingArray, IntegerArray, ) @pytest.fixture(params=[True, False], ids=str) def coerce(request): return request.param class MockNumpyLikeArray: """ A class which is numpy-like (e.g. Pint's Quantity) but not actually numpy The key is that it is not actually a numpy array so ``util.is_array(mock_numpy_like_array_instance)`` returns ``False``. Other important properties are that the class defines a :meth:`__iter__` method (so that ``isinstance(abc.Iterable)`` returns ``True``) and has a :meth:`ndim` property, as pandas special-cases 0-dimensional arrays in some cases. We expect pandas to behave with respect to such duck arrays exactly as with real numpy arrays. In particular, a 0-dimensional duck array is *NOT* a scalar (`is_scalar(np.array(1)) == False`), but it is not list-like either. 
""" def __init__(self, values): self._values = values def __iter__(self): iter_values = iter(self._values) def it_outer(): yield from iter_values return it_outer() def __len__(self): return len(self._values) def __array__(self, t=None): return np.asarray(self._values, dtype=t) @property def ndim(self): return self._values.ndim @property def dtype(self): return self._values.dtype @property def size(self): return self._values.size @property def shape(self): return self._values.shape # collect all objects to be tested for list-like-ness; use tuples of objects, # whether they are list-like or not (special casing for sets), and their ID ll_params = [ ([1], True, "list"), ([], True, "list-empty"), ((1,), True, "tuple"), ((), True, "tuple-empty"), ({"a": 1}, True, "dict"), ({}, True, "dict-empty"), ({"a", 1}, "set", "set"), (set(), "set", "set-empty"), (frozenset({"a", 1}), "set", "frozenset"), (frozenset(), "set", "frozenset-empty"), (iter([1, 2]), True, "iterator"), (iter([]), True, "iterator-empty"), ((x for x in [1, 2]), True, "generator"), ((_ for _ in []), True, "generator-empty"), (Series([1]), True, "Series"), (Series([], dtype=object), True, "Series-empty"), (Series(["a"]).str, True, "StringMethods"), (Series([], dtype="O").str, True, "StringMethods-empty"), (Index([1]), True, "Index"), (Index([]), True, "Index-empty"), (DataFrame([[1]]), True, "DataFrame"), (DataFrame(), True, "DataFrame-empty"), (np.ndarray((2,) * 1), True, "ndarray-1d"), (np.array([]), True, "ndarray-1d-empty"), (np.ndarray((2,) * 2), True, "ndarray-2d"), (np.array([[]]), True, "ndarray-2d-empty"), (np.ndarray((2,) * 3), True, "ndarray-3d"), (np.array([[[]]]), True, "ndarray-3d-empty"), (np.ndarray((2,) * 4), True, "ndarray-4d"), (np.array([[[[]]]]), True, "ndarray-4d-empty"), (np.array(2), False, "ndarray-0d"), (MockNumpyLikeArray(np.ndarray((2,) * 1)), True, "duck-ndarray-1d"), (MockNumpyLikeArray(np.array([])), True, "duck-ndarray-1d-empty"), (MockNumpyLikeArray(np.ndarray((2,) * 2)), True, 
"duck-ndarray-2d"), (MockNumpyLikeArray(np.array([[]])), True, "duck-ndarray-2d-empty"), (MockNumpyLikeArray(np.ndarray((2,) * 3)), True, "duck-ndarray-3d"), (MockNumpyLikeArray(np.array([[[]]])), True, "duck-ndarray-3d-empty"), (MockNumpyLikeArray(np.ndarray((2,) * 4)), True, "duck-ndarray-4d"), (MockNumpyLikeArray(np.array([[[[]]]])), True, "duck-ndarray-4d-empty"), (MockNumpyLikeArray(np.array(2)), False, "duck-ndarray-0d"), (1, False, "int"), (b"123", False, "bytes"), (b"", False, "bytes-empty"), ("123", False, "string"), ("", False, "string-empty"), (str, False, "string-type"), (object(), False, "object"), (np.nan, False, "NaN"), (None, False, "None"), ] objs, expected, ids = zip(*ll_params) @pytest.fixture(params=zip(objs, expected), ids=ids) def maybe_list_like(request): return request.param def test_is_list_like(maybe_list_like): obj, expected = maybe_list_like expected = True if expected == "set" else expected assert inference.is_list_like(obj) == expected def test_is_list_like_disallow_sets(maybe_list_like): obj, expected = maybe_list_like expected = False if expected == "set" else expected assert inference.is_list_like(obj, allow_sets=False) == expected def test_is_list_like_recursion(): # GH 33721 # interpreter would crash with SIGABRT def foo(): inference.is_list_like([]) foo() with tm.external_error_raised(RecursionError): foo() def test_is_list_like_iter_is_none(): # GH 43373 # is_list_like was yielding false positives with __iter__ == None class NotListLike: def __getitem__(self, item): return self __iter__ = None assert not inference.is_list_like(NotListLike()) def test_is_sequence(): is_seq = inference.is_sequence assert is_seq((1, 2)) assert is_seq([1, 2]) assert not is_seq("abcd") assert not is_seq(np.int64) class A: def __getitem__(self): return 1 assert not is_seq(A()) def test_is_array_like(): assert inference.is_array_like(Series([], dtype=object)) assert inference.is_array_like(Series([1, 2])) assert inference.is_array_like(np.array(["a", 
"b"])) assert inference.is_array_like(Index(["2016-01-01"])) assert inference.is_array_like(np.array([2, 3])) assert inference.is_array_like(MockNumpyLikeArray(np.array([2, 3]))) class DtypeList(list): dtype = "special" assert inference.is_array_like(DtypeList()) assert not inference.is_array_like([1, 2, 3]) assert not inference.is_array_like(()) assert not inference.is_array_like("foo") assert not inference.is_array_like(123) @pytest.mark.parametrize( "inner", [ [], [1], (1,), (1, 2), {"a": 1}, {1, "a"}, Series([1]), Series([], dtype=object), Series(["a"]).str, (x for x in range(5)), ], ) @pytest.mark.parametrize("outer", [list, Series, np.array, tuple]) def test_is_nested_list_like_passes(inner, outer): result = outer([inner for _ in range(5)]) assert inference.is_list_like(result) @pytest.mark.parametrize( "obj", [ "abc", [], [1], (1,), ["a"], "a", {"a"}, [1, 2, 3], Series([1]), DataFrame({"A": [1]}), ([1, 2] for _ in range(5)), ], ) def test_is_nested_list_like_fails(obj): assert not inference.is_nested_list_like(obj) @pytest.mark.parametrize("ll", [{}, {"A": 1}, Series([1]), collections.defaultdict()]) def test_is_dict_like_passes(ll): assert inference.is_dict_like(ll) @pytest.mark.parametrize( "ll", [ "1", 1, [1, 2], (1, 2), range(2), Index([1]), dict, collections.defaultdict, Series, ], ) def test_is_dict_like_fails(ll): assert not inference.is_dict_like(ll) @pytest.mark.parametrize("has_keys", [True, False]) @pytest.mark.parametrize("has_getitem", [True, False]) @pytest.mark.parametrize("has_contains", [True, False]) def test_is_dict_like_duck_type(has_keys, has_getitem, has_contains): class DictLike: def __init__(self, d): self.d = d if has_keys: def keys(self): return self.d.keys() if has_getitem: def __getitem__(self, key): return self.d.__getitem__(key) if has_contains: def __contains__(self, key) -> bool: return self.d.__contains__(key) d = DictLike({1: 2}) result = inference.is_dict_like(d) expected = has_keys and has_getitem and has_contains assert 
result is expected def test_is_file_like(): class MockFile: pass is_file = inference.is_file_like data = StringIO("data") assert is_file(data) # No read / write attributes # No iterator attributes m = MockFile() assert not is_file(m) MockFile.write = lambda self: 0 # Write attribute but not an iterator m = MockFile() assert not is_file(m) # gh-16530: Valid iterator just means we have the # __iter__ attribute for our purposes. MockFile.__iter__ = lambda self: self # Valid write-only file m = MockFile() assert is_file(m) del MockFile.write MockFile.read = lambda self: 0 # Valid read-only file m = MockFile() assert is_file(m) # Iterator but no read / write attributes data = [1, 2, 3] assert not is_file(data) test_tuple = collections.namedtuple("test_tuple", ["a", "b", "c"]) @pytest.mark.parametrize("ll", [test_tuple(1, 2, 3)]) def test_is_names_tuple_passes(ll): assert inference.is_named_tuple(ll) @pytest.mark.parametrize("ll", [(1, 2, 3), "a", Series({"pi": 3.14})]) def test_is_names_tuple_fails(ll): assert not inference.is_named_tuple(ll) def test_is_hashable(): # all new-style classes are hashable by default class HashableClass: pass class UnhashableClass1: __hash__ = None class UnhashableClass2: def __hash__(self): raise TypeError("Not hashable") hashable = (1, 3.14, np.float64(3.14), "a", (), (1,), HashableClass()) not_hashable = ([], UnhashableClass1()) abc_hashable_not_really_hashable = (([],), UnhashableClass2()) for i in hashable: assert inference.is_hashable(i) for i in not_hashable: assert not inference.is_hashable(i) for i in abc_hashable_not_really_hashable: assert not inference.is_hashable(i) # numpy.array is no longer collections.abc.Hashable as of # https://github.com/numpy/numpy/pull/5326, just test # is_hashable() assert not inference.is_hashable(np.array([])) @pytest.mark.parametrize("ll", [re.compile("ad")]) def test_is_re_passes(ll): assert inference.is_re(ll) @pytest.mark.parametrize("ll", ["x", 2, 3, object()]) def test_is_re_fails(ll): assert 
not inference.is_re(ll) @pytest.mark.parametrize( "ll", [r"a", "x", r"asdf", re.compile("adsf"), r"\u2233\s*", re.compile(r"")] ) def test_is_recompilable_passes(ll): assert inference.is_re_compilable(ll) @pytest.mark.parametrize("ll", [1, [], object()]) def test_is_recompilable_fails(ll): assert not inference.is_re_compilable(ll) class TestInference: @pytest.mark.parametrize( "arr", [ np.array(list("abc"), dtype="S1"), np.array(list("abc"), dtype="S1").astype(object), [b"a", np.nan, b"c"], ], ) def test_infer_dtype_bytes(self, arr): result = lib.infer_dtype(arr, skipna=True) assert result == "bytes" @pytest.mark.parametrize( "value, expected", [ (float("inf"), True), (np.inf, True), (-np.inf, False), (1, False), ("a", False), ], ) def test_isposinf_scalar(self, value, expected): # GH 11352 result = libmissing.isposinf_scalar(value) assert result is expected @pytest.mark.parametrize( "value, expected", [ (float("-inf"), True), (-np.inf, True), (np.inf, False), (1, False), ("a", False), ], ) def test_isneginf_scalar(self, value, expected): result = libmissing.isneginf_scalar(value) assert result is expected @pytest.mark.parametrize( "convert_to_masked_nullable, exp", [ ( True, BooleanArray( np.array([True, False], dtype="bool"), np.array([False, True]) ), ), (False, np.array([True, np.nan], dtype="object")), ], ) def test_maybe_convert_nullable_boolean(self, convert_to_masked_nullable, exp): # GH 40687 arr = np.array([True, np.NaN], dtype=object) result = libops.maybe_convert_bool( arr, set(), convert_to_masked_nullable=convert_to_masked_nullable ) if convert_to_masked_nullable: tm.assert_extension_array_equal(BooleanArray(*result), exp) else: result = result[0] tm.assert_numpy_array_equal(result, exp) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) @pytest.mark.parametrize("coerce_numeric", [True, False]) @pytest.mark.parametrize( "infinity", ["inf", "inF", "iNf", "Inf", "iNF", "InF", "INf", "INF"] ) @pytest.mark.parametrize("prefix", ["", 
"-", "+"]) def test_maybe_convert_numeric_infinities( self, coerce_numeric, infinity, prefix, convert_to_masked_nullable ): # see gh-13274 result, _ = lib.maybe_convert_numeric( np.array([prefix + infinity], dtype=object), na_values={"", "NULL", "nan"}, coerce_numeric=coerce_numeric, convert_to_masked_nullable=convert_to_masked_nullable, ) expected = np.array([np.inf if prefix in ["", "+"] else -np.inf]) tm.assert_numpy_array_equal(result, expected) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) def test_maybe_convert_numeric_infinities_raises(self, convert_to_masked_nullable): msg = "Unable to parse string" with pytest.raises(ValueError, match=msg): lib.maybe_convert_numeric( np.array(["foo_inf"], dtype=object), na_values={"", "NULL", "nan"}, coerce_numeric=False, convert_to_masked_nullable=convert_to_masked_nullable, ) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) def test_maybe_convert_numeric_post_floatify_nan( self, coerce, convert_to_masked_nullable ): # see gh-13314 data = np.array(["1.200", "-999.000", "4.500"], dtype=object) expected = np.array([1.2, np.nan, 4.5], dtype=np.float64) nan_values = {-999, -999.0} out = lib.maybe_convert_numeric( data, nan_values, coerce, convert_to_masked_nullable=convert_to_masked_nullable, ) if convert_to_masked_nullable: expected = FloatingArray(expected, np.isnan(expected)) tm.assert_extension_array_equal(expected, FloatingArray(*out)) else: out = out[0] tm.assert_numpy_array_equal(out, expected) def test_convert_infs(self): arr = np.array(["inf", "inf", "inf"], dtype="O") result, _ = lib.maybe_convert_numeric(arr, set(), False) assert result.dtype == np.float64 arr = np.array(["-inf", "-inf", "-inf"], dtype="O") result, _ = lib.maybe_convert_numeric(arr, set(), False) assert result.dtype == np.float64 def test_scientific_no_exponent(self): # See PR 12215 arr = np.array(["42E", "2E", "99e", "6e"], dtype="O") result, _ = lib.maybe_convert_numeric(arr, set(), False, True) assert 
np.all(np.isnan(result)) def test_convert_non_hashable(self): # GH13324 # make sure that we are handing non-hashables arr = np.array([[10.0, 2], 1.0, "apple"], dtype=object) result, _ = lib.maybe_convert_numeric(arr, set(), False, True) tm.assert_numpy_array_equal(result, np.array([np.nan, 1.0, np.nan])) def test_convert_numeric_uint64(self): arr = np.array([2**63], dtype=object) exp = np.array([2**63], dtype=np.uint64) tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp) arr = np.array([str(2**63)], dtype=object) exp = np.array([2**63], dtype=np.uint64) tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp) arr = np.array([np.uint64(2**63)], dtype=object) exp = np.array([2**63], dtype=np.uint64) tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp) @pytest.mark.parametrize( "arr", [ np.array([2**63, np.nan], dtype=object), np.array([str(2**63), np.nan], dtype=object), np.array([np.nan, 2**63], dtype=object), np.array([np.nan, str(2**63)], dtype=object), ], ) def test_convert_numeric_uint64_nan(self, coerce, arr): expected = arr.astype(float) if coerce else arr.copy() result, _ = lib.maybe_convert_numeric(arr, set(), coerce_numeric=coerce) tm.assert_almost_equal(result, expected) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) def test_convert_numeric_uint64_nan_values( self, coerce, convert_to_masked_nullable ): arr = np.array([2**63, 2**63 + 1], dtype=object) na_values = {2**63} expected = ( np.array([np.nan, 2**63 + 1], dtype=float) if coerce else arr.copy() ) result = lib.maybe_convert_numeric( arr, na_values, coerce_numeric=coerce, convert_to_masked_nullable=convert_to_masked_nullable, ) if convert_to_masked_nullable and coerce: expected = IntegerArray( np.array([0, 2**63 + 1], dtype="u8"), np.array([True, False], dtype="bool"), ) result = IntegerArray(*result) else: result = result[0] # discard mask tm.assert_almost_equal(result, expected) @pytest.mark.parametrize( "case", [ 
np.array([2**63, -1], dtype=object), np.array([str(2**63), -1], dtype=object), np.array([str(2**63), str(-1)], dtype=object), np.array([-1, 2**63], dtype=object), np.array([-1, str(2**63)], dtype=object), np.array([str(-1), str(2**63)], dtype=object), ], ) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) def test_convert_numeric_int64_uint64( self, case, coerce, convert_to_masked_nullable ): expected = case.astype(float) if coerce else case.copy() result, _ = lib.maybe_convert_numeric( case, set(), coerce_numeric=coerce, convert_to_masked_nullable=convert_to_masked_nullable, ) tm.assert_almost_equal(result, expected) @pytest.mark.parametrize("convert_to_masked_nullable", [True, False]) def test_convert_numeric_string_uint64(self, convert_to_masked_nullable): # GH32394 result = lib.maybe_convert_numeric( np.array(["uint64"], dtype=object), set(), coerce_numeric=True, convert_to_masked_nullable=convert_to_masked_nullable, ) if convert_to_masked_nullable: result = FloatingArray(*result) else: result = result[0] assert np.isnan(result) @pytest.mark.parametrize("value", [-(2**63) - 1, 2**64]) def test_convert_int_overflow(self, value): # see gh-18584 arr = np.array([value], dtype=object) result = lib.maybe_convert_objects(arr) tm.assert_numpy_array_equal(arr, result) def test_maybe_convert_objects_uint64(self): # see gh-4471 arr = np.array([2**63], dtype=object) exp = np.array([2**63], dtype=np.uint64) tm.assert_numpy_array_equal(lib.maybe_convert_objects(arr), exp) # NumPy bug: can't compare uint64 to int64, as that # results in both casting to float64, so we should # make sure that this function is robust against it arr = np.array([np.uint64(2**63)], dtype=object) exp = np.array([2**63], dtype=np.uint64) tm.assert_numpy_array_equal(lib.maybe_convert_objects(arr), exp) arr = np.array([2, -1], dtype=object) exp = np.array([2, -1], dtype=np.int64) tm.assert_numpy_array_equal(lib.maybe_convert_objects(arr), exp) arr = np.array([2**63, -1], 
dtype=object) exp = np.array([2**63, -1], dtype=object) tm.assert_numpy_array_equal(lib.maybe_convert_objects(arr), exp) def test_maybe_convert_objects_datetime(self): # GH27438 arr = np.array( [np.datetime64("2000-01-01"), np.timedelta64(1, "s")], dtype=object ) exp = arr.copy() out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True ) tm.assert_numpy_array_equal(out, exp) arr = np.array([pd.NaT, np.timedelta64(1, "s")], dtype=object) exp = np.array([np.timedelta64("NaT"), np.timedelta64(1, "s")], dtype="m8[ns]") out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True ) tm.assert_numpy_array_equal(out, exp) # with convert_timedelta=True, the nan is a valid NA value for td64 arr = np.array([np.timedelta64(1, "s"), np.nan], dtype=object) exp = exp[::-1] out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True ) tm.assert_numpy_array_equal(out, exp) def test_maybe_convert_objects_dtype_if_all_nat(self): arr = np.array([pd.NaT, pd.NaT], dtype=object) out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True ) # no dtype_if_all_nat passed -> we dont guess tm.assert_numpy_array_equal(out, arr) out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True, dtype_if_all_nat=np.dtype("timedelta64[ns]"), ) exp = np.array(["NaT", "NaT"], dtype="timedelta64[ns]") tm.assert_numpy_array_equal(out, exp) out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True, dtype_if_all_nat=np.dtype("datetime64[ns]"), ) exp = np.array(["NaT", "NaT"], dtype="datetime64[ns]") tm.assert_numpy_array_equal(out, exp) def test_maybe_convert_objects_dtype_if_all_nat_invalid(self): # we accept datetime64[ns], timedelta64[ns], and EADtype arr = np.array([pd.NaT, pd.NaT], dtype=object) with pytest.raises(ValueError, match="int64"): lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True, dtype_if_all_nat=np.dtype("int64"), ) 
@pytest.mark.parametrize("dtype", ["datetime64[ns]", "timedelta64[ns]"]) def test_maybe_convert_objects_datetime_overflow_safe(self, dtype): stamp = datetime(2363, 10, 4) # Enterprise-D launch date if dtype == "timedelta64[ns]": stamp = stamp - datetime(1970, 1, 1) arr = np.array([stamp], dtype=object) out = lib.maybe_convert_objects( arr, convert_datetime=True, convert_timedelta=True ) # no OutOfBoundsDatetime/OutOfBoundsTimedeltas tm.assert_numpy_array_equal(out, arr) def test_maybe_convert_objects_mixed_datetimes(self): ts = Timestamp("now") vals = [ts, ts.to_pydatetime(), ts.to_datetime64(), pd.NaT, np.nan, None] for data in itertools.permutations(vals): data = np.array(list(data), dtype=object) expected = DatetimeIndex(data)._data._ndarray result = lib.maybe_convert_objects(data, convert_datetime=True) tm.assert_numpy_array_equal(result, expected) def test_maybe_convert_objects_timedelta64_nat(self): obj = np.timedelta64("NaT", "ns") arr = np.array([obj], dtype=object) assert arr[0] is obj result = lib.maybe_convert_objects(arr, convert_timedelta=True) expected = np.array([obj], dtype="m8[ns]") tm.assert_numpy_array_equal(result, expected) @pytest.mark.parametrize( "exp", [ IntegerArray(np.array([2, 0], dtype="i8"), np.array([False, True])), IntegerArray(np.array([2, 0], dtype="int64"), np.array([False, True])), ], ) def test_maybe_convert_objects_nullable_integer(self, exp): # GH27335 arr = np.array([2, np.NaN], dtype=object) result = lib.maybe_convert_objects(arr, convert_to_nullable_integer=True) tm.assert_extension_array_equal(result, exp) @pytest.mark.parametrize( "convert_to_masked_nullable, exp", [ (True, IntegerArray(np.array([2, 0], dtype="i8"), np.array([False, True]))), (False, np.array([2, np.nan], dtype="float64")), ], ) def test_maybe_convert_numeric_nullable_integer( self, convert_to_masked_nullable, exp ): # GH 40687 arr = np.array([2, np.NaN], dtype=object) result = lib.maybe_convert_numeric( arr, set(), 
convert_to_masked_nullable=convert_to_masked_nullable ) if convert_to_masked_nullable: result = IntegerArray(*result) tm.assert_extension_array_equal(result, exp) else: result = result[0] tm.assert_numpy_array_equal(result, exp) @pytest.mark.parametrize( "convert_to_masked_nullable, exp", [ ( True, FloatingArray( np.array([2.0, 0.0], dtype="float64"), np.array([False, True]) ), ), (False, np.array([2.0, np.nan], dtype="float64")), ], ) def test_maybe_convert_numeric_floating_array( self, convert_to_masked_nullable, exp ): # GH 40687 arr = np.array([2.0, np.nan], dtype=object) result = lib.maybe_convert_numeric( arr, set(), convert_to_masked_nullable=convert_to_masked_nullable ) if convert_to_masked_nullable: tm.assert_extension_array_equal(FloatingArray(*result), exp) else: result = result[0] tm.assert_numpy_array_equal(result, exp) def test_maybe_convert_objects_bool_nan(self): # GH32146 ind = Index([True, False, np.nan], dtype=object) exp = np.array([True, False, np.nan], dtype=object) out = lib.maybe_convert_objects(ind.values, safe=1) tm.assert_numpy_array_equal(out, exp) @pytest.mark.parametrize( "data0", [ True, 1, 1.0, 1.0 + 1.0j, np.int8(1), np.int16(1), np.int32(1), np.int64(1), np.float16(1), np.float32(1), np.float64(1), np.complex64(1), np.complex128(1), ], ) @pytest.mark.parametrize( "data1", [ True, 1, 1.0, 1.0 + 1.0j, np.int8(1), np.int16(1), np.int32(1), np.int64(1), np.float16(1), np.float32(1), np.float64(1), np.complex64(1), np.complex128(1), ], ) def test_maybe_convert_objects_itemsize(self, data0, data1): # GH 40908 data = [data0, data1] arr = np.array(data, dtype="object") common_kind = np.find_common_type( [type(data0), type(data1)], scalar_types=[] ).kind kind0 = "python" if not hasattr(data0, "dtype") else data0.dtype.kind kind1 = "python" if not hasattr(data1, "dtype") else data1.dtype.kind if kind0 != "python" and kind1 != "python": kind = common_kind itemsize = max(data0.dtype.itemsize, data1.dtype.itemsize) elif is_bool(data0) or 
is_bool(data1): kind = "bool" if (is_bool(data0) and is_bool(data1)) else "object" itemsize = "" elif is_complex(data0) or is_complex(data1): kind = common_kind itemsize = 16 else: kind = common_kind itemsize = 8 expected = np.array(data, dtype=f"{kind}{itemsize}") result = lib.maybe_convert_objects(arr) tm.assert_numpy_array_equal(result, expected) def test_mixed_dtypes_remain_object_array(self): # GH14956 arr = np.array([datetime(2015, 1, 1, tzinfo=pytz.utc), 1], dtype=object) result = lib.maybe_convert_objects(arr, convert_datetime=True) tm.assert_numpy_array_equal(result, arr) @pytest.mark.parametrize( "idx", [ pd.IntervalIndex.from_breaks(range(5), closed="both"), pd.period_range("2016-01-01", periods=3, freq="D"), ], ) def test_maybe_convert_objects_ea(self, idx): result = lib.maybe_convert_objects( np.array(idx, dtype=object), convert_period=True, convert_interval=True, ) tm.assert_extension_array_equal(result, idx._data) class TestTypeInference: # Dummy class used for testing with Python objects class Dummy: pass def test_inferred_dtype_fixture(self, any_skipna_inferred_dtype): # see pandas/conftest.py inferred_dtype, values = any_skipna_inferred_dtype # make sure the inferred dtype of the fixture is as requested assert inferred_dtype == lib.infer_dtype(values, skipna=True) @pytest.mark.parametrize("skipna", [True, False]) def test_length_zero(self, skipna): result = lib.infer_dtype(np.array([], dtype="i4"), skipna=skipna) assert result == "integer" result = lib.infer_dtype([], skipna=skipna) assert result == "empty" # GH 18004 arr = np.array([np.array([], dtype=object), np.array([], dtype=object)]) result = lib.infer_dtype(arr, skipna=skipna) assert result == "empty" def test_integers(self): arr = np.array([1, 2, 3, np.int64(4), np.int32(5)], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "integer" arr = np.array([1, 2, 3, np.int64(4), np.int32(5), "foo"], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == 
"mixed-integer" arr = np.array([1, 2, 3, 4, 5], dtype="i4") result = lib.infer_dtype(arr, skipna=True) assert result == "integer" @pytest.mark.parametrize( "arr, skipna", [ (np.array([1, 2, np.nan, np.nan, 3], dtype="O"), False), (np.array([1, 2, np.nan, np.nan, 3], dtype="O"), True), (np.array([1, 2, 3, np.int64(4), np.int32(5), np.nan], dtype="O"), False), (np.array([1, 2, 3, np.int64(4), np.int32(5), np.nan], dtype="O"), True), ], ) def test_integer_na(self, arr, skipna): # GH 27392 result = lib.infer_dtype(arr, skipna=skipna) expected = "integer" if skipna else "integer-na" assert result == expected def test_infer_dtype_skipna_default(self): # infer_dtype `skipna` default deprecated in GH#24050, # changed to True in GH#29876 arr = np.array([1, 2, 3, np.nan], dtype=object) result = lib.infer_dtype(arr) assert result == "integer" def test_bools(self): arr = np.array([True, False, True, True, True], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "boolean" arr = np.array([np.bool_(True), np.bool_(False)], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "boolean" arr = np.array([True, False, True, "foo"], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "mixed" arr = np.array([True, False, True], dtype=bool) result = lib.infer_dtype(arr, skipna=True) assert result == "boolean" arr = np.array([True, np.nan, False], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "boolean" result = lib.infer_dtype(arr, skipna=False) assert result == "mixed" def test_floats(self): arr = np.array([1.0, 2.0, 3.0, np.float64(4), np.float32(5)], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "floating" arr = np.array([1, 2, 3, np.float64(4), np.float32(5), "foo"], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "mixed-integer" arr = np.array([1, 2, 3, 4, 5], dtype="f4") result = lib.infer_dtype(arr, skipna=True) assert result == "floating" arr = 
np.array([1, 2, 3, 4, 5], dtype="f8") result = lib.infer_dtype(arr, skipna=True) assert result == "floating" def test_decimals(self): # GH15690 arr = np.array([Decimal(1), Decimal(2), Decimal(3)]) result = lib.infer_dtype(arr, skipna=True) assert result == "decimal" arr = np.array([1.0, 2.0, Decimal(3)]) result = lib.infer_dtype(arr, skipna=True) assert result == "mixed" result = lib.infer_dtype(arr[::-1], skipna=True) assert result == "mixed" arr = np.array([Decimal(1), Decimal("NaN"), Decimal(3)]) result = lib.infer_dtype(arr, skipna=True) assert result == "decimal" arr = np.array([Decimal(1), np.nan, Decimal(3)], dtype="O") result = lib.infer_dtype(arr, skipna=True) assert result == "decimal" # complex is compatible with nan, so skipna has no effect @pytest.mark.parametrize("skipna", [True, False]) def test_complex(self, skipna): # gets cast to complex on array construction arr = np.array([1.0, 2.0, 1 + 1j]) result = lib.infer_dtype(arr, skipna=skipna) assert result == "complex" arr = np.array([1.0, 2.0, 1 + 1j], dtype="O") result = lib.infer_dtype(arr, skipna=skipna) assert result == "mixed" result = lib.infer_dtype(arr[::-1], skipna=skipna) assert result == "mixed" # gets cast to complex on array construction arr = np.array([1, np.nan, 1 + 1j]) result = lib.infer_dtype(arr, skipna=skipna) assert result == "complex" arr = np.array([1.0, np.nan, 1 + 1j], dtype="O") result = lib.infer_dtype(arr, skipna=skipna) assert result == "mixed" # complex with nans stays complex arr = np.array([1 + 1j, np.nan, 3 + 3j], dtype="O") result = lib.infer_dtype(arr, skipna=skipna) assert result == "complex" # test smaller complex dtype; will pass through _try_infer_map fastpath arr = np.array([1 + 1j, np.nan, 3 + 3j], dtype=np.complex64) result = lib.infer_dtype(arr, skipna=skipna) assert result == "complex" def test_string(self): pass def test_unicode(self): arr = ["a", np.nan, "c"] result = lib.infer_dtype(arr, skipna=False) # This currently returns "mixed", but it's not clear 
that's optimal. # This could also return "string" or "mixed-string" assert result == "mixed" arr = ["a", np.nan, "c"] result = lib.infer_dtype(arr, skipna=True) assert result == "string" arr = ["a", "c"] result = lib.infer_dtype(arr, skipna=False) assert result == "string" @pytest.mark.parametrize( "dtype, missing, skipna, expected", [ (float, np.nan, False, "floating"), (float, np.nan, True, "floating"), (object, np.nan, False, "floating"), (object, np.nan, True, "empty"), (object, None, False, "mixed"), (object, None, True, "empty"), ], ) @pytest.mark.parametrize("box", [Series, np.array]) def test_object_empty(self, box, missing, dtype, skipna, expected): # GH 23421 arr = box([missing, missing], dtype=dtype) result = lib.infer_dtype(arr, skipna=skipna) assert result == expected def test_datetime(self): dates = [datetime(2012, 1, x) for x in range(1, 20)] index = Index(dates) assert index.inferred_type == "datetime64" def test_infer_dtype_datetime64(self): arr = np.array( [np.datetime64("2011-01-01"), np.datetime64("2011-01-01")], dtype=object ) assert lib.infer_dtype(arr, skipna=True) == "datetime64" @pytest.mark.parametrize("na_value", [pd.NaT, np.nan]) def test_infer_dtype_datetime64_with_na(self, na_value): # starts with nan arr = np.array([na_value, np.datetime64("2011-01-02")]) assert lib.infer_dtype(arr, skipna=True) == "datetime64" arr = np.array([na_value, np.datetime64("2011-01-02"), na_value]) assert lib.infer_dtype(arr, skipna=True) == "datetime64" @pytest.mark.parametrize( "arr", [ np.array( [np.timedelta64("nat"), np.datetime64("2011-01-02")], dtype=object ), np.array( [np.datetime64("2011-01-02"), np.timedelta64("nat")], dtype=object ), np.array([np.datetime64("2011-01-01"), Timestamp("2011-01-02")]), np.array([Timestamp("2011-01-02"), np.datetime64("2011-01-01")]), np.array([np.nan, Timestamp("2011-01-02"), 1.1]), np.array([np.nan, "2011-01-01", Timestamp("2011-01-02")], dtype=object), np.array([np.datetime64("nat"), np.timedelta64(1, "D")], 
dtype=object), np.array([np.timedelta64(1, "D"), np.datetime64("nat")], dtype=object), ], ) def test_infer_datetimelike_dtype_mixed(self, arr): assert lib.infer_dtype(arr, skipna=False) == "mixed" def test_infer_dtype_mixed_integer(self): arr = np.array([np.nan, Timestamp("2011-01-02"), 1]) assert lib.infer_dtype(arr, skipna=True) == "mixed-integer" @pytest.mark.parametrize( "arr", [ np.array([Timestamp("2011-01-01"), Timestamp("2011-01-02")]), np.array([datetime(2011, 1, 1), datetime(2012, 2, 1)]), np.array([datetime(2011, 1, 1), Timestamp("2011-01-02")]), ], ) def test_infer_dtype_datetime(self, arr): assert lib.infer_dtype(arr, skipna=True) == "datetime" @pytest.mark.parametrize("na_value", [pd.NaT, np.nan]) @pytest.mark.parametrize( "time_stamp", [Timestamp("2011-01-01"), datetime(2011, 1, 1)] ) def test_infer_dtype_datetime_with_na(self, na_value, time_stamp): # starts with nan arr = np.array([na_value, time_stamp]) assert lib.infer_dtype(arr, skipna=True) == "datetime" arr = np.array([na_value, time_stamp, na_value]) assert lib.infer_dtype(arr, skipna=True) == "datetime" @pytest.mark.parametrize( "arr", [ np.array([Timedelta("1 days"), Timedelta("2 days")]), np.array([np.timedelta64(1, "D"), np.timedelta64(2, "D")], dtype=object), np.array([timedelta(1), timedelta(2)]), ], ) def test_infer_dtype_timedelta(self, arr): assert lib.infer_dtype(arr, skipna=True) == "timedelta" @pytest.mark.parametrize("na_value", [pd.NaT, np.nan]) @pytest.mark.parametrize( "delta", [Timedelta("1 days"), np.timedelta64(1, "D"), timedelta(1)] ) def test_infer_dtype_timedelta_with_na(self, na_value, delta): # starts with nan arr = np.array([na_value, delta]) assert lib.infer_dtype(arr, skipna=True) == "timedelta" arr = np.array([na_value, delta, na_value]) assert lib.infer_dtype(arr, skipna=True) == "timedelta" def test_infer_dtype_period(self): # GH 13664 arr = np.array([Period("2011-01", freq="D"), Period("2011-02", freq="D")]) assert lib.infer_dtype(arr, skipna=True) == "period" # 
        # non-homogeneous freqs -> mixed
        arr = np.array([Period("2011-01", freq="D"), Period("2011-02", freq="M")])
        assert lib.infer_dtype(arr, skipna=True) == "mixed"

    @pytest.mark.parametrize("klass", [pd.array, Series, Index])
    @pytest.mark.parametrize("skipna", [True, False])
    def test_infer_dtype_period_array(self, klass, skipna):
        # https://github.com/pandas-dev/pandas/issues/23553
        values = klass(
            [
                Period("2011-01-01", freq="D"),
                Period("2011-01-02", freq="D"),
                pd.NaT,
            ]
        )
        assert lib.infer_dtype(values, skipna=skipna) == "period"

        # periods but mixed freq
        values = klass(
            [
                Period("2011-01-01", freq="D"),
                Period("2011-01-02", freq="M"),
                pd.NaT,
            ]
        )
        # with pd.array this becomes PandasArray which ends up as "unknown-array"
        exp = "unknown-array" if klass is pd.array else "mixed"
        assert lib.infer_dtype(values, skipna=skipna) == exp

    def test_infer_dtype_period_mixed(self):
        arr = np.array(
            [Period("2011-01", freq="M"), np.datetime64("nat")], dtype=object
        )
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

        arr = np.array(
            [np.datetime64("nat"), Period("2011-01", freq="M")], dtype=object
        )
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

    @pytest.mark.parametrize("na_value", [pd.NaT, np.nan])
    def test_infer_dtype_period_with_na(self, na_value):
        # starts with nan
        arr = np.array([na_value, Period("2011-01", freq="D")])
        assert lib.infer_dtype(arr, skipna=True) == "period"

        arr = np.array([na_value, Period("2011-01", freq="D"), na_value])
        assert lib.infer_dtype(arr, skipna=True) == "period"

    @pytest.mark.parametrize(
        "data",
        [
            [datetime(2017, 6, 12, 19, 30), datetime(2017, 3, 11, 1, 15)],
            [Timestamp("20170612"), Timestamp("20170311")],
            [
                Timestamp("20170612", tz="US/Eastern"),
                Timestamp("20170311", tz="US/Eastern"),
            ],
            [date(2017, 6, 12), Timestamp("20170311", tz="US/Eastern")],
            [np.datetime64("2017-06-12"), np.datetime64("2017-03-11")],
            [np.datetime64("2017-06-12"), datetime(2017, 3, 11, 1, 15)],
        ],
    )
    def test_infer_datetimelike_array_datetime(self, data):
        assert lib.infer_datetimelike_array(data) == ("datetime", False)

    @pytest.mark.parametrize(
        "data",
        [
            [timedelta(2017, 6, 12), timedelta(2017, 3, 11)],
            [timedelta(2017, 6, 12), date(2017, 3, 11)],
            [np.timedelta64(2017, "D"), np.timedelta64(6, "s")],
            [np.timedelta64(2017, "D"), timedelta(2017, 3, 11)],
        ],
    )
    def test_infer_datetimelike_array_timedelta(self, data):
        assert lib.infer_datetimelike_array(data) == ("timedelta", False)

    def test_infer_datetimelike_array_date(self):
        arr = [date(2017, 6, 12), date(2017, 3, 11)]
        assert lib.infer_datetimelike_array(arr) == ("date", False)

    @pytest.mark.parametrize(
        "data",
        [
            ["2017-06-12", "2017-03-11"],
            [20170612, 20170311],
            [20170612.5, 20170311.8],
            [Dummy(), Dummy()],
            [Timestamp("20170612"), Timestamp("20170311", tz="US/Eastern")],
            [Timestamp("20170612"), 20170311],
            [timedelta(2017, 6, 12), Timestamp("20170311", tz="US/Eastern")],
        ],
    )
    def test_infer_datetimelike_array_mixed(self, data):
        assert lib.infer_datetimelike_array(data)[0] == "mixed"

    @pytest.mark.parametrize(
        "first, expected",
        [
            [[None], "mixed"],
            [[np.nan], "mixed"],
            [[pd.NaT], "nat"],
            [[datetime(2017, 6, 12, 19, 30), pd.NaT], "datetime"],
            [[np.datetime64("2017-06-12"), pd.NaT], "datetime"],
            [[date(2017, 6, 12), pd.NaT], "date"],
            [[timedelta(2017, 6, 12), pd.NaT], "timedelta"],
            [[np.timedelta64(2017, "D"), pd.NaT], "timedelta"],
        ],
    )
    @pytest.mark.parametrize("second", [None, np.nan])
    def test_infer_datetimelike_array_nan_nat_like(self, first, second, expected):
        first.append(second)
        assert lib.infer_datetimelike_array(first) == (expected, False)

    def test_infer_dtype_all_nan_nat_like(self):
        arr = np.array([np.nan, np.nan])
        assert lib.infer_dtype(arr, skipna=True) == "floating"

        # a nan and None mix results in mixed
        arr = np.array([np.nan, np.nan, None])
        assert lib.infer_dtype(arr, skipna=True) == "empty"
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

        arr = np.array([None, np.nan, np.nan])
        assert lib.infer_dtype(arr, skipna=True) == "empty"
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

        # pd.NaT
        arr = np.array([pd.NaT])
        assert lib.infer_dtype(arr, skipna=False) == "datetime"

        arr = np.array([pd.NaT, np.nan])
        assert lib.infer_dtype(arr, skipna=False) == "datetime"

        arr = np.array([np.nan, pd.NaT])
        assert lib.infer_dtype(arr, skipna=False) == "datetime"

        arr = np.array([np.nan, pd.NaT, np.nan])
        assert lib.infer_dtype(arr, skipna=False) == "datetime"

        arr = np.array([None, pd.NaT, None])
        assert lib.infer_dtype(arr, skipna=False) == "datetime"

        # np.datetime64(nat)
        arr = np.array([np.datetime64("nat")])
        assert lib.infer_dtype(arr, skipna=False) == "datetime64"

        for n in [np.nan, pd.NaT, None]:
            arr = np.array([n, np.datetime64("nat"), n])
            assert lib.infer_dtype(arr, skipna=False) == "datetime64"

            arr = np.array([pd.NaT, n, np.datetime64("nat"), n])
            assert lib.infer_dtype(arr, skipna=False) == "datetime64"

        arr = np.array([np.timedelta64("nat")], dtype=object)
        assert lib.infer_dtype(arr, skipna=False) == "timedelta"

        for n in [np.nan, pd.NaT, None]:
            arr = np.array([n, np.timedelta64("nat"), n])
            assert lib.infer_dtype(arr, skipna=False) == "timedelta"

            arr = np.array([pd.NaT, n, np.timedelta64("nat"), n])
            assert lib.infer_dtype(arr, skipna=False) == "timedelta"

        # datetime / timedelta mixed
        arr = np.array([pd.NaT, np.datetime64("nat"), np.timedelta64("nat"), np.nan])
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

        arr = np.array([np.timedelta64("nat"), np.datetime64("nat")], dtype=object)
        assert lib.infer_dtype(arr, skipna=False) == "mixed"

    def test_is_datetimelike_array_all_nan_nat_like(self):
        arr = np.array([np.nan, pd.NaT, np.datetime64("nat")])
        assert lib.is_datetime_array(arr)
        assert lib.is_datetime64_array(arr)
        assert not lib.is_timedelta_or_timedelta64_array(arr)

        arr = np.array([np.nan, pd.NaT, np.timedelta64("nat")])
        assert not lib.is_datetime_array(arr)
        assert not lib.is_datetime64_array(arr)
        assert lib.is_timedelta_or_timedelta64_array(arr)

        arr = np.array([np.nan, pd.NaT, np.datetime64("nat"), np.timedelta64("nat")])
        assert not lib.is_datetime_array(arr)
        assert not lib.is_datetime64_array(arr)
        assert not lib.is_timedelta_or_timedelta64_array(arr)

        arr = np.array([np.nan, pd.NaT])
        assert lib.is_datetime_array(arr)
        assert lib.is_datetime64_array(arr)
        assert lib.is_timedelta_or_timedelta64_array(arr)

        arr = np.array([np.nan, np.nan], dtype=object)
        assert not lib.is_datetime_array(arr)
        assert not lib.is_datetime64_array(arr)
        assert not lib.is_timedelta_or_timedelta64_array(arr)

        assert lib.is_datetime_with_singletz_array(
            np.array(
                [
                    Timestamp("20130101", tz="US/Eastern"),
                    Timestamp("20130102", tz="US/Eastern"),
                ],
                dtype=object,
            )
        )
        assert not lib.is_datetime_with_singletz_array(
            np.array(
                [
                    Timestamp("20130101", tz="US/Eastern"),
                    Timestamp("20130102", tz="CET"),
                ],
                dtype=object,
            )
        )

    @pytest.mark.parametrize(
        "func",
        [
            "is_datetime_array",
            "is_datetime64_array",
            "is_bool_array",
            "is_timedelta_or_timedelta64_array",
            "is_date_array",
            "is_time_array",
            "is_interval_array",
        ],
    )
    def test_other_dtypes_for_array(self, func):
        func = getattr(lib, func)
        arr = np.array(["foo", "bar"])
        assert not func(arr)
        assert not func(arr.reshape(2, 1))

        arr = np.array([1, 2])
        assert not func(arr)
        assert not func(arr.reshape(2, 1))

    def test_date(self):
        dates = [date(2012, 1, day) for day in range(1, 20)]
        index = Index(dates)
        assert index.inferred_type == "date"

        dates = [date(2012, 1, day) for day in range(1, 20)] + [np.nan]
        result = lib.infer_dtype(dates, skipna=False)
        assert result == "mixed"

        result = lib.infer_dtype(dates, skipna=True)
        assert result == "date"

    @pytest.mark.parametrize(
        "values",
        [
            [date(2020, 1, 1), Timestamp("2020-01-01")],
            [Timestamp("2020-01-01"), date(2020, 1, 1)],
            [date(2020, 1, 1), pd.NaT],
            [pd.NaT, date(2020, 1, 1)],
        ],
    )
    @pytest.mark.parametrize("skipna", [True, False])
    def test_infer_dtype_date_order_invariant(self, values, skipna):
        # https://github.com/pandas-dev/pandas/issues/33741
        result = lib.infer_dtype(values, skipna=skipna)
        assert result == "date"

    def test_is_numeric_array(self):
assert lib.is_float_array(np.array([1, 2.0])) assert lib.is_float_array(np.array([1, 2.0, np.nan])) assert not lib.is_float_array(np.array([1, 2])) assert lib.is_integer_array(np.array([1, 2])) assert not lib.is_integer_array(np.array([1, 2.0])) def test_is_string_array(self): assert lib.is_string_array(np.array(["foo", "bar"])) assert not lib.is_string_array( np.array(["foo", "bar", pd.NA], dtype=object), skipna=False ) assert lib.is_string_array( np.array(["foo", "bar", pd.NA], dtype=object), skipna=True ) # NaN is not valid for string array, just NA assert not lib.is_string_array( np.array(["foo", "bar", np.nan], dtype=object), skipna=True ) assert not lib.is_string_array(np.array([1, 2])) def test_to_object_array_tuples(self): r = (5, 6) values = [r] lib.to_object_array_tuples(values) # make sure record array works record = namedtuple("record", "x y") r = record(5, 6) values = [r] lib.to_object_array_tuples(values) def test_object(self): # GH 7431 # cannot infer more than this as only a single element arr = np.array([None], dtype="O") result = lib.infer_dtype(arr, skipna=False) assert result == "mixed" result = lib.infer_dtype(arr, skipna=True) assert result == "empty" def test_to_object_array_width(self): # see gh-13320 rows = [[1, 2, 3], [4, 5, 6]] expected = np.array(rows, dtype=object) out = lib.to_object_array(rows) tm.assert_numpy_array_equal(out, expected) expected = np.array(rows, dtype=object) out = lib.to_object_array(rows, min_width=1) tm.assert_numpy_array_equal(out, expected) expected = np.array( [[1, 2, 3, None, None], [4, 5, 6, None, None]], dtype=object ) out = lib.to_object_array(rows, min_width=5) tm.assert_numpy_array_equal(out, expected) def test_is_period(self): assert lib.is_period(Period("2011-01", freq="M")) assert not lib.is_period(PeriodIndex(["2011-01"], freq="M")) assert not lib.is_period(Timestamp("2011-01")) assert not lib.is_period(1) assert not lib.is_period(np.nan) def test_categorical(self): # GH 8974 arr = 
Categorical(list("abc")) result = lib.infer_dtype(arr, skipna=True) assert result == "categorical" result = lib.infer_dtype(Series(arr), skipna=True) assert result == "categorical" arr = Categorical(list("abc"), categories=["cegfab"], ordered=True) result = lib.infer_dtype(arr, skipna=True) assert result == "categorical" result = lib.infer_dtype(Series(arr), skipna=True) assert result == "categorical" @pytest.mark.parametrize("asobject", [True, False]) def test_interval(self, asobject): idx = pd.IntervalIndex.from_breaks(range(5), closed="both") if asobject: idx = idx.astype(object) inferred = lib.infer_dtype(idx, skipna=False) assert inferred == "interval" inferred = lib.infer_dtype(idx._data, skipna=False) assert inferred == "interval" inferred = lib.infer_dtype(Series(idx, dtype=idx.dtype), skipna=False) assert inferred == "interval" @pytest.mark.parametrize("value", [Timestamp(0), Timedelta(0), 0, 0.0]) def test_interval_mismatched_closed(self, value): first = Interval(value, value, closed="left") second = Interval(value, value, closed="right") # if closed match, we should infer "interval" arr = np.array([first, first], dtype=object) assert lib.infer_dtype(arr, skipna=False) == "interval" # if closed dont match, we should _not_ get "interval" arr2 = np.array([first, second], dtype=object) assert lib.infer_dtype(arr2, skipna=False) == "mixed" def test_interval_mismatched_subtype(self): first = Interval(0, 1, closed="left") second = Interval(Timestamp(0), Timestamp(1), closed="left") third = Interval(Timedelta(0), Timedelta(1), closed="left") arr = np.array([first, second]) assert lib.infer_dtype(arr, skipna=False) == "mixed" arr = np.array([second, third]) assert lib.infer_dtype(arr, skipna=False) == "mixed" arr = np.array([first, third]) assert lib.infer_dtype(arr, skipna=False) == "mixed" # float vs int subdtype are compatible flt_interval = Interval(1.5, 2.5, closed="left") arr = np.array([first, flt_interval], dtype=object) assert lib.infer_dtype(arr, 
skipna=False) == "interval" @pytest.mark.parametrize("klass", [pd.array, Series]) @pytest.mark.parametrize("skipna", [True, False]) @pytest.mark.parametrize("data", [["a", "b", "c"], ["a", "b", pd.NA]]) def test_string_dtype(self, data, skipna, klass, nullable_string_dtype): # StringArray val = klass(data, dtype=nullable_string_dtype) inferred = lib.infer_dtype(val, skipna=skipna) assert inferred == "string" @pytest.mark.parametrize("klass", [pd.array, Series]) @pytest.mark.parametrize("skipna", [True, False]) @pytest.mark.parametrize("data", [[True, False, True], [True, False, pd.NA]]) def test_boolean_dtype(self, data, skipna, klass): # BooleanArray val = klass(data, dtype="boolean") inferred = lib.infer_dtype(val, skipna=skipna) assert inferred == "boolean" class TestNumberScalar: def test_is_number(self): assert is_number(True) assert is_number(1) assert is_number(1.1) assert is_number(1 + 3j) assert is_number(np.int64(1)) assert is_number(np.float64(1.1)) assert is_number(np.complex128(1 + 3j)) assert is_number(np.nan) assert not is_number(None) assert not is_number("x") assert not is_number(datetime(2011, 1, 1)) assert not is_number(np.datetime64("2011-01-01")) assert not is_number(Timestamp("2011-01-01")) assert not is_number(Timestamp("2011-01-01", tz="US/Eastern")) assert not is_number(timedelta(1000)) assert not is_number(Timedelta("1 days")) # questionable assert not is_number(np.bool_(False)) assert is_number(np.timedelta64(1, "D")) def test_is_bool(self): assert is_bool(True) assert is_bool(False) assert is_bool(np.bool_(False)) assert not is_bool(1) assert not is_bool(1.1) assert not is_bool(1 + 3j) assert not is_bool(np.int64(1)) assert not is_bool(np.float64(1.1)) assert not is_bool(np.complex128(1 + 3j)) assert not is_bool(np.nan) assert not is_bool(None) assert not is_bool("x") assert not is_bool(datetime(2011, 1, 1)) assert not is_bool(np.datetime64("2011-01-01")) assert not is_bool(Timestamp("2011-01-01")) assert not 
is_bool(Timestamp("2011-01-01", tz="US/Eastern")) assert not is_bool(timedelta(1000)) assert not is_bool(np.timedelta64(1, "D")) assert not is_bool(Timedelta("1 days")) def test_is_integer(self): assert is_integer(1) assert is_integer(np.int64(1)) assert not is_integer(True) assert not is_integer(1.1) assert not is_integer(1 + 3j) assert not is_integer(False) assert not is_integer(np.bool_(False)) assert not is_integer(np.float64(1.1)) assert not is_integer(np.complex128(1 + 3j)) assert not is_integer(np.nan) assert not is_integer(None) assert not is_integer("x") assert not is_integer(datetime(2011, 1, 1)) assert not is_integer(np.datetime64("2011-01-01")) assert not is_integer(Timestamp("2011-01-01")) assert not is_integer(Timestamp("2011-01-01", tz="US/Eastern")) assert not is_integer(timedelta(1000)) assert not is_integer(Timedelta("1 days")) assert not is_integer(np.timedelta64(1, "D")) def test_is_float(self): assert is_float(1.1) assert is_float(np.float64(1.1)) assert is_float(np.nan) assert not is_float(True) assert not is_float(1) assert not is_float(1 + 3j) assert not is_float(False) assert not is_float(np.bool_(False)) assert not is_float(np.int64(1)) assert not is_float(np.complex128(1 + 3j)) assert not is_float(None) assert not is_float("x") assert not is_float(datetime(2011, 1, 1)) assert not is_float(np.datetime64("2011-01-01")) assert not is_float(Timestamp("2011-01-01")) assert not is_float(Timestamp("2011-01-01", tz="US/Eastern")) assert not is_float(timedelta(1000)) assert not is_float(np.timedelta64(1, "D")) assert not is_float(Timedelta("1 days")) def test_is_datetime_dtypes(self): ts = pd.date_range("20130101", periods=3) tsa = pd.date_range("20130101", periods=3, tz="US/Eastern") assert is_datetime64_dtype("datetime64") assert is_datetime64_dtype("datetime64[ns]") assert is_datetime64_dtype(ts) assert not is_datetime64_dtype(tsa) assert not is_datetime64_ns_dtype("datetime64") assert is_datetime64_ns_dtype("datetime64[ns]") assert 
is_datetime64_ns_dtype(ts) assert is_datetime64_ns_dtype(tsa) assert is_datetime64_any_dtype("datetime64") assert is_datetime64_any_dtype("datetime64[ns]") assert is_datetime64_any_dtype(ts) assert is_datetime64_any_dtype(tsa) assert not is_datetime64tz_dtype("datetime64") assert not is_datetime64tz_dtype("datetime64[ns]") assert not is_datetime64tz_dtype(ts) assert is_datetime64tz_dtype(tsa) for tz in ["US/Eastern", "UTC"]: dtype = f"datetime64[ns, {tz}]" assert not is_datetime64_dtype(dtype) assert is_datetime64tz_dtype(dtype) assert is_datetime64_ns_dtype(dtype) assert is_datetime64_any_dtype(dtype) def test_is_timedelta(self): assert is_timedelta64_dtype("timedelta64") assert is_timedelta64_dtype("timedelta64[ns]") assert not is_timedelta64_ns_dtype("timedelta64") assert is_timedelta64_ns_dtype("timedelta64[ns]") tdi = TimedeltaIndex([1e14, 2e14], dtype="timedelta64[ns]") assert is_timedelta64_dtype(tdi) assert is_timedelta64_ns_dtype(tdi) assert is_timedelta64_ns_dtype(tdi.astype("timedelta64[ns]")) # Conversion to Int64Index: assert not is_timedelta64_ns_dtype(tdi.astype("timedelta64")) assert not is_timedelta64_ns_dtype(tdi.astype("timedelta64[h]")) class TestIsScalar: def test_is_scalar_builtin_scalars(self): assert is_scalar(None) assert is_scalar(True) assert is_scalar(False) assert is_scalar(Fraction()) assert is_scalar(0.0) assert is_scalar(1) assert is_scalar(complex(2)) assert is_scalar(float("NaN")) assert is_scalar(np.nan) assert is_scalar("foobar") assert is_scalar(b"foobar") assert is_scalar(datetime(2014, 1, 1)) assert is_scalar(date(2014, 1, 1)) assert is_scalar(time(12, 0)) assert is_scalar(timedelta(hours=1)) assert is_scalar(pd.NaT) assert is_scalar(pd.NA) def test_is_scalar_builtin_nonscalars(self): assert not is_scalar({}) assert not is_scalar([]) assert not is_scalar([1]) assert not is_scalar(()) assert not is_scalar((1,)) assert not is_scalar(slice(None)) assert not is_scalar(Ellipsis) def test_is_scalar_numpy_array_scalars(self): assert 
is_scalar(np.int64(1)) assert is_scalar(np.float64(1.0)) assert is_scalar(np.int32(1)) assert is_scalar(np.complex64(2)) assert is_scalar(np.object_("foobar")) assert is_scalar(np.str_("foobar")) assert is_scalar(np.unicode_("foobar")) assert is_scalar(np.bytes_(b"foobar")) assert is_scalar(np.datetime64("2014-01-01")) assert is_scalar(np.timedelta64(1, "h")) def test_is_scalar_numpy_zerodim_arrays(self): for zerodim in [ np.array(1), np.array("foobar"), np.array(np.datetime64("2014-01-01")), np.array(np.timedelta64(1, "h")), np.array(np.datetime64("NaT")), ]: assert not is_scalar(zerodim) assert is_scalar(lib.item_from_zerodim(zerodim)) @pytest.mark.filterwarnings("ignore::PendingDeprecationWarning") def test_is_scalar_numpy_arrays(self): for a in [ np.array([]), np.array([[]]), np.matrix("1; 2"), ]: assert not is_scalar(a) assert not is_scalar(MockNumpyLikeArray(a)) def test_is_scalar_pandas_scalars(self): assert is_scalar(Timestamp("2014-01-01")) assert is_scalar(Timedelta(hours=1)) assert is_scalar(Period("2014-01-01")) assert is_scalar(Interval(left=0, right=1)) assert is_scalar(DateOffset(days=1)) assert is_scalar(pd.offsets.Minute(3)) def test_is_scalar_pandas_containers(self): assert not is_scalar(Series(dtype=object)) assert not is_scalar(Series([1])) assert not is_scalar(DataFrame()) assert not is_scalar(DataFrame([[1]])) assert not is_scalar(Index([])) assert not is_scalar(Index([1])) assert not is_scalar(Categorical([])) assert not is_scalar(DatetimeIndex([])._data) assert not is_scalar(TimedeltaIndex([])._data) assert not is_scalar(DatetimeIndex([])._data.to_period("D")) assert not is_scalar(pd.array([1, 2, 3])) def test_is_scalar_number(self): # Number() is not recognied by PyNumber_Check, so by extension # is not recognized by is_scalar, but instances of non-abstract # subclasses are. 
class Numeric(Number): def __init__(self, value): self.value = value def __int__(self): return self.value num = Numeric(1) assert is_scalar(num) def test_datetimeindex_from_empty_datetime64_array(): for unit in ["ms", "us", "ns"]: idx = DatetimeIndex(np.array([], dtype=f"datetime64[{unit}]")) assert len(idx) == 0 def test_nan_to_nat_conversions(): df = DataFrame( {"A": np.asarray(range(10), dtype="float64"), "B": Timestamp("20010101")} ) df.iloc[3:6, :] = np.nan result = df.loc[4, "B"] assert result is pd.NaT s = df["B"].copy() s[8:9] = np.nan assert s[8] is pd.NaT @td.skip_if_no_scipy @pytest.mark.filterwarnings("ignore::PendingDeprecationWarning") def test_is_scipy_sparse(spmatrix): assert is_scipy_sparse(spmatrix([[0, 1]])) assert not is_scipy_sparse(np.array([1])) def test_ensure_int32(): values = np.arange(10, dtype=np.int32) result = ensure_int32(values) assert result.dtype == np.int32 values = np.arange(10, dtype=np.int64) result = ensure_int32(values) assert result.dtype == np.int32
py
b4113e756128832843b9cddc6ea7cc6102917fb1
from ..base_atari_env import BaseAtariEnv, base_env_wrapper_fn def raw_env(**kwargs): return BaseAtariEnv(game="warlords", num_players=4, mode_num=None, **kwargs) env = base_env_wrapper_fn(raw_env)
py
b4113ebeccfcab3aba87fb40f584dae86f72b7e4
r""" Early Stopping ============== Monitor a validation metric and stop training when it stops improving. """ from copy import deepcopy import numpy as np import torch from pytorch_lightning import _logger as log from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.utilities import rank_zero_warn torch_inf = torch.tensor(np.Inf) class EarlyStopping(Callback): r""" Args: monitor: quantity to be monitored. Default: ``'val_loss'``. min_delta: minimum change in the monitored quantity to qualify as an improvement, i.e. an absolute change of less than `min_delta`, will count as no improvement. Default: ``0``. patience: number of validation epochs with no improvement after which training will be stopped. Default: ``0``. verbose: verbosity mode. Default: ``False``. mode: one of {auto, min, max}. In `min` mode, training will stop when the quantity monitored has stopped decreasing; in `max` mode it will stop when the quantity monitored has stopped increasing; in `auto` mode, the direction is automatically inferred from the name of the monitored quantity. Default: ``'auto'``. strict: whether to crash the training if `monitor` is not found in the validation metrics. Default: ``True``. 
Example:: >>> from pytorch_lightning import Trainer >>> from pytorch_lightning.callbacks import EarlyStopping >>> early_stopping = EarlyStopping('val_loss') >>> trainer = Trainer(early_stop_callback=early_stopping) """ mode_dict = { 'min': torch.lt, 'max': torch.gt, } def __init__(self, monitor: str = 'val_loss', min_delta: float = 0.0, patience: int = 3, verbose: bool = False, mode: str = 'auto', strict: bool = True): super().__init__() self.monitor = monitor self.patience = patience self.verbose = verbose self.strict = strict self.min_delta = min_delta self.wait_count = 0 self.stopped_epoch = 0 self.mode = mode if mode not in self.mode_dict: if self.verbose > 0: log.info(f'EarlyStopping mode {mode} is unknown, fallback to auto mode.') self.mode = 'auto' if self.mode == 'auto': if self.monitor == 'acc': self.mode = 'max' else: self.mode = 'min' if self.verbose > 0: log.info(f'EarlyStopping mode set to {self.mode} for monitoring {self.monitor}.') self.min_delta *= 1 if self.monitor_op == torch.gt else -1 self.best_score = torch_inf if self.monitor_op == torch.lt else -torch_inf def _validate_condition_metric(self, logs): """ Checks that the condition metric for early stopping is good Args: logs: callback metrics from validation output Return: True if specified metric is available """ monitor_val = logs.get(self.monitor) error_msg = (f'Early stopping conditioned on metric `{self.monitor}`' f' which is not available. 
Either add `{self.monitor}` to the return of ' f' validation_epoch end or modify your EarlyStopping callback to use any of the ' f'following: `{"`, `".join(list(logs.keys()))}`') if monitor_val is None: if self.strict: raise RuntimeError(error_msg) if self.verbose > 0: rank_zero_warn(error_msg, RuntimeWarning) return False return True @property def monitor_op(self): return self.mode_dict[self.mode] def state_dict(self): return { 'wait_count': self.wait_count, 'stopped_epoch': self.stopped_epoch, 'best_score': self.best_score, 'patience': self.patience } def load_state_dict(self, state_dict): state_dict = deepcopy(state_dict) self.wait_count = state_dict['wait_count'] self.stopped_epoch = state_dict['stopped_epoch'] self.best_score = state_dict['best_score'] self.patience = state_dict['patience'] def on_sanity_check_end(self, trainer, pl_module): logs = trainer.callback_metrics self._validate_condition_metric(logs) def on_validation_end(self, trainer, pl_module): self._run_early_stopping_check(trainer, pl_module) def _run_early_stopping_check(self, trainer, pl_module): logs = trainer.callback_metrics if not self._validate_condition_metric(logs): return # short circuit if metric not present current = logs.get(self.monitor) if not isinstance(current, torch.Tensor): current = torch.tensor(current) if self.monitor_op(current - self.min_delta, self.best_score): self.best_score = current self.wait_count = 0 else: self.wait_count += 1 if self.wait_count >= self.patience: self.stopped_epoch = trainer.current_epoch trainer.should_stop = True def on_train_end(self, trainer, pl_module): if self.stopped_epoch > 0 and self.verbose > 0: rank_zero_warn('Displayed epoch numbers by `EarlyStopping` start from "1" until v0.6.x,' ' but will start from "0" in v0.8.0.', DeprecationWarning) log.info(f'Epoch {self.stopped_epoch + 1:05d}: early stopping triggered.')
py
b4113ef2b0a2190b29fb92b96f527b0e86891582
# pylint: skip-file import os import re import sys import warnings xla_flags = os.getenv("XLA_FLAGS", "").lstrip("--") xla_flags = re.sub(r"xla_force_host_platform_device_count=.+\s", "", xla_flags).split() os.environ["XLA_FLAGS"] = " ".join([f"--xla_force_host_platform_device_count={100}"]) import aesara.tensor as at import arviz as az import jax import numpy as np import pandas as pd from aesara.compile import SharedVariable from aesara.graph.basic import Apply, Constant, clone, graph_inputs from aesara.graph.fg import FunctionGraph from aesara.graph.op import Op from aesara.graph.opt import MergeOptimizer from aesara.link.jax.dispatch import jax_funcify from aesara.tensor.type import TensorType from pymc3 import modelcontext from pymc3.aesaraf import compile_rv_inplace warnings.warn("This module is experimental.") class NumPyroNUTS(Op): def __init__( self, inputs, outputs, target_accept=0.8, draws=1000, tune=1000, chains=4, seed=None, progress_bar=True, ): self.draws = draws self.tune = tune self.chains = chains self.target_accept = target_accept self.progress_bar = progress_bar self.seed = seed self.inputs, self.outputs = clone(inputs, outputs, copy_inputs=False) self.inputs_type = tuple(input.type for input in inputs) self.outputs_type = tuple(output.type for output in outputs) self.nin = len(inputs) self.nout = len(outputs) self.nshared = len([v for v in inputs if isinstance(v, SharedVariable)]) self.samples_bcast = [self.chains == 1, self.draws == 1] self.fgraph = FunctionGraph(self.inputs, self.outputs, clone=False) MergeOptimizer().optimize(self.fgraph) super().__init__() def make_node(self, *inputs): # The samples for each variable outputs = [ TensorType(v.dtype, self.samples_bcast + list(v.broadcastable))() for v in inputs ] # The leapfrog statistics outputs += [TensorType("int64", self.samples_bcast)()] all_inputs = list(inputs) if self.nshared > 0: all_inputs += self.inputs[-self.nshared :] return Apply(self, all_inputs, outputs) def 
do_constant_folding(self, *args): return False def perform(self, node, inputs, outputs): raise NotImplementedError() @jax_funcify.register(NumPyroNUTS) def jax_funcify_NumPyroNUTS(op, node, **kwargs): from numpyro.infer import MCMC, NUTS draws = op.draws tune = op.tune chains = op.chains target_accept = op.target_accept progress_bar = op.progress_bar seed = op.seed # Compile the "inner" log-likelihood function. This will have extra shared # variable inputs as the last arguments logp_fn = jax_funcify(op.fgraph, **kwargs) if isinstance(logp_fn, (list, tuple)): # This handles the new JAX backend, which always returns a tuple logp_fn = logp_fn[0] def _sample(*inputs): if op.nshared > 0: current_state = inputs[: -op.nshared] shared_inputs = tuple(op.fgraph.inputs[-op.nshared :]) else: current_state = inputs shared_inputs = () def log_fn_wrap(x): res = logp_fn( *( x # We manually obtain the shared values and added them # as arguments to our compiled "inner" function + tuple( v.get_value(borrow=True, return_internal_type=True) for v in shared_inputs ) ) ) if isinstance(res, (list, tuple)): # This handles the new JAX backend, which always returns a tuple res = res[0] return -res nuts_kernel = NUTS( potential_fn=log_fn_wrap, target_accept_prob=target_accept, adapt_step_size=True, adapt_mass_matrix=True, dense_mass=False, ) pmap_numpyro = MCMC( nuts_kernel, num_warmup=tune, num_samples=draws, num_chains=chains, postprocess_fn=None, chain_method="parallel", progress_bar=progress_bar, ) pmap_numpyro.run(seed, init_params=current_state, extra_fields=("num_steps",)) samples = pmap_numpyro.get_samples(group_by_chain=True) leapfrogs_taken = pmap_numpyro.get_extra_fields(group_by_chain=True)["num_steps"] return tuple(samples) + (leapfrogs_taken,) return _sample def sample_numpyro_nuts( draws=1000, tune=1000, chains=4, target_accept=0.8, random_seed=10, model=None, progress_bar=True, keep_untransformed=False, ): model = modelcontext(model) seed = jax.random.PRNGKey(random_seed) 
rv_names = [rv.name for rv in model.value_vars] init_state = [model.initial_point[rv_name] for rv_name in rv_names] init_state_batched = jax.tree_map(lambda x: np.repeat(x[None, ...], chains, axis=0), init_state) init_state_batched_at = [at.as_tensor(v) for v in init_state_batched] nuts_inputs = sorted( (v for v in graph_inputs([model.logpt]) if not isinstance(v, Constant)), key=lambda x: isinstance(x, SharedVariable), ) map_seed = jax.random.split(seed, chains) numpyro_samples = NumPyroNUTS( nuts_inputs, [model.logpt], target_accept=target_accept, draws=draws, tune=tune, chains=chains, seed=map_seed, progress_bar=progress_bar, )(*init_state_batched_at) # Un-transform the transformed variables in JAX sample_outputs = [] for i, (value_var, rv_samples) in enumerate(zip(model.value_vars, numpyro_samples[:-1])): rv = model.values_to_rvs[value_var] transform = getattr(value_var.tag, "transform", None) if transform is not None: untrans_value_var = transform.backward(rv, rv_samples) untrans_value_var.name = rv.name sample_outputs.append(untrans_value_var) if keep_untransformed: rv_samples.name = value_var.name sample_outputs.append(rv_samples) else: rv_samples.name = rv.name sample_outputs.append(rv_samples) print("Compiling...", file=sys.stdout) tic1 = pd.Timestamp.now() _sample = compile_rv_inplace( [], sample_outputs + [numpyro_samples[-1]], allow_input_downcast=True, on_unused_input="ignore", accept_inplace=True, mode="JAX", ) tic2 = pd.Timestamp.now() print("Compilation time = ", tic2 - tic1, file=sys.stdout) print("Sampling...", file=sys.stdout) *mcmc_samples, leapfrogs_taken = _sample() tic3 = pd.Timestamp.now() print("Sampling time = ", tic3 - tic2, file=sys.stdout) posterior = {k.name: v for k, v in zip(sample_outputs, mcmc_samples)} az_trace = az.from_dict(posterior=posterior) return az_trace
py
b4113fa9bd8bce4f54abe215b21204bdf691bd03
from __future__ import absolute_import, division, print_function
import numpy as np
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as T

from utils.block import Double_GRU, ConvBlock, DoubleBlock, Recurrent_unit


class Generator_up(nn.Module):
    """Recurrent U-Net-style generator (the "up"-exposure direction).

    Encoder: `ndown` DoubleBlock+AvgPool stages doubling channels each step.
    Bottleneck: a single Recurrent_unit carrying `hidden` state across calls.
    Decoder: `ndown` Upsample+DoubleBlock stages with skip concatenation.
    The output is averaged with the input (residual-style blend).
    """

    def __init__(self, in_ch=3, nf=64, ndown = 5, nintermediate = 1, style_num=0):
        # NOTE(review): `nintermediate` is currently unused (the intermediate
        # stack was replaced by a single Recurrent_unit) — confirm intended.
        super(Generator_up, self).__init__()
        init_layers = []
        init_layers +=[ConvBlock(in_ch, nf,
                                 kernel_size = 3, stride=1, padding=1,
                                 norm = 'none', activation = 'swish')]

        # Encoder: each stage is DoubleBlock -> AvgPool(2), doubling nf.
        down_layers = []
        for down in range(ndown):
            down_layers +=[DoubleBlock(nf, nf*2,
                                       first_kernel=3, second_kernel=3, stride=1, padding=1,
                                       first_norm = 'none', second_norm='none',
                                       first_act = 'swish', second_act='swish')]
            down_layers += [nn.AvgPool2d(2)]
            nf = nf*2

        # Bottleneck: recurrent unit keeps temporal state via `hidden`.
        self.inter_layers = Recurrent_unit(nf,nf,kernel_size=3,
                                           norm = 'none', act = 'swish')

        # Decoder: input channels are nf*2 because the skip connection is
        # concatenated before each DoubleBlock (see forward()).
        up_layers = []
        for up in range(ndown):
            up_layers += [nn.Upsample(scale_factor=2, mode='bilinear')]
            up_layers += [DoubleBlock(nf*2, nf//2,
                                      first_kernel = 1, second_kernel=3, stride = 1, padding= 1,
                                      style_num = style_num,
                                      first_norm ='none', second_norm = 'cin',
                                      first_act= 'swish', second_act='swish')]
            nf //=2

        # Final conv sees nf*2 channels: decoder output + initial skip.
        self.out_conv = ConvBlock(nf*2, 3,
                                  kernel_size = 3, stride = 1, padding = 1,
                                  norm = 'none', activation='tanh')

        self.init_layers = nn.Sequential(*init_layers)
        self.down_layers = nn.Sequential(*down_layers)
        self.up_layers = nn.Sequential(*up_layers)

    def forward(self, in_x, style_id = 0, hidden=None) :
        """Run one step; returns (blended output, new recurrent hidden)."""
        out = self.init_layers(in_x)

        # Collect pre-pool features for skip connections; the initial
        # feature map is stored first.
        down_list = list()
        down_list.append(out)
        for down_block in self.down_layers:
            if isinstance(down_block, nn.AvgPool2d):
                out = down_block(out)
                down_list.append(out)
            else:
                out = down_block(out)

        # Reverse so skips are consumed deepest-first during decoding.
        down_list.reverse()

        out, hidden = self.inter_layers(out, hidden)

        i = 0
        for up_block in self.up_layers:
            if isinstance(up_block, nn.Upsample):
                # Concatenate the matching-resolution skip before upsampling.
                skip = down_list[i]
                out = torch.cat([out, skip], 1)
                out = up_block(out)
                i += 1
            else:
                # DoubleBlock with conditional instance norm (style_id).
                out = up_block(out, style_id)

        # Last remaining skip is the initial full-resolution feature map.
        out = torch.cat([out,down_list[i]], 1)
        out = self.out_conv(out)
        # Residual-style blend with the input.
        out = (out+in_x)/2

        return out, hidden

    def masking_up(self, prediction, in_x):
        """Build a piecewise-linear alpha mask from the (de-normalized) input
        and concatenate it channel-wise with `prediction`.

        Not called from forward(); appears to be used by external training
        code.
        """
        # Near-white pixels (above `thresh`) get a down-sloped alpha.
        thresh = 0.95
        slope = 4
        # Map input from [-1, 1] to [0, 1] and clamp.
        denorm_x = in_x/2 +0.5
        denorm_x = torch.clamp(denorm_x, 0,1)
        alpha = torch.where(denorm_x>thresh,
                            (1-denorm_x)/slope,
                            (slope-2*(1-thresh))/(slope*(1-2*thresh))*(denorm_x-thresh)+(1-thresh)/slope)
        alpha = torch.where(denorm_x <(1-thresh),
                            -(alpha-(1-thresh))/slope+(1-thresh/slope),
                            alpha)
        output = torch.cat([alpha,prediction],1)
        return output


class Generator_down(nn.Module):
    """Mirror of Generator_up for the "down"-exposure direction.

    Same encoder/recurrent-bottleneck/decoder layout; differs only in the
    masking thresholds and in how out_conv is invoked (see forward()).
    """

    def __init__(self, in_ch=3, nf=64, ndown = 5, nintermediate=1, style_num=0):
        # NOTE(review): `nintermediate` unused here as well.
        super(Generator_down, self).__init__()
        init_layers = []
        init_layers +=[ConvBlock(in_ch, nf,
                                 kernel_size = 3, stride=1, padding=1,
                                 norm = 'none', activation = 'swish')]

        # Encoder: DoubleBlock -> AvgPool(2) per stage, doubling nf.
        down_layers = []
        for down in range(ndown):
            down_layers +=[DoubleBlock(nf, nf*2,
                                       first_kernel =3, second_kernel=3, stride=1, padding=1,
                                       first_norm = 'none', second_norm ='none',
                                       first_act = 'swish', second_act='swish')]
            down_layers += [nn.AvgPool2d(2)]
            nf = nf*2

        # Recurrent bottleneck.
        self.inter_layers = Recurrent_unit(nf,nf,kernel_size=3,
                                           norm = 'none', act = 'swish')

        up_layers = []
        for up in range(ndown):
            up_layers += [nn.Upsample(scale_factor=2, mode='bilinear')]
            up_layers += [DoubleBlock(nf*2, nf//2,
                                      first_kernel = 1, second_kernel=3, stride = 1, padding= 1,
                                      style_num = style_num,
                                      first_norm = 'none', second_norm = 'cin',
                                      first_act = 'swish', second_act = 'swish')]
            nf //=2

        self.out_conv = ConvBlock(nf*2, 3,
                                  kernel_size = 3, stride = 1, padding = 1,
                                  norm='none', activation='tanh')

        self.init_layers = nn.Sequential(*init_layers)
        self.down_layers = nn.Sequential(*down_layers)
        self.up_layers = nn.Sequential(*up_layers)

    def forward(self, in_x,style_id = 0, hidden = None) :
        """Run one step; returns (blended output, new recurrent hidden)."""
        out = self.init_layers(in_x)

        down_list = list()
        down_list.append(out)
        for down_block in self.down_layers:
            if isinstance(down_block, nn.AvgPool2d):
                out = down_block(out)
                down_list.append(out)
            else :
                out = down_block(out)

        down_list.reverse()

        out, hidden = self.inter_layers(out, hidden)

        i = 0
        for up_block in self.up_layers:
            if isinstance(up_block, nn.Upsample):
                skip = down_list[i]
                out = torch.cat([out, skip], 1)
                out = up_block(out)
                i += 1
            else:
                out = up_block(out, style_id)

        out = torch.cat([out,down_list[i]], 1)
        # NOTE(review): Generator_up calls self.out_conv(out) with a single
        # argument, but here style_id is also passed even though out_conv is
        # a ConvBlock built with norm='none' — verify ConvBlock.forward
        # accepts (and presumably ignores) a style id, or this is a bug.
        out = self.out_conv(out, style_id)
        out = (out+in_x)/2

        return out, hidden

    def masking_down(self, prediction, in_x):
        """Alpha mask for the dark end of the range (thresh=0.05), mirrored
        from masking_up; concatenates the mask with `prediction`.
        """
        thresh = 0.05
        slope = 4
        denorm_x = in_x/2 + 0.5
        denorm_x = torch.clamp(denorm_x, 0,1)
        alpha = torch.where(denorm_x < thresh,
                            (thresh-denorm_x)/slope,
                            (1-2*thresh/slope)/((1-2*thresh))*(denorm_x-thresh) + thresh/slope)
        alpha = torch.where(denorm_x> (1-thresh),
                            (alpha-(1-thresh))/slope+(1-thresh/slope),
                            alpha)
        output = torch.cat([alpha,prediction], 1)
        return output


class discrim(nn.Module):
    """PatchGAN-style discriminator over the channel-concatenation of an
    input image and a predicted exposure map (in_ch defaults to 3+3=6).
    """

    def __init__(self, in_ch=6, nf=64):
        super(discrim, self).__init__()
        n_downs = 4
        layers= []
        layers.append(ConvBlock(in_ch, nf,
                                kernel_size=3, stride=1, padding =1,
                                norm='none', activation='none'))
        layers += [nn.AvgPool2d(2)]
        dim = nf
        # Four ConvBlock+AvgPool stages, doubling channels each time.
        for n_down in range(n_downs):
            layers += [ConvBlock(dim, dim*2,
                                 kernel_size = 3, stride=1, padding =1,
                                 norm = 'none', activation = 'leaky')]
            layers += [nn.AvgPool2d(2)]
            dim = dim*2
        self.body_net = nn.Sequential(*layers)
        # 1-channel patch logits (no bias, no final activation).
        self.conv1 = nn.Conv2d(dim, 1, kernel_size = 3, stride =1, padding = 1, bias=False)

    def forward(self, in_x, pred_ev):
        """Return patch logits for the (image, prediction) pair."""
        in_x = torch.cat([in_x, pred_ev], 1)
        output = self.body_net(in_x)
        output = self.conv1(output)
        return output
py
b411406e84bbfd2cf70a993670714f02b146a01f
# Copyright (c) MONAI Consortium # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .config_item import ComponentLocator, ConfigComponent, ConfigExpression, ConfigItem, Instantiable from .config_parser import ConfigParser from .reference_resolver import ReferenceResolver from .scripts import run from .utils import EXPR_KEY, ID_REF_KEY, ID_SEP_KEY, MACRO_KEY
py
b41141047b326d2f35862743ce46511a0c56d89b
#!/usr/bin/env python
# -*- coding: utf8 -*-
"""Backup/restore helpers for MikroTik RouterOS devices over SSH/SFTP."""

import random
import string

from app.protocols import ssh
from Log import log

# Alphabet used for randomly generated export-file names.
_TAG_CHARSET = string.ascii_uppercase + string.ascii_lowercase + string.digits


def get_config(hostname, port, username, password, localpath):
    """Download the running configuration from a RouterOS device.

    Exports the config to a temporary ``<tag>.rsc`` file on the router,
    fetches it over SFTP into ``localpath`` (which is expected to already
    end with a path separator), and always removes the temporary file from
    the router afterwards.
    """
    session = ssh(hostname, port, username, password)
    rscfile = '{}.rsc'.format(_tag())
    session.instruct('/export file={}'.format(rscfile))
    sftp = session.sftp()
    localpath = '{}{}'.format(localpath, rscfile)
    try:
        sftp.get(rscfile, localpath, callback=_check_transfer)
    except PermissionError as e:
        # Best-effort download: log and continue so cleanup still runs.
        log.error(e)
    finally:
        # Previously the remote temp file leaked if sftp.get raised anything
        # other than PermissionError; clean it up unconditionally.
        session.instruct('/file remove {}'.format(rscfile))


def _check_transfer(got, should):
    """Callback for the sftp put/get functions.

    NOTE(review): paramiko-style transfer callbacks are invoked repeatedly
    with (bytes_so_far, total_bytes), so intermediate calls will log
    "Broken transfer" — confirm the ssh wrapper only calls this once.
    """
    if got == should:
        log.info('Good transfer')
    else:
        log.info('Broken transfer: {} bytes of {} bytes'.format(got, should))


def _tag(length=8):
    """Return a random alphanumeric string used to name the export file.

    ``length`` defaults to 8 for backward compatibility.
    """
    return ''.join(random.choice(_TAG_CHARSET) for _ in range(length))


def set_config(hostname, port, username, password, localpath, remotepath):
    """Upload a configuration file to a RouterOS device via SFTP."""
    session = ssh(hostname, port, username, password)
    sftp = session.sftp()
    sftp.put(localpath, remotepath, callback=_check_transfer)


def save_config():
    """Placeholder: persisting the config on-device is not implemented."""
    pass
py
b41141e28adbad31ef0a1b118546f0c93d063966
"""Unit tests for the io module.""" # Tests of io are scattered over the test suite: # * test_bufio - tests file buffering # * test_memoryio - tests BytesIO and StringIO # * test_fileio - tests FileIO # * test_file - tests the file interface # * test_io - tests everything else in the io module # * test_univnewlines - tests universal newline support # * test_largefile - tests operations on a file greater than 2**32 bytes # (only enabled with -ulargefile) ################################################################################ # ATTENTION TEST WRITERS!!! ################################################################################ # When writing tests for io, it's important to test both the C and Python # implementations. This is usually done by writing a base test that refers to # the type it is testing as an attribute. Then it provides custom subclasses to # test both implementations. This file has lots of examples. ################################################################################ import abc import array import errno import locale import os import pickle import random import signal import sys import threading import time import unittest import warnings import weakref from collections import deque, UserList from itertools import cycle, count from test import support from test.support.script_helper import assert_python_ok, run_python_until_end from test.support import FakePath import codecs import io # C implementation of io import _pyio as pyio # Python implementation of io try: import ctypes except ImportError: def byteslike(*pos, **kw): return array.array("b", bytes(*pos, **kw)) else: def byteslike(*pos, **kw): """Create a bytes-like object having no string or sequence methods""" data = bytes(*pos, **kw) obj = EmptyStruct() ctypes.resize(obj, len(data)) memoryview(obj).cast("B")[:] = data return obj class EmptyStruct(ctypes.Structure): pass def _default_chunk_size(): """Get the default TextIOWrapper chunk size""" with open(__file__, "r", 
encoding="latin-1") as f: return f._CHUNK_SIZE class MockRawIOWithoutRead: """A RawIO implementation without read(), so as to exercise the default RawIO.read() which calls readinto().""" def __init__(self, read_stack=()): self._read_stack = list(read_stack) self._write_stack = [] self._reads = 0 self._extraneous_reads = 0 def write(self, b): self._write_stack.append(bytes(b)) return len(b) def writable(self): return True def fileno(self): return 42 def readable(self): return True def seekable(self): return True def seek(self, pos, whence): return 0 # wrong but we gotta return something def tell(self): return 0 # same comment as above def readinto(self, buf): self._reads += 1 max_len = len(buf) try: data = self._read_stack[0] except IndexError: self._extraneous_reads += 1 return 0 if data is None: del self._read_stack[0] return None n = len(data) if len(data) <= max_len: del self._read_stack[0] buf[:n] = data return n else: buf[:] = data[:max_len] self._read_stack[0] = data[max_len:] return max_len def truncate(self, pos=None): return pos class CMockRawIOWithoutRead(MockRawIOWithoutRead, io.RawIOBase): pass class PyMockRawIOWithoutRead(MockRawIOWithoutRead, pyio.RawIOBase): pass class MockRawIO(MockRawIOWithoutRead): def read(self, n=None): self._reads += 1 try: return self._read_stack.pop(0) except: self._extraneous_reads += 1 return b"" class CMockRawIO(MockRawIO, io.RawIOBase): pass class PyMockRawIO(MockRawIO, pyio.RawIOBase): pass class MisbehavedRawIO(MockRawIO): def write(self, b): return super().write(b) * 2 def read(self, n=None): return super().read(n) * 2 def seek(self, pos, whence): return -123 def tell(self): return -456 def readinto(self, buf): super().readinto(buf) return len(buf) * 5 class CMisbehavedRawIO(MisbehavedRawIO, io.RawIOBase): pass class PyMisbehavedRawIO(MisbehavedRawIO, pyio.RawIOBase): pass class SlowFlushRawIO(MockRawIO): def __init__(self): super().__init__() self.in_flush = threading.Event() def flush(self): self.in_flush.set() 
time.sleep(0.25) class CSlowFlushRawIO(SlowFlushRawIO, io.RawIOBase): pass class PySlowFlushRawIO(SlowFlushRawIO, pyio.RawIOBase): pass class CloseFailureIO(MockRawIO): closed = 0 def close(self): if not self.closed: self.closed = 1 raise OSError class CCloseFailureIO(CloseFailureIO, io.RawIOBase): pass class PyCloseFailureIO(CloseFailureIO, pyio.RawIOBase): pass class MockFileIO: def __init__(self, data): self.read_history = [] super().__init__(data) def read(self, n=None): res = super().read(n) self.read_history.append(None if res is None else len(res)) return res def readinto(self, b): res = super().readinto(b) self.read_history.append(res) return res class CMockFileIO(MockFileIO, io.BytesIO): pass class PyMockFileIO(MockFileIO, pyio.BytesIO): pass class MockUnseekableIO: def seekable(self): return False def seek(self, *args): raise self.UnsupportedOperation("not seekable") def tell(self, *args): raise self.UnsupportedOperation("not seekable") def truncate(self, *args): raise self.UnsupportedOperation("not seekable") class CMockUnseekableIO(MockUnseekableIO, io.BytesIO): UnsupportedOperation = io.UnsupportedOperation class PyMockUnseekableIO(MockUnseekableIO, pyio.BytesIO): UnsupportedOperation = pyio.UnsupportedOperation class MockNonBlockWriterIO: def __init__(self): self._write_stack = [] self._blocker_char = None def pop_written(self): s = b"".join(self._write_stack) self._write_stack[:] = [] return s def block_on(self, char): """Block when a given char is encountered.""" self._blocker_char = char def readable(self): return True def seekable(self): return True def writable(self): return True def write(self, b): b = bytes(b) n = -1 if self._blocker_char: try: n = b.index(self._blocker_char) except ValueError: pass else: if n > 0: # write data up to the first blocker self._write_stack.append(b[:n]) return n else: # cancel blocker and indicate would block self._blocker_char = None return None self._write_stack.append(b) return len(b) class 
CMockNonBlockWriterIO(MockNonBlockWriterIO, io.RawIOBase): BlockingIOError = io.BlockingIOError class PyMockNonBlockWriterIO(MockNonBlockWriterIO, pyio.RawIOBase): BlockingIOError = pyio.BlockingIOError class IOTest(unittest.TestCase): def setUp(self): support.unlink(support.TESTFN) def tearDown(self): support.unlink(support.TESTFN) def write_ops(self, f): self.assertEqual(f.write(b"blah."), 5) f.truncate(0) self.assertEqual(f.tell(), 5) f.seek(0) self.assertEqual(f.write(b"blah."), 5) self.assertEqual(f.seek(0), 0) self.assertEqual(f.write(b"Hello."), 6) self.assertEqual(f.tell(), 6) self.assertEqual(f.seek(-1, 1), 5) self.assertEqual(f.tell(), 5) buffer = bytearray(b" world\n\n\n") self.assertEqual(f.write(buffer), 9) buffer[:] = b"*" * 9 # Overwrite our copy of the data self.assertEqual(f.seek(0), 0) self.assertEqual(f.write(b"h"), 1) self.assertEqual(f.seek(-1, 2), 13) self.assertEqual(f.tell(), 13) self.assertEqual(f.truncate(12), 12) self.assertEqual(f.tell(), 13) self.assertRaises(TypeError, f.seek, 0.0) def read_ops(self, f, buffered=False): data = f.read(5) self.assertEqual(data, b"hello") data = byteslike(data) self.assertEqual(f.readinto(data), 5) self.assertEqual(bytes(data), b" worl") data = bytearray(5) self.assertEqual(f.readinto(data), 2) self.assertEqual(len(data), 5) self.assertEqual(data[:2], b"d\n") self.assertEqual(f.seek(0), 0) self.assertEqual(f.read(20), b"hello world\n") self.assertEqual(f.read(1), b"") self.assertEqual(f.readinto(byteslike(b"x")), 0) self.assertEqual(f.seek(-6, 2), 6) self.assertEqual(f.read(5), b"world") self.assertEqual(f.read(0), b"") self.assertEqual(f.readinto(byteslike()), 0) self.assertEqual(f.seek(-6, 1), 5) self.assertEqual(f.read(5), b" worl") self.assertEqual(f.tell(), 10) self.assertRaises(TypeError, f.seek, 0.0) if buffered: f.seek(0) self.assertEqual(f.read(), b"hello world\n") f.seek(6) self.assertEqual(f.read(), b"world\n") self.assertEqual(f.read(), b"") f.seek(0) data = byteslike(5) 
self.assertEqual(f.readinto1(data), 5) self.assertEqual(bytes(data), b"hello") LARGE = 2**31 def large_file_ops(self, f): assert f.readable() assert f.writable() try: self.assertEqual(f.seek(self.LARGE), self.LARGE) except (OverflowError, ValueError): self.skipTest("no largefile support") self.assertEqual(f.tell(), self.LARGE) self.assertEqual(f.write(b"xxx"), 3) self.assertEqual(f.tell(), self.LARGE + 3) self.assertEqual(f.seek(-1, 1), self.LARGE + 2) self.assertEqual(f.truncate(), self.LARGE + 2) self.assertEqual(f.tell(), self.LARGE + 2) self.assertEqual(f.seek(0, 2), self.LARGE + 2) self.assertEqual(f.truncate(self.LARGE + 1), self.LARGE + 1) self.assertEqual(f.tell(), self.LARGE + 2) self.assertEqual(f.seek(0, 2), self.LARGE + 1) self.assertEqual(f.seek(-1, 2), self.LARGE) self.assertEqual(f.read(2), b"x") def test_invalid_operations(self): # Try writing on a file opened in read mode and vice-versa. exc = self.UnsupportedOperation for mode in ("w", "wb"): with self.open(support.TESTFN, mode) as fp: self.assertRaises(exc, fp.read) self.assertRaises(exc, fp.readline) with self.open(support.TESTFN, "wb", buffering=0) as fp: self.assertRaises(exc, fp.read) self.assertRaises(exc, fp.readline) with self.open(support.TESTFN, "rb", buffering=0) as fp: self.assertRaises(exc, fp.write, b"blah") self.assertRaises(exc, fp.writelines, [b"blah\n"]) with self.open(support.TESTFN, "rb") as fp: self.assertRaises(exc, fp.write, b"blah") self.assertRaises(exc, fp.writelines, [b"blah\n"]) with self.open(support.TESTFN, "r") as fp: self.assertRaises(exc, fp.write, "blah") self.assertRaises(exc, fp.writelines, ["blah\n"]) # Non-zero seeking from current or end pos self.assertRaises(exc, fp.seek, 1, self.SEEK_CUR) self.assertRaises(exc, fp.seek, -1, self.SEEK_END) def test_optional_abilities(self): # Test for OSError when optional APIs are not supported # The purpose of this test is to try fileno(), reading, writing and # seeking operations with various objects that indicate they do 
not # support these operations. def pipe_reader(): [r, w] = os.pipe() os.close(w) # So that read() is harmless return self.FileIO(r, "r") def pipe_writer(): [r, w] = os.pipe() self.addCleanup(os.close, r) # Guarantee that we can write into the pipe without blocking thread = threading.Thread(target=os.read, args=(r, 100)) thread.start() self.addCleanup(thread.join) return self.FileIO(w, "w") def buffered_reader(): return self.BufferedReader(self.MockUnseekableIO()) def buffered_writer(): return self.BufferedWriter(self.MockUnseekableIO()) def buffered_random(): return self.BufferedRandom(self.BytesIO()) def buffered_rw_pair(): return self.BufferedRWPair(self.MockUnseekableIO(), self.MockUnseekableIO()) def text_reader(): class UnseekableReader(self.MockUnseekableIO): writable = self.BufferedIOBase.writable write = self.BufferedIOBase.write return self.TextIOWrapper(UnseekableReader(), "ascii") def text_writer(): class UnseekableWriter(self.MockUnseekableIO): readable = self.BufferedIOBase.readable read = self.BufferedIOBase.read return self.TextIOWrapper(UnseekableWriter(), "ascii") tests = ( (pipe_reader, "fr"), (pipe_writer, "fw"), (buffered_reader, "r"), (buffered_writer, "w"), (buffered_random, "rws"), (buffered_rw_pair, "rw"), (text_reader, "r"), (text_writer, "w"), (self.BytesIO, "rws"), (self.StringIO, "rws"), ) for [test, abilities] in tests: with self.subTest(test), test() as obj: readable = "r" in abilities self.assertEqual(obj.readable(), readable) writable = "w" in abilities self.assertEqual(obj.writable(), writable) if isinstance(obj, self.TextIOBase): data = "3" elif isinstance(obj, (self.BufferedIOBase, self.RawIOBase)): data = b"3" else: self.fail("Unknown base class") if "f" in abilities: obj.fileno() else: self.assertRaises(OSError, obj.fileno) if readable: obj.read(1) obj.read() else: self.assertRaises(OSError, obj.read, 1) self.assertRaises(OSError, obj.read) if writable: obj.write(data) else: self.assertRaises(OSError, obj.write, data) if 
sys.platform.startswith("win") and test in ( pipe_reader, pipe_writer): # Pipes seem to appear as seekable on Windows continue seekable = "s" in abilities self.assertEqual(obj.seekable(), seekable) if seekable: obj.tell() obj.seek(0) else: self.assertRaises(OSError, obj.tell) self.assertRaises(OSError, obj.seek, 0) if writable and seekable: obj.truncate() obj.truncate(0) else: self.assertRaises(OSError, obj.truncate) self.assertRaises(OSError, obj.truncate, 0) def test_open_handles_NUL_chars(self): fn_with_NUL = 'foo\0bar' self.assertRaises(ValueError, self.open, fn_with_NUL, 'w') bytes_fn = bytes(fn_with_NUL, 'ascii') with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) self.assertRaises(ValueError, self.open, bytes_fn, 'w') def test_raw_file_io(self): with self.open(support.TESTFN, "wb", buffering=0) as f: self.assertEqual(f.readable(), False) self.assertEqual(f.writable(), True) self.assertEqual(f.seekable(), True) self.write_ops(f) with self.open(support.TESTFN, "rb", buffering=0) as f: self.assertEqual(f.readable(), True) self.assertEqual(f.writable(), False) self.assertEqual(f.seekable(), True) self.read_ops(f) def test_buffered_file_io(self): with self.open(support.TESTFN, "wb") as f: self.assertEqual(f.readable(), False) self.assertEqual(f.writable(), True) self.assertEqual(f.seekable(), True) self.write_ops(f) with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.readable(), True) self.assertEqual(f.writable(), False) self.assertEqual(f.seekable(), True) self.read_ops(f, True) def test_readline(self): with self.open(support.TESTFN, "wb") as f: f.write(b"abc\ndef\nxyzzy\nfoo\x00bar\nanother line") with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.readline(), b"abc\n") self.assertEqual(f.readline(10), b"def\n") self.assertEqual(f.readline(2), b"xy") self.assertEqual(f.readline(4), b"zzy\n") self.assertEqual(f.readline(), b"foo\x00bar\n") self.assertEqual(f.readline(None), b"another line") 
self.assertRaises(TypeError, f.readline, 5.3) with self.open(support.TESTFN, "r") as f: self.assertRaises(TypeError, f.readline, 5.3) def test_readline_nonsizeable(self): # Issue #30061 # Crash when readline() returns an object without __len__ class R(self.IOBase): def readline(self): return None self.assertRaises((TypeError, StopIteration), next, R()) def test_next_nonsizeable(self): # Issue #30061 # Crash when __next__() returns an object without __len__ class R(self.IOBase): def __next__(self): return None self.assertRaises(TypeError, R().readlines, 1) def test_raw_bytes_io(self): f = self.BytesIO() self.write_ops(f) data = f.getvalue() self.assertEqual(data, b"hello world\n") f = self.BytesIO(data) self.read_ops(f, True) def test_large_file_ops(self): # On Windows and Mac OSX this test consumes large resources; It takes # a long time to build the >2 GiB file and takes >2 GiB of disk space # therefore the resource must be enabled to run this test. if sys.platform[:3] == 'win' or sys.platform == 'darwin': support.requires( 'largefile', 'test requires %s bytes and a long time to run' % self.LARGE) with self.open(support.TESTFN, "w+b", 0) as f: self.large_file_ops(f) with self.open(support.TESTFN, "w+b") as f: self.large_file_ops(f) def test_with_open(self): for bufsize in (0, 1, 100): f = None with self.open(support.TESTFN, "wb", bufsize) as f: f.write(b"xxx") self.assertEqual(f.closed, True) f = None try: with self.open(support.TESTFN, "wb", bufsize) as f: 1/0 except ZeroDivisionError: self.assertEqual(f.closed, True) else: self.fail("1/0 didn't raise an exception") # issue 5008 def test_append_mode_tell(self): with self.open(support.TESTFN, "wb") as f: f.write(b"xxx") with self.open(support.TESTFN, "ab", buffering=0) as f: self.assertEqual(f.tell(), 3) with self.open(support.TESTFN, "ab") as f: self.assertEqual(f.tell(), 3) with self.open(support.TESTFN, "a") as f: self.assertGreater(f.tell(), 0) def test_destructor(self): record = [] class 
MyFileIO(self.FileIO): def __del__(self): record.append(1) try: f = super().__del__ except AttributeError: pass else: f() def close(self): record.append(2) super().close() def flush(self): record.append(3) super().flush() with support.check_warnings(('', ResourceWarning)): f = MyFileIO(support.TESTFN, "wb") f.write(b"xxx") del f support.gc_collect() self.assertEqual(record, [1, 2, 3]) with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.read(), b"xxx") def _check_base_destructor(self, base): record = [] class MyIO(base): def __init__(self): # This exercises the availability of attributes on object # destruction. # (in the C version, close() is called by the tp_dealloc # function, not by __del__) self.on_del = 1 self.on_close = 2 self.on_flush = 3 def __del__(self): record.append(self.on_del) try: f = super().__del__ except AttributeError: pass else: f() def close(self): record.append(self.on_close) super().close() def flush(self): record.append(self.on_flush) super().flush() f = MyIO() del f support.gc_collect() self.assertEqual(record, [1, 2, 3]) def test_IOBase_destructor(self): self._check_base_destructor(self.IOBase) def test_RawIOBase_destructor(self): self._check_base_destructor(self.RawIOBase) def test_BufferedIOBase_destructor(self): self._check_base_destructor(self.BufferedIOBase) def test_TextIOBase_destructor(self): self._check_base_destructor(self.TextIOBase) def test_close_flushes(self): with self.open(support.TESTFN, "wb") as f: f.write(b"xxx") with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.read(), b"xxx") def test_array_writes(self): a = array.array('i', range(10)) n = len(a.tobytes()) def check(f): with f: self.assertEqual(f.write(a), n) f.writelines((a,)) check(self.BytesIO()) check(self.FileIO(support.TESTFN, "w")) check(self.BufferedWriter(self.MockRawIO())) check(self.BufferedRandom(self.MockRawIO())) check(self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())) def test_closefd(self): self.assertRaises(ValueError, 
self.open, support.TESTFN, 'w', closefd=False) def test_read_closed(self): with self.open(support.TESTFN, "w") as f: f.write("egg\n") with self.open(support.TESTFN, "r") as f: file = self.open(f.fileno(), "r", closefd=False) self.assertEqual(file.read(), "egg\n") file.seek(0) file.close() self.assertRaises(ValueError, file.read) def test_no_closefd_with_filename(self): # can't use closefd in combination with a file name self.assertRaises(ValueError, self.open, support.TESTFN, "r", closefd=False) def test_closefd_attr(self): with self.open(support.TESTFN, "wb") as f: f.write(b"egg\n") with self.open(support.TESTFN, "r") as f: self.assertEqual(f.buffer.raw.closefd, True) file = self.open(f.fileno(), "r", closefd=False) self.assertEqual(file.buffer.raw.closefd, False) def test_garbage_collection(self): # FileIO objects are collected, and collecting them flushes # all data to disk. with support.check_warnings(('', ResourceWarning)): f = self.FileIO(support.TESTFN, "wb") f.write(b"abcxxx") f.f = f wr = weakref.ref(f) del f support.gc_collect() self.assertIsNone(wr(), wr) with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.read(), b"abcxxx") def test_unbounded_file(self): # Issue #1174606: reading from an unbounded stream such as /dev/zero. zero = "/dev/zero" if not os.path.exists(zero): self.skipTest("{0} does not exist".format(zero)) if sys.maxsize > 0x7FFFFFFF: self.skipTest("test can only run in a 32-bit address space") if support.real_max_memuse < support._2G: self.skipTest("test requires at least 2 GiB of memory") with self.open(zero, "rb", buffering=0) as f: self.assertRaises(OverflowError, f.read) with self.open(zero, "rb") as f: self.assertRaises(OverflowError, f.read) with self.open(zero, "r") as f: self.assertRaises(OverflowError, f.read) def check_flush_error_on_close(self, *args, **kwargs): # Test that the file is closed despite failed flush # and that flush() is called before file closed. 
f = self.open(*args, **kwargs) closed = [] def bad_flush(): closed[:] = [f.closed] raise OSError() f.flush = bad_flush self.assertRaises(OSError, f.close) # exception not swallowed self.assertTrue(f.closed) self.assertTrue(closed) # flush() called self.assertFalse(closed[0]) # flush() called before file closed f.flush = lambda: None # break reference loop def test_flush_error_on_close(self): # raw file # Issue #5700: io.FileIO calls flush() after file closed self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0) fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'wb', buffering=0) fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False) os.close(fd) # buffered io self.check_flush_error_on_close(support.TESTFN, 'wb') fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'wb') fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'wb', closefd=False) os.close(fd) # text io self.check_flush_error_on_close(support.TESTFN, 'w') fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'w') fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT) self.check_flush_error_on_close(fd, 'w', closefd=False) os.close(fd) def test_multi_close(self): f = self.open(support.TESTFN, "wb", buffering=0) f.close() f.close() f.close() self.assertRaises(ValueError, f.flush) def test_RawIOBase_read(self): # Exercise the default limited RawIOBase.read(n) implementation (which # calls readinto() internally). 
rawio = self.MockRawIOWithoutRead((b"abc", b"d", None, b"efg", None)) self.assertEqual(rawio.read(2), b"ab") self.assertEqual(rawio.read(2), b"c") self.assertEqual(rawio.read(2), b"d") self.assertEqual(rawio.read(2), None) self.assertEqual(rawio.read(2), b"ef") self.assertEqual(rawio.read(2), b"g") self.assertEqual(rawio.read(2), None) self.assertEqual(rawio.read(2), b"") def test_types_have_dict(self): test = ( self.IOBase(), self.RawIOBase(), self.TextIOBase(), self.StringIO(), self.BytesIO() ) for obj in test: self.assertTrue(hasattr(obj, "__dict__")) def test_opener(self): with self.open(support.TESTFN, "w") as f: f.write("egg\n") fd = os.open(support.TESTFN, os.O_RDONLY) def opener(path, flags): return fd with self.open("non-existent", "r", opener=opener) as f: self.assertEqual(f.read(), "egg\n") def test_bad_opener_negative_1(self): # Issue #27066. def badopener(fname, flags): return -1 with self.assertRaises(ValueError) as cm: open('non-existent', 'r', opener=badopener) self.assertEqual(str(cm.exception), 'opener returned -1') def test_bad_opener_other_negative(self): # Issue #27066. 
def badopener(fname, flags): return -2 with self.assertRaises(ValueError) as cm: open('non-existent', 'r', opener=badopener) self.assertEqual(str(cm.exception), 'opener returned -2') def test_fileio_closefd(self): # Issue #4841 with self.open(__file__, 'rb') as f1, \ self.open(__file__, 'rb') as f2: fileio = self.FileIO(f1.fileno(), closefd=False) # .__init__() must not close f1 fileio.__init__(f2.fileno(), closefd=False) f1.readline() # .close() must not close f2 fileio.close() f2.readline() def test_nonbuffered_textio(self): with support.check_no_resource_warning(self): with self.assertRaises(ValueError): self.open(support.TESTFN, 'w', buffering=0) def test_invalid_newline(self): with support.check_no_resource_warning(self): with self.assertRaises(ValueError): self.open(support.TESTFN, 'w', newline='invalid') def test_buffered_readinto_mixin(self): # Test the implementation provided by BufferedIOBase class Stream(self.BufferedIOBase): def read(self, size): return b"12345" read1 = read stream = Stream() for method in ("readinto", "readinto1"): with self.subTest(method): buffer = byteslike(5) self.assertEqual(getattr(stream, method)(buffer), 5) self.assertEqual(bytes(buffer), b"12345") def test_fspath_support(self): def check_path_succeeds(path): with self.open(path, "w") as f: f.write("egg\n") with self.open(path, "r") as f: self.assertEqual(f.read(), "egg\n") check_path_succeeds(FakePath(support.TESTFN)) check_path_succeeds(FakePath(support.TESTFN.encode('utf-8'))) with self.open(support.TESTFN, "w") as f: bad_path = FakePath(f.fileno()) with self.assertRaises(TypeError): self.open(bad_path, 'w') bad_path = FakePath(None) with self.assertRaises(TypeError): self.open(bad_path, 'w') bad_path = FakePath(FloatingPointError) with self.assertRaises(FloatingPointError): self.open(bad_path, 'w') # ensure that refcounting is correct with some error conditions with self.assertRaisesRegex(ValueError, 'read/write/append mode'): self.open(FakePath(support.TESTFN), 'rwxa') def 
test_RawIOBase_readall(self): # Exercise the default unlimited RawIOBase.read() and readall() # implementations. rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg")) self.assertEqual(rawio.read(), b"abcdefg") rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg")) self.assertEqual(rawio.readall(), b"abcdefg") def test_BufferedIOBase_readinto(self): # Exercise the default BufferedIOBase.readinto() and readinto1() # implementations (which call read() or read1() internally). class Reader(self.BufferedIOBase): def __init__(self, avail): self.avail = avail def read(self, size): result = self.avail[:size] self.avail = self.avail[size:] return result def read1(self, size): """Returns no more than 5 bytes at once""" return self.read(min(size, 5)) tests = ( # (test method, total data available, read buffer size, expected # read size) ("readinto", 10, 5, 5), ("readinto", 10, 6, 6), # More than read1() can return ("readinto", 5, 6, 5), # Buffer larger than total available ("readinto", 6, 7, 6), ("readinto", 10, 0, 0), # Empty buffer ("readinto1", 10, 5, 5), # Result limited to single read1() call ("readinto1", 10, 6, 5), # Buffer larger than read1() can return ("readinto1", 5, 6, 5), # Buffer larger than total available ("readinto1", 6, 7, 5), ("readinto1", 10, 0, 0), # Empty buffer ) UNUSED_BYTE = 0x81 for test in tests: with self.subTest(test): method, avail, request, result = test reader = Reader(bytes(range(avail))) buffer = bytearray((UNUSED_BYTE,) * request) method = getattr(reader, method) self.assertEqual(method(buffer), result) self.assertEqual(len(buffer), request) self.assertSequenceEqual(buffer[:result], range(result)) unused = (UNUSED_BYTE,) * (request - result) self.assertSequenceEqual(buffer[result:], unused) self.assertEqual(len(reader.avail), avail - result) def test_close_assert(self): class R(self.IOBase): def __setattr__(self, name, value): pass def flush(self): raise OSError() f = R() # This would cause an assertion failure. 
        self.assertRaises(OSError, f.close)


class CIOTest(IOTest):

    def test_IOBase_finalize(self):
        # Issue #12149: segmentation fault on _PyIOBase_finalize when both a
        # class which inherits IOBase and an object of this class are caught
        # in a reference cycle and close() is already in the method cache.
        class MyIO(self.IOBase):
            def close(self):
                pass

        # create an instance to populate the method cache
        MyIO()
        obj = MyIO()
        obj.obj = obj
        wr = weakref.ref(obj)
        del MyIO
        del obj
        support.gc_collect()
        self.assertIsNone(wr(), wr)

class PyIOTest(IOTest):
    pass


@support.cpython_only
class APIMismatchTest(unittest.TestCase):

    def test_RawIOBase_io_in_pyio_match(self):
        """Test that pyio RawIOBase class has all c RawIOBase methods"""
        mismatch = support.detect_api_mismatch(pyio.RawIOBase, io.RawIOBase,
                                               ignore=('__weakref__',))
        self.assertEqual(mismatch, set(),
                         msg='Python RawIOBase does not have all C RawIOBase methods')

    def test_RawIOBase_pyio_in_io_match(self):
        """Test that c RawIOBase class has all pyio RawIOBase methods"""
        mismatch = support.detect_api_mismatch(io.RawIOBase, pyio.RawIOBase)
        self.assertEqual(mismatch, set(),
                         msg='C RawIOBase does not have all Python RawIOBase methods')


class CommonBufferedTests:
    # Tests common to BufferedReader, BufferedWriter and BufferedRandom

    def test_detach(self):
        raw = self.MockRawIO()
        buf = self.tp(raw)
        self.assertIs(buf.detach(), raw)
        self.assertRaises(ValueError, buf.detach)
        repr(buf)  # Should still work

    def test_fileno(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        self.assertEqual(42, bufio.fileno())

    def test_invalid_args(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        # Invalid whence
        self.assertRaises(ValueError, bufio.seek, 0, -1)
        self.assertRaises(ValueError, bufio.seek, 0, 9)

    def test_override_destructor(self):
        # Subclass overrides must all be invoked on destruction, in
        # __del__ -> close -> flush order (recorded as 1, 2, 3).
        tp = self.tp
        record = []
        class MyBufferedIO(tp):
            def __del__(self):
                record.append(1)
                try:
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(2)
                super().close()
            def flush(self):
record.append(3) super().flush() rawio = self.MockRawIO() bufio = MyBufferedIO(rawio) del bufio support.gc_collect() self.assertEqual(record, [1, 2, 3]) def test_context_manager(self): # Test usability as a context manager rawio = self.MockRawIO() bufio = self.tp(rawio) def _with(): with bufio: pass _with() # bufio should now be closed, and using it a second time should raise # a ValueError. self.assertRaises(ValueError, _with) def test_error_through_destructor(self): # Test that the exception state is not modified by a destructor, # even if close() fails. rawio = self.CloseFailureIO() def f(): self.tp(rawio).xyzzy with support.captured_output("stderr") as s: self.assertRaises(AttributeError, f) s = s.getvalue().strip() if s: # The destructor *may* have printed an unraisable error, check it self.assertEqual(len(s.splitlines()), 1) self.assertTrue(s.startswith("Exception OSError: "), s) self.assertTrue(s.endswith(" ignored"), s) def test_repr(self): raw = self.MockRawIO() b = self.tp(raw) clsname = "%s.%s" % (self.tp.__module__, self.tp.__qualname__) self.assertEqual(repr(b), "<%s>" % clsname) raw.name = "dummy" self.assertEqual(repr(b), "<%s name='dummy'>" % clsname) raw.name = b"dummy" self.assertEqual(repr(b), "<%s name=b'dummy'>" % clsname) def test_recursive_repr(self): # Issue #25455 raw = self.MockRawIO() b = self.tp(raw) with support.swap_attr(raw, 'name', b): try: repr(b) # Should not crash except RuntimeError: pass def test_flush_error_on_close(self): # Test that buffered file is closed despite failed flush # and that flush() is called before file closed. 
        raw = self.MockRawIO()
        closed = []
        def bad_flush():
            closed[:] = [b.closed, raw.closed]
            raise OSError()
        raw.flush = bad_flush
        b = self.tp(raw)
        self.assertRaises(OSError, b.close) # exception not swallowed
        self.assertTrue(b.closed)
        self.assertTrue(raw.closed)
        self.assertTrue(closed)      # flush() called
        self.assertFalse(closed[0])  # flush() called before file closed
        self.assertFalse(closed[1])
        raw.flush = lambda: None  # break reference loop

    def test_close_error_on_close(self):
        # If both flush() and the raw close() fail, close() must raise the
        # close error with the flush error chained as __context__.
        raw = self.MockRawIO()
        def bad_flush():
            raise OSError('flush')
        def bad_close():
            raise OSError('close')
        raw.close = bad_close
        b = self.tp(raw)
        b.flush = bad_flush
        with self.assertRaises(OSError) as err: # exception not swallowed
            b.close()
        self.assertEqual(err.exception.args, ('close',))
        self.assertIsInstance(err.exception.__context__, OSError)
        self.assertEqual(err.exception.__context__.args, ('flush',))
        self.assertFalse(b.closed)

    def test_nonnormalized_close_error_on_close(self):
        # Issue #21677
        raw = self.MockRawIO()
        def bad_flush():
            raise non_existing_flush
        def bad_close():
            raise non_existing_close
        raw.close = bad_close
        b = self.tp(raw)
        b.flush = bad_flush
        with self.assertRaises(NameError) as err: # exception not swallowed
            b.close()
        self.assertIn('non_existing_close', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('non_existing_flush', str(err.exception.__context__))
        self.assertFalse(b.closed)

    def test_multi_close(self):
        # close() must be idempotent; only post-close use raises.
        raw = self.MockRawIO()
        b = self.tp(raw)
        b.close()
        b.close()
        b.close()
        self.assertRaises(ValueError, b.flush)

    def test_unseekable(self):
        bufio = self.tp(self.MockUnseekableIO(b"A" * 10))
        self.assertRaises(self.UnsupportedOperation, bufio.tell)
        self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)

    def test_readonly_attributes(self):
        raw = self.MockRawIO()
        buf = self.tp(raw)
        x = self.MockRawIO()
        with self.assertRaises(AttributeError):
            buf.raw = x


class SizeofTest:

    @support.cpython_only
    def test_sizeof(self):
        bufsize1 = 4096
        bufsize2
= 8192 rawio = self.MockRawIO() bufio = self.tp(rawio, buffer_size=bufsize1) size = sys.getsizeof(bufio) - bufsize1 rawio = self.MockRawIO() bufio = self.tp(rawio, buffer_size=bufsize2) self.assertEqual(sys.getsizeof(bufio), size + bufsize2) @support.cpython_only def test_buffer_freeing(self) : bufsize = 4096 rawio = self.MockRawIO() bufio = self.tp(rawio, buffer_size=bufsize) size = sys.getsizeof(bufio) - bufsize bufio.close() self.assertEqual(sys.getsizeof(bufio), size) class BufferedReaderTest(unittest.TestCase, CommonBufferedTests): read_mode = "rb" def test_constructor(self): rawio = self.MockRawIO([b"abc"]) bufio = self.tp(rawio) bufio.__init__(rawio) bufio.__init__(rawio, buffer_size=1024) bufio.__init__(rawio, buffer_size=16) self.assertEqual(b"abc", bufio.read()) self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0) self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16) self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1) rawio = self.MockRawIO([b"abc"]) bufio.__init__(rawio) self.assertEqual(b"abc", bufio.read()) def test_uninitialized(self): bufio = self.tp.__new__(self.tp) del bufio bufio = self.tp.__new__(self.tp) self.assertRaisesRegex((ValueError, AttributeError), 'uninitialized|has no attribute', bufio.read, 0) bufio.__init__(self.MockRawIO()) self.assertEqual(bufio.read(0), b'') def test_read(self): for arg in (None, 7): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertEqual(b"abcdefg", bufio.read(arg)) # Invalid args self.assertRaises(ValueError, bufio.read, -2) def test_read1(self): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertEqual(b"a", bufio.read(1)) self.assertEqual(b"b", bufio.read1(1)) self.assertEqual(rawio._reads, 1) self.assertEqual(b"", bufio.read1(0)) self.assertEqual(b"c", bufio.read1(100)) self.assertEqual(rawio._reads, 1) self.assertEqual(b"d", bufio.read1(100)) self.assertEqual(rawio._reads, 2) self.assertEqual(b"efg", 
bufio.read1(100)) self.assertEqual(rawio._reads, 3) self.assertEqual(b"", bufio.read1(100)) self.assertEqual(rawio._reads, 4) def test_read1_arbitrary(self): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertEqual(b"a", bufio.read(1)) self.assertEqual(b"bc", bufio.read1()) self.assertEqual(b"d", bufio.read1()) self.assertEqual(b"efg", bufio.read1(-1)) self.assertEqual(rawio._reads, 3) self.assertEqual(b"", bufio.read1()) self.assertEqual(rawio._reads, 4) def test_readinto(self): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) b = bytearray(2) self.assertEqual(bufio.readinto(b), 2) self.assertEqual(b, b"ab") self.assertEqual(bufio.readinto(b), 2) self.assertEqual(b, b"cd") self.assertEqual(bufio.readinto(b), 2) self.assertEqual(b, b"ef") self.assertEqual(bufio.readinto(b), 1) self.assertEqual(b, b"gf") self.assertEqual(bufio.readinto(b), 0) self.assertEqual(b, b"gf") rawio = self.MockRawIO((b"abc", None)) bufio = self.tp(rawio) self.assertEqual(bufio.readinto(b), 2) self.assertEqual(b, b"ab") self.assertEqual(bufio.readinto(b), 1) self.assertEqual(b, b"cb") def test_readinto1(self): buffer_size = 10 rawio = self.MockRawIO((b"abc", b"de", b"fgh", b"jkl")) bufio = self.tp(rawio, buffer_size=buffer_size) b = bytearray(2) self.assertEqual(bufio.peek(3), b'abc') self.assertEqual(rawio._reads, 1) self.assertEqual(bufio.readinto1(b), 2) self.assertEqual(b, b"ab") self.assertEqual(rawio._reads, 1) self.assertEqual(bufio.readinto1(b), 1) self.assertEqual(b[:1], b"c") self.assertEqual(rawio._reads, 1) self.assertEqual(bufio.readinto1(b), 2) self.assertEqual(b, b"de") self.assertEqual(rawio._reads, 2) b = bytearray(2*buffer_size) self.assertEqual(bufio.peek(3), b'fgh') self.assertEqual(rawio._reads, 3) self.assertEqual(bufio.readinto1(b), 6) self.assertEqual(b[:6], b"fghjkl") self.assertEqual(rawio._reads, 4) def test_readinto_array(self): buffer_size = 60 data = b"a" * 26 rawio = self.MockRawIO((data,)) bufio = 
self.tp(rawio, buffer_size=buffer_size) # Create an array with element size > 1 byte b = array.array('i', b'x' * 32) assert len(b) != 16 # Read into it. We should get as many *bytes* as we can fit into b # (which is more than the number of elements) n = bufio.readinto(b) self.assertGreater(n, len(b)) # Check that old contents of b are preserved bm = memoryview(b).cast('B') self.assertLess(n, len(bm)) self.assertEqual(bm[:n], data[:n]) self.assertEqual(bm[n:], b'x' * (len(bm[n:]))) def test_readinto1_array(self): buffer_size = 60 data = b"a" * 26 rawio = self.MockRawIO((data,)) bufio = self.tp(rawio, buffer_size=buffer_size) # Create an array with element size > 1 byte b = array.array('i', b'x' * 32) assert len(b) != 16 # Read into it. We should get as many *bytes* as we can fit into b # (which is more than the number of elements) n = bufio.readinto1(b) self.assertGreater(n, len(b)) # Check that old contents of b are preserved bm = memoryview(b).cast('B') self.assertLess(n, len(bm)) self.assertEqual(bm[:n], data[:n]) self.assertEqual(bm[n:], b'x' * (len(bm[n:]))) def test_readlines(self): def bufio(): rawio = self.MockRawIO((b"abc\n", b"d\n", b"ef")) return self.tp(rawio) self.assertEqual(bufio().readlines(), [b"abc\n", b"d\n", b"ef"]) self.assertEqual(bufio().readlines(5), [b"abc\n", b"d\n"]) self.assertEqual(bufio().readlines(None), [b"abc\n", b"d\n", b"ef"]) def test_buffering(self): data = b"abcdefghi" dlen = len(data) tests = [ [ 100, [ 3, 1, 4, 8 ], [ dlen, 0 ] ], [ 100, [ 3, 3, 3], [ dlen ] ], [ 4, [ 1, 2, 4, 2 ], [ 4, 4, 1 ] ], ] for bufsize, buf_read_sizes, raw_read_sizes in tests: rawio = self.MockFileIO(data) bufio = self.tp(rawio, buffer_size=bufsize) pos = 0 for nbytes in buf_read_sizes: self.assertEqual(bufio.read(nbytes), data[pos:pos+nbytes]) pos += nbytes # this is mildly implementation-dependent self.assertEqual(rawio.read_history, raw_read_sizes) def test_read_non_blocking(self): # Inject some None's in there to simulate EWOULDBLOCK rawio = 
self.MockRawIO((b"abc", b"d", None, b"efg", None, None, None)) bufio = self.tp(rawio) self.assertEqual(b"abcd", bufio.read(6)) self.assertEqual(b"e", bufio.read(1)) self.assertEqual(b"fg", bufio.read()) self.assertEqual(b"", bufio.peek(1)) self.assertIsNone(bufio.read()) self.assertEqual(b"", bufio.read()) rawio = self.MockRawIO((b"a", None, None)) self.assertEqual(b"a", rawio.readall()) self.assertIsNone(rawio.readall()) def test_read_past_eof(self): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertEqual(b"abcdefg", bufio.read(9000)) def test_read_all(self): rawio = self.MockRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertEqual(b"abcdefg", bufio.read()) @support.requires_resource('cpu') def test_threads(self): try: # Write out many bytes with exactly the same number of 0's, # 1's... 255's. This will help us check that concurrent reading # doesn't duplicate or forget contents. N = 1000 l = list(range(256)) * N random.shuffle(l) s = bytes(bytearray(l)) with self.open(support.TESTFN, "wb") as f: f.write(s) with self.open(support.TESTFN, self.read_mode, buffering=0) as raw: bufio = self.tp(raw, 8) errors = [] results = [] def f(): try: # Intra-buffer read then buffer-flushing read for n in cycle([1, 19]): s = bufio.read(n) if not s: break # list.append() is atomic results.append(s) except Exception as e: errors.append(e) raise threads = [threading.Thread(target=f) for x in range(20)] with support.start_threads(threads): time.sleep(0.02) # yield self.assertFalse(errors, "the following exceptions were caught: %r" % errors) s = b''.join(results) for i in range(256): c = bytes(bytearray([i])) self.assertEqual(s.count(c), N) finally: support.unlink(support.TESTFN) def test_unseekable(self): bufio = self.tp(self.MockUnseekableIO(b"A" * 10)) self.assertRaises(self.UnsupportedOperation, bufio.tell) self.assertRaises(self.UnsupportedOperation, bufio.seek, 0) bufio.read(1) self.assertRaises(self.UnsupportedOperation, bufio.seek, 
0) self.assertRaises(self.UnsupportedOperation, bufio.tell) def test_misbehaved_io(self): rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg")) bufio = self.tp(rawio) self.assertRaises(OSError, bufio.seek, 0) self.assertRaises(OSError, bufio.tell) def test_no_extraneous_read(self): # Issue #9550; when the raw IO object has satisfied the read request, # we should not issue any additional reads, otherwise it may block # (e.g. socket). bufsize = 16 for n in (2, bufsize - 1, bufsize, bufsize + 1, bufsize * 2): rawio = self.MockRawIO([b"x" * n]) bufio = self.tp(rawio, bufsize) self.assertEqual(bufio.read(n), b"x" * n) # Simple case: one raw read is enough to satisfy the request. self.assertEqual(rawio._extraneous_reads, 0, "failed for {}: {} != 0".format(n, rawio._extraneous_reads)) # A more complex case where two raw reads are needed to satisfy # the request. rawio = self.MockRawIO([b"x" * (n - 1), b"x"]) bufio = self.tp(rawio, bufsize) self.assertEqual(bufio.read(n), b"x" * n) self.assertEqual(rawio._extraneous_reads, 0, "failed for {}: {} != 0".format(n, rawio._extraneous_reads)) def test_read_on_closed(self): # Issue #23796 b = io.BufferedReader(io.BytesIO(b"12")) b.read(1) b.close() self.assertRaises(ValueError, b.peek) self.assertRaises(ValueError, b.read1, 1) class CBufferedReaderTest(BufferedReaderTest, SizeofTest): tp = io.BufferedReader def test_constructor(self): BufferedReaderTest.test_constructor(self) # The allocation can succeed on 32-bit builds, e.g. with more # than 2 GiB RAM and a 64-bit kernel. 
        if sys.maxsize > 0x7FFFFFFF:
            rawio = self.MockRawIO()
            bufio = self.tp(rawio)
            self.assertRaises((OverflowError, MemoryError, ValueError),
                              bufio.__init__, rawio, sys.maxsize)

    def test_initialization(self):
        # A failed re-__init__ with a bad buffer size must leave the object
        # unusable (read() raises) rather than half-initialized.
        rawio = self.MockRawIO([b"abc"])
        bufio = self.tp(rawio)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.read)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.read)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        self.assertRaises(ValueError, bufio.read)

    def test_misbehaved_io_read(self):
        rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
        bufio = self.tp(rawio)
        # _pyio.BufferedReader seems to implement reading different, so that
        # checking this is not so easy.
        self.assertRaises(OSError, bufio.read, 10)

    def test_garbage_collection(self):
        # C BufferedReader objects are collected.
        # The Python version has __del__, so it ends into gc.garbage instead
        self.addCleanup(support.unlink, support.TESTFN)
        with support.check_warnings(('', ResourceWarning)):
            rawio = self.FileIO(support.TESTFN, "w+b")
            f = self.tp(rawio)
            f.f = f
            wr = weakref.ref(f)
            del f
            support.gc_collect()
        self.assertIsNone(wr(), wr)

    def test_args_error(self):
        # Issue #17275
        with self.assertRaisesRegex(TypeError, "BufferedReader"):
            self.tp(io.BytesIO(), 1024, 1024, 1024)


class PyBufferedReaderTest(BufferedReaderTest):
    tp = pyio.BufferedReader


class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
    write_mode = "wb"

    def test_constructor(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        bufio.__init__(rawio)
        bufio.__init__(rawio, buffer_size=1024)
        bufio.__init__(rawio, buffer_size=16)
        self.assertEqual(3, bufio.write(b"abc"))
        bufio.flush()
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        bufio.__init__(rawio)
self.assertEqual(3, bufio.write(b"ghi")) bufio.flush() self.assertEqual(b"".join(rawio._write_stack), b"abcghi") def test_uninitialized(self): bufio = self.tp.__new__(self.tp) del bufio bufio = self.tp.__new__(self.tp) self.assertRaisesRegex((ValueError, AttributeError), 'uninitialized|has no attribute', bufio.write, b'') bufio.__init__(self.MockRawIO()) self.assertEqual(bufio.write(b''), 0) def test_detach_flush(self): raw = self.MockRawIO() buf = self.tp(raw) buf.write(b"howdy!") self.assertFalse(raw._write_stack) buf.detach() self.assertEqual(raw._write_stack, [b"howdy!"]) def test_write(self): # Write to the buffered IO but don't overflow the buffer. writer = self.MockRawIO() bufio = self.tp(writer, 8) bufio.write(b"abc") self.assertFalse(writer._write_stack) buffer = bytearray(b"def") bufio.write(buffer) buffer[:] = b"***" # Overwrite our copy of the data bufio.flush() self.assertEqual(b"".join(writer._write_stack), b"abcdef") def test_write_overflow(self): writer = self.MockRawIO() bufio = self.tp(writer, 8) contents = b"abcdefghijklmnop" for n in range(0, len(contents), 3): bufio.write(contents[n:n+3]) flushed = b"".join(writer._write_stack) # At least (total - 8) bytes were implicitly flushed, perhaps more # depending on the implementation. self.assertTrue(flushed.startswith(contents[:-8]), flushed) def check_writes(self, intermediate_func): # Lots of writes, test the flushed output is as expected. 
contents = bytes(range(256)) * 1000 n = 0 writer = self.MockRawIO() bufio = self.tp(writer, 13) # Generator of write sizes: repeat each N 15 times then proceed to N+1 def gen_sizes(): for size in count(1): for i in range(15): yield size sizes = gen_sizes() while n < len(contents): size = min(next(sizes), len(contents) - n) self.assertEqual(bufio.write(contents[n:n+size]), size) intermediate_func(bufio) n += size bufio.flush() self.assertEqual(contents, b"".join(writer._write_stack)) def test_writes(self): self.check_writes(lambda bufio: None) def test_writes_and_flushes(self): self.check_writes(lambda bufio: bufio.flush()) def test_writes_and_seeks(self): def _seekabs(bufio): pos = bufio.tell() bufio.seek(pos + 1, 0) bufio.seek(pos - 1, 0) bufio.seek(pos, 0) self.check_writes(_seekabs) def _seekrel(bufio): pos = bufio.seek(0, 1) bufio.seek(+1, 1) bufio.seek(-1, 1) bufio.seek(pos, 0) self.check_writes(_seekrel) def test_writes_and_truncates(self): self.check_writes(lambda bufio: bufio.truncate(bufio.tell())) def test_write_non_blocking(self): raw = self.MockNonBlockWriterIO() bufio = self.tp(raw, 8) self.assertEqual(bufio.write(b"abcd"), 4) self.assertEqual(bufio.write(b"efghi"), 5) # 1 byte will be written, the rest will be buffered raw.block_on(b"k") self.assertEqual(bufio.write(b"jklmn"), 5) # 8 bytes will be written, 8 will be buffered and the rest will be lost raw.block_on(b"0") try: bufio.write(b"opqrwxyz0123456789") except self.BlockingIOError as e: written = e.characters_written else: self.fail("BlockingIOError should have been raised") self.assertEqual(written, 16) self.assertEqual(raw.pop_written(), b"abcdefghijklmnopqrwxyz") self.assertEqual(bufio.write(b"ABCDEFGHI"), 9) s = raw.pop_written() # Previously buffered bytes were flushed self.assertTrue(s.startswith(b"01234567A"), s) def test_write_and_rewind(self): raw = io.BytesIO() bufio = self.tp(raw, 4) self.assertEqual(bufio.write(b"abcdef"), 6) self.assertEqual(bufio.tell(), 6) bufio.seek(0, 0) 
self.assertEqual(bufio.write(b"XY"), 2) bufio.seek(6, 0) self.assertEqual(raw.getvalue(), b"XYcdef") self.assertEqual(bufio.write(b"123456"), 6) bufio.flush() self.assertEqual(raw.getvalue(), b"XYcdef123456") def test_flush(self): writer = self.MockRawIO() bufio = self.tp(writer, 8) bufio.write(b"abc") bufio.flush() self.assertEqual(b"abc", writer._write_stack[0]) def test_writelines(self): l = [b'ab', b'cd', b'ef'] writer = self.MockRawIO() bufio = self.tp(writer, 8) bufio.writelines(l) bufio.flush() self.assertEqual(b''.join(writer._write_stack), b'abcdef') def test_writelines_userlist(self): l = UserList([b'ab', b'cd', b'ef']) writer = self.MockRawIO() bufio = self.tp(writer, 8) bufio.writelines(l) bufio.flush() self.assertEqual(b''.join(writer._write_stack), b'abcdef') def test_writelines_error(self): writer = self.MockRawIO() bufio = self.tp(writer, 8) self.assertRaises(TypeError, bufio.writelines, [1, 2, 3]) self.assertRaises(TypeError, bufio.writelines, None) self.assertRaises(TypeError, bufio.writelines, 'abc') def test_destructor(self): writer = self.MockRawIO() bufio = self.tp(writer, 8) bufio.write(b"abc") del bufio support.gc_collect() self.assertEqual(b"abc", writer._write_stack[0]) def test_truncate(self): # Truncate implicitly flushes the buffer. self.addCleanup(support.unlink, support.TESTFN) with self.open(support.TESTFN, self.write_mode, buffering=0) as raw: bufio = self.tp(raw, 8) bufio.write(b"abcdef") self.assertEqual(bufio.truncate(3), 3) self.assertEqual(bufio.tell(), 6) with self.open(support.TESTFN, "rb", buffering=0) as f: self.assertEqual(f.read(), b"abc") def test_truncate_after_write(self): # Ensure that truncate preserves the file position after # writes longer than the buffer size. 
# Issue: https://bugs.python.org/issue32228 self.addCleanup(support.unlink, support.TESTFN) with self.open(support.TESTFN, "wb") as f: # Fill with some buffer f.write(b'\x00' * 10000) buffer_sizes = [8192, 4096, 200] for buffer_size in buffer_sizes: with self.open(support.TESTFN, "r+b", buffering=buffer_size) as f: f.write(b'\x00' * (buffer_size + 1)) # After write write_pos and write_end are set to 0 f.read(1) # read operation makes sure that pos != raw_pos f.truncate() self.assertEqual(f.tell(), buffer_size + 2) @support.requires_resource('cpu') def test_threads(self): try: # Write out many bytes from many threads and test they were # all flushed. N = 1000 contents = bytes(range(256)) * N sizes = cycle([1, 19]) n = 0 queue = deque() while n < len(contents): size = next(sizes) queue.append(contents[n:n+size]) n += size del contents # We use a real file object because it allows us to # exercise situations where the GIL is released before # writing the buffer to the raw streams. This is in addition # to concurrency issues due to switching threads in the middle # of Python code. 
with self.open(support.TESTFN, self.write_mode, buffering=0) as raw: bufio = self.tp(raw, 8) errors = [] def f(): try: while True: try: s = queue.popleft() except IndexError: return bufio.write(s) except Exception as e: errors.append(e) raise threads = [threading.Thread(target=f) for x in range(20)] with support.start_threads(threads): time.sleep(0.02) # yield self.assertFalse(errors, "the following exceptions were caught: %r" % errors) bufio.close() with self.open(support.TESTFN, "rb") as f: s = f.read() for i in range(256): self.assertEqual(s.count(bytes([i])), N) finally: support.unlink(support.TESTFN) def test_misbehaved_io(self): rawio = self.MisbehavedRawIO() bufio = self.tp(rawio, 5) self.assertRaises(OSError, bufio.seek, 0) self.assertRaises(OSError, bufio.tell) self.assertRaises(OSError, bufio.write, b"abcdef") def test_max_buffer_size_removal(self): with self.assertRaises(TypeError): self.tp(self.MockRawIO(), 8, 12) def test_write_error_on_close(self): raw = self.MockRawIO() def bad_write(b): raise OSError() raw.write = bad_write b = self.tp(raw) b.write(b'spam') self.assertRaises(OSError, b.close) # exception not swallowed self.assertTrue(b.closed) def test_slow_close_from_thread(self): # Issue #31976 rawio = self.SlowFlushRawIO() bufio = self.tp(rawio, 8) t = threading.Thread(target=bufio.close) t.start() rawio.in_flush.wait() self.assertRaises(ValueError, bufio.write, b'spam') self.assertTrue(bufio.closed) t.join() class CBufferedWriterTest(BufferedWriterTest, SizeofTest): tp = io.BufferedWriter def test_constructor(self): BufferedWriterTest.test_constructor(self) # The allocation can succeed on 32-bit builds, e.g. with more # than 2 GiB RAM and a 64-bit kernel. 
if sys.maxsize > 0x7FFFFFFF: rawio = self.MockRawIO() bufio = self.tp(rawio) self.assertRaises((OverflowError, MemoryError, ValueError), bufio.__init__, rawio, sys.maxsize) def test_initialization(self): rawio = self.MockRawIO() bufio = self.tp(rawio) self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0) self.assertRaises(ValueError, bufio.write, b"def") self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16) self.assertRaises(ValueError, bufio.write, b"def") self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1) self.assertRaises(ValueError, bufio.write, b"def") def test_garbage_collection(self): # C BufferedWriter objects are collected, and collecting them flushes # all data to disk. # The Python version has __del__, so it ends into gc.garbage instead self.addCleanup(support.unlink, support.TESTFN) with support.check_warnings(('', ResourceWarning)): rawio = self.FileIO(support.TESTFN, "w+b") f = self.tp(rawio) f.write(b"123xxx") f.x = f wr = weakref.ref(f) del f support.gc_collect() self.assertIsNone(wr(), wr) with self.open(support.TESTFN, "rb") as f: self.assertEqual(f.read(), b"123xxx") def test_args_error(self): # Issue #17275 with self.assertRaisesRegex(TypeError, "BufferedWriter"): self.tp(io.BytesIO(), 1024, 1024, 1024) class PyBufferedWriterTest(BufferedWriterTest): tp = pyio.BufferedWriter class BufferedRWPairTest(unittest.TestCase): def test_constructor(self): pair = self.tp(self.MockRawIO(), self.MockRawIO()) self.assertFalse(pair.closed) def test_uninitialized(self): pair = self.tp.__new__(self.tp) del pair pair = self.tp.__new__(self.tp) self.assertRaisesRegex((ValueError, AttributeError), 'uninitialized|has no attribute', pair.read, 0) self.assertRaisesRegex((ValueError, AttributeError), 'uninitialized|has no attribute', pair.write, b'') pair.__init__(self.MockRawIO(), self.MockRawIO()) self.assertEqual(pair.read(0), b'') self.assertEqual(pair.write(b''), 0) def test_detach(self): pair = 
self.tp(self.MockRawIO(), self.MockRawIO())
        # (continuation of test_detach) a second detach must fail
        self.assertRaises(self.UnsupportedOperation, pair.detach)

    def test_constructor_max_buffer_size_removal(self):
        # The old max_buffer_size argument was removed; passing it is an error.
        with self.assertRaises(TypeError):
            self.tp(self.MockRawIO(), self.MockRawIO(), 8, 12)

    def test_constructor_with_not_readable(self):
        class NotReadable(MockRawIO):
            def readable(self):
                return False

        self.assertRaises(OSError, self.tp, NotReadable(), self.MockRawIO())

    def test_constructor_with_not_writeable(self):
        class NotWriteable(MockRawIO):
            def writable(self):
                return False

        self.assertRaises(OSError, self.tp, self.MockRawIO(), NotWriteable())

    def test_read(self):
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertEqual(pair.read(3), b"abc")
        self.assertEqual(pair.read(1), b"d")
        self.assertEqual(pair.read(), b"ef")
        pair = self.tp(self.BytesIO(b"abc"), self.MockRawIO())
        self.assertEqual(pair.read(None), b"abc")

    def test_readlines(self):
        pair = lambda: self.tp(self.BytesIO(b"abc\ndef\nh"), self.MockRawIO())
        self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
        self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
        self.assertEqual(pair().readlines(5), [b"abc\n", b"def\n"])

    def test_read1(self):
        # .read1() is delegated to the underlying reader object, so this test
        # can be shallow.
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertEqual(pair.read1(3), b"abc")
        self.assertEqual(pair.read1(), b"def")

    def test_readinto(self):
        for method in ("readinto", "readinto1"):
            with self.subTest(method):
                pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

                data = byteslike(b'\0' * 5)
                self.assertEqual(getattr(pair, method)(data), 5)
                self.assertEqual(bytes(data), b"abcde")

    def test_write(self):
        w = self.MockRawIO()
        pair = self.tp(self.MockRawIO(), w)

        pair.write(b"abc")
        pair.flush()
        buffer = bytearray(b"def")
        pair.write(buffer)
        buffer[:] = b"***"  # Overwrite our copy of the data
        pair.flush()
        self.assertEqual(w._write_stack, [b"abc", b"def"])

    def test_peek(self):
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertTrue(pair.peek(3).startswith(b"abc"))
        self.assertEqual(pair.read(3), b"abc")

    def test_readable(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertTrue(pair.readable())

    def test_writeable(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertTrue(pair.writable())

    def test_seekable(self):
        # BufferedRWPairs are never seekable, even if their readers and writers
        # are.
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertFalse(pair.seekable())

    # .flush() is delegated to the underlying writer object and has been
    # tested in the test_write method.

    def test_close_and_closed(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertFalse(pair.closed)
        pair.close()
        self.assertTrue(pair.closed)

    def test_reader_close_error_on_close(self):
        # A NameError raised while closing the reader propagates, yet the
        # writer half is still closed (and the pair reports closed).
        def reader_close():
            reader_non_existing
        reader = self.MockRawIO()
        reader.close = reader_close
        writer = self.MockRawIO()
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('reader_non_existing', str(err.exception))
        self.assertTrue(pair.closed)
        self.assertFalse(reader.closed)
        self.assertTrue(writer.closed)

    def test_writer_close_error_on_close(self):
        # The writer is closed first; if that fails, the reader is still
        # closed but the pair itself stays open.
        def writer_close():
            writer_non_existing
        reader = self.MockRawIO()
        writer = self.MockRawIO()
        writer.close = writer_close
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('writer_non_existing', str(err.exception))
        self.assertFalse(pair.closed)
        self.assertTrue(reader.closed)
        self.assertFalse(writer.closed)

    def test_reader_writer_close_error_on_close(self):
        # When both halves fail, the reader error is raised with the writer
        # error chained as __context__.
        def reader_close():
            reader_non_existing
        def writer_close():
            writer_non_existing
        reader = self.MockRawIO()
        reader.close = reader_close
        writer = self.MockRawIO()
        writer.close = writer_close
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('reader_non_existing', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('writer_non_existing', str(err.exception.__context__))
        self.assertFalse(pair.closed)
        self.assertFalse(reader.closed)
        self.assertFalse(writer.closed)

    def test_isatty(self):
        # The pair is a tty if either half is a tty.
        class SelectableIsAtty(MockRawIO):
            def __init__(self, isatty):
                MockRawIO.__init__(self)
                self._isatty = isatty

            def isatty(self):
                return self._isatty

        pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(False))
        self.assertFalse(pair.isatty())

        pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(False))
        self.assertTrue(pair.isatty())

        pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(True))
        self.assertTrue(pair.isatty())

        pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(True))
        self.assertTrue(pair.isatty())

    def test_weakref_clearing(self):
        brw = self.tp(self.MockRawIO(), self.MockRawIO())
        ref = weakref.ref(brw)
        brw = None
        ref = None # Shouldn't segfault.


# Concrete variants of BufferedRWPairTest against the C and Python
# implementations respectively.
class CBufferedRWPairTest(BufferedRWPairTest):
    tp = io.BufferedRWPair

class PyBufferedRWPairTest(BufferedRWPairTest):
    tp = pyio.BufferedRWPair


class BufferedRandomTest(BufferedReaderTest, BufferedWriterTest):
    # Inherits both the reader and the writer test suites; the modes below
    # are used by the shared helpers in those base classes.
    read_mode = "rb+"
    write_mode = "wb+"

    def test_constructor(self):
        BufferedReaderTest.test_constructor(self)
        BufferedWriterTest.test_constructor(self)

    def test_uninitialized(self):
        BufferedReaderTest.test_uninitialized(self)
        BufferedWriterTest.test_uninitialized(self)

    def test_read_and_write(self):
        raw = self.MockRawIO((b"asdf", b"ghjk"))
        rw = self.tp(raw, 8)

        self.assertEqual(b"as", rw.read(2))
        rw.write(b"ddd")
        rw.write(b"eee")
        self.assertFalse(raw._write_stack) # Buffer writes
        self.assertEqual(b"ghjk", rw.read())
        self.assertEqual(b"dddeee", raw._write_stack[0])

    def test_seek_and_tell(self):
        raw = self.BytesIO(b"asdfghjkl")
        rw = self.tp(raw)

        self.assertEqual(b"as", rw.read(2))
        self.assertEqual(2, rw.tell())
        rw.seek(0, 0)
        self.assertEqual(b"asdf", rw.read(4))

        rw.write(b"123f")
        rw.seek(0, 0)
        self.assertEqual(b"asdf123fl", rw.read())
        self.assertEqual(9, rw.tell())
        rw.seek(-4, 2)
        self.assertEqual(5, rw.tell())
        rw.seek(2, 1)
        self.assertEqual(7, rw.tell())
        self.assertEqual(b"fl", rw.read(11))
        rw.flush()
        self.assertEqual(b"asdf123fl", raw.getvalue())

        self.assertRaises(TypeError, rw.seek, 0.0)

    def check_flush_and_read(self, read_func):
        # Shared helper: interleave reads (via read_func) with buffered
        # writes and flushes, checking position bookkeeping throughout.
        raw = self.BytesIO(b"abcdefghi")
        bufio = self.tp(raw)

        self.assertEqual(b"ab", read_func(bufio, 2))
        bufio.write(b"12")
        self.assertEqual(b"ef", read_func(bufio, 2))
        self.assertEqual(6, bufio.tell())
        bufio.flush()
        self.assertEqual(6, bufio.tell())
        self.assertEqual(b"ghi", read_func(bufio))
        raw.seek(0, 0)
        raw.write(b"XYZ")
        # flush() resets the read buffer
        bufio.flush()
        bufio.seek(0, 0)
        self.assertEqual(b"XYZ", read_func(bufio, 3))

    def test_flush_and_read(self):
        self.check_flush_and_read(lambda bufio, *args: bufio.read(*args))

    def test_flush_and_readinto(self):
        def _readinto(bufio, n=-1):
            b = bytearray(n if n >= 0 else 9999)
            n = bufio.readinto(b)
            return bytes(b[:n])
        self.check_flush_and_read(_readinto)

    def test_flush_and_peek(self):
        def _peek(bufio, n=-1):
            # This relies on the fact that the buffer can contain the whole
            # raw stream, otherwise peek() can return less.
            b = bufio.peek(n)
            if n != -1:
                b = b[:n]
            bufio.seek(len(b), 1)
            return b
        self.check_flush_and_read(_peek)

    def test_flush_and_write(self):
        raw = self.BytesIO(b"abcdefghi")
        bufio = self.tp(raw)

        bufio.write(b"123")
        bufio.flush()
        bufio.write(b"45")
        bufio.flush()
        bufio.seek(0, 0)
        self.assertEqual(b"12345fghi", raw.getvalue())
        self.assertEqual(b"12345fghi", bufio.read())

    def test_threads(self):
        BufferedReaderTest.test_threads(self)
        BufferedWriterTest.test_threads(self)

    def test_writes_and_peek(self):
        def _peek(bufio):
            bufio.peek(1)
        self.check_writes(_peek)
        def _peek(bufio):
            pos = bufio.tell()
            bufio.seek(-1, 1)
            bufio.peek(1)
            bufio.seek(pos, 0)
        self.check_writes(_peek)

    def test_writes_and_reads(self):
        def _read(bufio):
            bufio.seek(-1, 1)
            bufio.read(1)
        self.check_writes(_read)

    def test_writes_and_read1s(self):
        def _read1(bufio):
            bufio.seek(-1, 1)
            bufio.read1(1)
        self.check_writes(_read1)

    def test_writes_and_readintos(self):
        def _read(bufio):
            bufio.seek(-1, 1)
            bufio.readinto(bytearray(1))
        self.check_writes(_read)

    def test_write_after_readahead(self):
        # Issue #6629: writing after the buffer was filled by readahead should
        # first rewind the raw stream.
        for overwrite_size in [1, 5]:
            raw = self.BytesIO(b"A" * 10)
            bufio = self.tp(raw, 4)
            # Trigger readahead
            self.assertEqual(bufio.read(1), b"A")
            self.assertEqual(bufio.tell(), 1)
            # Overwriting should rewind the raw stream if it needs so
            bufio.write(b"B" * overwrite_size)
            self.assertEqual(bufio.tell(), overwrite_size + 1)
            # If the write size was smaller than the buffer size, flush() and
            # check that rewind happens.
            bufio.flush()
            self.assertEqual(bufio.tell(), overwrite_size + 1)
            s = raw.getvalue()
            self.assertEqual(s,
                b"A" + b"B" * overwrite_size + b"A" * (9 - overwrite_size))

    def test_write_rewind_write(self):
        # Various combinations of reading / writing / seeking backwards / writing again
        def mutate(bufio, pos1, pos2):
            assert pos2 >= pos1
            # Fill the buffer
            bufio.seek(pos1)
            bufio.read(pos2 - pos1)
            bufio.write(b'\x02')
            # This writes earlier than the previous write, but still inside
            # the buffer.
            bufio.seek(pos1)
            bufio.write(b'\x01')

        b = b"\x80\x81\x82\x83\x84"
        for i in range(0, len(b)):
            for j in range(i, len(b)):
                raw = self.BytesIO(b)
                bufio = self.tp(raw, 100)
                mutate(bufio, i, j)
                bufio.flush()
                expected = bytearray(b)
                expected[j] = 2
                expected[i] = 1
                self.assertEqual(raw.getvalue(), expected,
                                 "failed result for i=%d, j=%d" % (i, j))

    def test_truncate_after_read_or_write(self):
        raw = self.BytesIO(b"A" * 10)
        bufio = self.tp(raw, 100)
        self.assertEqual(bufio.read(2), b"AA") # the read buffer gets filled
        self.assertEqual(bufio.truncate(), 2)
        self.assertEqual(bufio.write(b"BB"), 2) # the write buffer increases
        self.assertEqual(bufio.truncate(), 4)

    def test_misbehaved_io(self):
        BufferedReaderTest.test_misbehaved_io(self)
        BufferedWriterTest.test_misbehaved_io(self)

    def test_interleaved_read_write(self):
        # Test for issue #12213
        with self.BytesIO(b'abcdefgh') as raw:
            with self.tp(raw, 100) as f:
                f.write(b"1")
                self.assertEqual(f.read(1), b'b')
                f.write(b'2')
                self.assertEqual(f.read1(1), b'd')
                f.write(b'3')
                buf = bytearray(1)
                f.readinto(buf)
                self.assertEqual(buf, b'f')
                f.write(b'4')
                self.assertEqual(f.peek(1), b'h')
                f.flush()
                self.assertEqual(raw.getvalue(), b'1b2d3f4h')

        with self.BytesIO(b'abc') as raw:
            with self.tp(raw, 100) as f:
                self.assertEqual(f.read(1), b'a')
                f.write(b"2")
                self.assertEqual(f.read(1), b'c')
                f.flush()
                self.assertEqual(raw.getvalue(), b'a2c')

    def test_interleaved_readline_write(self):
        with self.BytesIO(b'ab\ncdef\ng\n') as raw:
            with self.tp(raw) as f:
                f.write(b'1')
                self.assertEqual(f.readline(), b'b\n')
                f.write(b'2')
                self.assertEqual(f.readline(), b'def\n')
                f.write(b'3')
                self.assertEqual(f.readline(), b'\n')
                f.flush()
                self.assertEqual(raw.getvalue(), b'1b\n2def\n3\n')

    # You can't construct a BufferedRandom over a non-seekable stream.
    test_unseekable = None


class CBufferedRandomTest(BufferedRandomTest, SizeofTest):
    tp = io.BufferedRandom

    def test_constructor(self):
        BufferedRandomTest.test_constructor(self)
        # The allocation can succeed on 32-bit builds, e.g. with more
        # than 2 GiB RAM and a 64-bit kernel.
        if sys.maxsize > 0x7FFFFFFF:
            rawio = self.MockRawIO()
            bufio = self.tp(rawio)
            self.assertRaises((OverflowError, MemoryError, ValueError),
                bufio.__init__, rawio, sys.maxsize)

    def test_garbage_collection(self):
        CBufferedReaderTest.test_garbage_collection(self)
        CBufferedWriterTest.test_garbage_collection(self)

    def test_args_error(self):
        # Issue #17275
        with self.assertRaisesRegex(TypeError, "BufferedRandom"):
            self.tp(io.BytesIO(), 1024, 1024, 1024)


class PyBufferedRandomTest(BufferedRandomTest):
    tp = pyio.BufferedRandom


# To fully exercise seek/tell, the StatefulIncrementalDecoder has these
# properties:
#   - A single output character can correspond to many bytes of input.
#   - The number of input bytes to complete the character can be
#     undetermined until the last input byte is received.
#   - The number of input bytes can vary depending on previous input.
#   - A single input byte can correspond to many characters of output.
#   - The number of output characters can be undetermined until the
#     last input byte is received.
# - The number of output characters can vary depending on previous input. class StatefulIncrementalDecoder(codecs.IncrementalDecoder): """ For testing seek/tell behavior with a stateful, buffering decoder. Input is a sequence of words. Words may be fixed-length (length set by input) or variable-length (period-terminated). In variable-length mode, extra periods are ignored. Possible words are: - 'i' followed by a number sets the input length, I (maximum 99). When I is set to 0, words are space-terminated. - 'o' followed by a number sets the output length, O (maximum 99). - Any other word is converted into a word followed by a period on the output. The output word consists of the input word truncated or padded out with hyphens to make its length equal to O. If O is 0, the word is output verbatim without truncating or padding. I and O are initially set to 1. When I changes, any buffered input is re-scanned according to the new I. EOF also terminates the last word. """ def __init__(self, errors='strict'): codecs.IncrementalDecoder.__init__(self, errors) self.reset() def __repr__(self): return '<SID %x>' % id(self) def reset(self): self.i = 1 self.o = 1 self.buffer = bytearray() def getstate(self): i, o = self.i ^ 1, self.o ^ 1 # so that flags = 0 after reset() return bytes(self.buffer), i*100 + o def setstate(self, state): buffer, io = state self.buffer = bytearray(buffer) i, o = divmod(io, 100) self.i, self.o = i ^ 1, o ^ 1 def decode(self, input, final=False): output = '' for b in input: if self.i == 0: # variable-length, terminated with period if b == ord('.'): if self.buffer: output += self.process_word() else: self.buffer.append(b) else: # fixed-length, terminate after self.i bytes self.buffer.append(b) if len(self.buffer) == self.i: output += self.process_word() if final and self.buffer: # EOF terminates the last word output += self.process_word() return output def process_word(self): output = '' if self.buffer[0] == ord('i'): self.i = min(99, int(self.buffer[1:] 
or 0)) # set input length elif self.buffer[0] == ord('o'): self.o = min(99, int(self.buffer[1:] or 0)) # set output length else: output = self.buffer.decode('ascii') if len(output) < self.o: output += '-'*self.o # pad out with hyphens if self.o: output = output[:self.o] # truncate to output length output += '.' self.buffer = bytearray() return output codecEnabled = False @classmethod def lookupTestDecoder(cls, name): if cls.codecEnabled and name == 'test_decoder': latin1 = codecs.lookup('latin-1') return codecs.CodecInfo( name='test_decoder', encode=latin1.encode, decode=None, incrementalencoder=None, streamreader=None, streamwriter=None, incrementaldecoder=cls) # Register the previous decoder for testing. # Disabled by default, tests will enable it. codecs.register(StatefulIncrementalDecoder.lookupTestDecoder) class StatefulIncrementalDecoderTest(unittest.TestCase): """ Make sure the StatefulIncrementalDecoder actually works. """ test_cases = [ # I=1, O=1 (fixed-length input == fixed-length output) (b'abcd', False, 'a.b.c.d.'), # I=0, O=0 (variable-length input, variable-length output) (b'oiabcd', True, 'abcd.'), # I=0, O=0 (should ignore extra periods) (b'oi...abcd...', True, 'abcd.'), # I=0, O=6 (variable-length input, fixed-length output) (b'i.o6.x.xyz.toolongtofit.', False, 'x-----.xyz---.toolon.'), # I=2, O=6 (fixed-length input < fixed-length output) (b'i.i2.o6xyz', True, 'xy----.z-----.'), # I=6, O=3 (fixed-length input > fixed-length output) (b'i.o3.i6.abcdefghijklmnop', True, 'abc.ghi.mno.'), # I=0, then 3; O=29, then 15 (with longer output) (b'i.o29.a.b.cde.o15.abcdefghijabcdefghij.i3.a.b.c.d.ei00k.l.m', True, 'a----------------------------.' + 'b----------------------------.' + 'cde--------------------------.' + 'abcdefghijabcde.' + 'a.b------------.' + '.c.------------.' + 'd.e------------.' + 'k--------------.' + 'l--------------.' + 'm--------------.') ] def test_decoder(self): # Try a few one-shot test cases. 
        # (continuation of StatefulIncrementalDecoderTest.test_decoder)
        for input, eof, output in self.test_cases:
            d = StatefulIncrementalDecoder()
            self.assertEqual(d.decode(input, eof), output)

        # Also test an unfinished decode, followed by forcing EOF.
        d = StatefulIncrementalDecoder()
        self.assertEqual(d.decode(b'oiabcd'), '')
        self.assertEqual(d.decode(b'', 1), 'abcd.')

class TextIOWrapperTest(unittest.TestCase):

    def setUp(self):
        # testdata mixes \r\n, \r and \n line endings; normalized is the
        # universal-newlines view of the same text.
        self.testdata = b"AAA\r\nBBB\rCCC\r\nDDD\nEEE\r\n"
        self.normalized = b"AAA\nBBB\nCCC\nDDD\nEEE\n".decode("ascii")
        support.unlink(support.TESTFN)

    def tearDown(self):
        support.unlink(support.TESTFN)

    def test_constructor(self):
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        t.__init__(b, encoding="latin-1", newline="\r\n")
        self.assertEqual(t.encoding, "latin-1")
        self.assertEqual(t.line_buffering, False)
        t.__init__(b, encoding="utf-8", line_buffering=True)
        self.assertEqual(t.encoding, "utf-8")
        self.assertEqual(t.line_buffering, True)
        self.assertEqual("\xe9\n", t.readline())
        self.assertRaises(TypeError, t.__init__, b, newline=42)
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')

    def test_uninitialized(self):
        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        del t
        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        self.assertRaises(Exception, repr, t)
        self.assertRaisesRegex((ValueError, AttributeError),
                               'uninitialized|has no attribute',
                               t.read, 0)
        t.__init__(self.MockRawIO())
        self.assertEqual(t.read(0), '')

    def test_non_text_encoding_codecs_are_rejected(self):
        # Ensure the constructor complains if passed a codec that isn't
        # marked as a text encoding
        # http://bugs.python.org/issue20404
        r = self.BytesIO()
        b = self.BufferedWriter(r)
        with self.assertRaisesRegex(LookupError, "is not a text encoding"):
            self.TextIOWrapper(b, encoding="hex")

    def test_detach(self):
        r = self.BytesIO()
        b = self.BufferedWriter(r)
        t = self.TextIOWrapper(b)
        self.assertIs(t.detach(), b)

        t = self.TextIOWrapper(b, encoding="ascii")
        t.write("howdy")
        self.assertFalse(r.getvalue())
        t.detach()
        self.assertEqual(r.getvalue(), b"howdy")
        self.assertRaises(ValueError, t.detach)

        # Operations independent of the detached stream should still work
        repr(t)
        self.assertEqual(t.encoding, "ascii")
        self.assertEqual(t.errors, "strict")
        self.assertFalse(t.line_buffering)
        self.assertFalse(t.write_through)

    def test_repr(self):
        raw = self.BytesIO("hello".encode("utf-8"))
        b = self.BufferedReader(raw)
        t = self.TextIOWrapper(b, encoding="utf-8")
        modname = self.TextIOWrapper.__module__
        self.assertEqual(repr(t),
                         "<%s.TextIOWrapper encoding='utf-8'>" % modname)
        raw.name = "dummy"
        self.assertEqual(repr(t),
                         "<%s.TextIOWrapper name='dummy' encoding='utf-8'>" % modname)
        t.mode = "r"
        self.assertEqual(repr(t),
                         "<%s.TextIOWrapper name='dummy' mode='r' encoding='utf-8'>" % modname)
        raw.name = b"dummy"
        self.assertEqual(repr(t),
                         "<%s.TextIOWrapper name=b'dummy' mode='r' encoding='utf-8'>" % modname)

        t.buffer.detach()
        repr(t)  # Should not raise an exception

    def test_recursive_repr(self):
        # Issue #25455
        raw = self.BytesIO()
        t = self.TextIOWrapper(raw)
        with support.swap_attr(raw, 'name', t):
            try:
                repr(t)  # Should not crash
            except RuntimeError:
                pass

    def test_line_buffering(self):
        r = self.BytesIO()
        b = self.BufferedWriter(r, 1000)
        t = self.TextIOWrapper(b, newline="\n", line_buffering=True)
        t.write("X")
        self.assertEqual(r.getvalue(), b"")  # No flush happened
        t.write("Y\nZ")
        self.assertEqual(r.getvalue(), b"XY\nZ")  # All got flushed
        t.write("A\rB")
        self.assertEqual(r.getvalue(), b"XY\nZA\rB")

    def test_reconfigure_line_buffering(self):
        r = self.BytesIO()
        b = self.BufferedWriter(r, 1000)
        t = self.TextIOWrapper(b, newline="\n", line_buffering=False)
        t.write("AB\nC")
        self.assertEqual(r.getvalue(), b"")

        t.reconfigure(line_buffering=True)   # implicit flush
        self.assertEqual(r.getvalue(), b"AB\nC")
        t.write("DEF\nG")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
        t.write("H")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
        t.reconfigure(line_buffering=False)  # implicit flush
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")
        t.write("IJ")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")

        # Keeping default value
        t.reconfigure()
        t.reconfigure(line_buffering=None)
        self.assertEqual(t.line_buffering, False)
        t.reconfigure(line_buffering=True)
        t.reconfigure()
        t.reconfigure(line_buffering=None)
        self.assertEqual(t.line_buffering, True)

    @unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
    def test_default_encoding(self):
        old_environ = dict(os.environ)
        try:
            # try to get a user preferred encoding different than the current
            # locale encoding to check that TextIOWrapper() uses the current
            # locale encoding and not the user preferred encoding
            for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
                if key in os.environ:
                    del os.environ[key]

            current_locale_encoding = locale.getpreferredencoding(False)
            b = self.BytesIO()
            t = self.TextIOWrapper(b)
            self.assertEqual(t.encoding, current_locale_encoding)
        finally:
            os.environ.clear()
            os.environ.update(old_environ)

    @support.cpython_only
    @unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
    def test_device_encoding(self):
        # Issue 15989
        import _testcapi
        b = self.BytesIO()
        b.fileno = lambda: _testcapi.INT_MAX + 1
        self.assertRaises(OverflowError, self.TextIOWrapper, b)
        b.fileno = lambda: _testcapi.UINT_MAX + 1
        self.assertRaises(OverflowError, self.TextIOWrapper, b)

    def test_encoding(self):
        # Check the encoding attribute is always set, and valid
        b = self.BytesIO()
        t = self.TextIOWrapper(b, encoding="utf-8")
        self.assertEqual(t.encoding, "utf-8")
        t = self.TextIOWrapper(b)
        self.assertIsNotNone(t.encoding)
        codecs.lookup(t.encoding)

    def test_encoding_errors_reading(self):
        # (1) default
        b = self.BytesIO(b"abc\n\xff\n")
        t = self.TextIOWrapper(b, encoding="ascii")
        self.assertRaises(UnicodeError, t.read)
        # (2) explicit strict
        b = self.BytesIO(b"abc\n\xff\n")
        t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
        self.assertRaises(UnicodeError, t.read)
        # (3) ignore
        b = self.BytesIO(b"abc\n\xff\n")
        t = self.TextIOWrapper(b, encoding="ascii", errors="ignore")
        self.assertEqual(t.read(), "abc\n\n")
        # (4) replace
        b = self.BytesIO(b"abc\n\xff\n")
        t = self.TextIOWrapper(b, encoding="ascii", errors="replace")
        self.assertEqual(t.read(), "abc\n\ufffd\n")

    def test_encoding_errors_writing(self):
        # (1) default
        b = self.BytesIO()
        t = self.TextIOWrapper(b, encoding="ascii")
        self.assertRaises(UnicodeError, t.write, "\xff")
        # (2) explicit strict
        b = self.BytesIO()
        t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
        self.assertRaises(UnicodeError, t.write, "\xff")
        # (3) ignore
        b = self.BytesIO()
        t = self.TextIOWrapper(b, encoding="ascii", errors="ignore",
                               newline="\n")
        t.write("abc\xffdef\n")
        t.flush()
        self.assertEqual(b.getvalue(), b"abcdef\n")
        # (4) replace
        b = self.BytesIO()
        t = self.TextIOWrapper(b, encoding="ascii", errors="replace",
                               newline="\n")
        t.write("abc\xffdef\n")
        t.flush()
        self.assertEqual(b.getvalue(), b"abc?def\n")

    def test_newlines(self):
        input_lines = [ "unix\n", "windows\r\n", "os9\r", "last\n", "nonl" ]

        tests = [
            [ None, [ 'unix\n', 'windows\n', 'os9\n', 'last\n', 'nonl' ] ],
            [ '', input_lines ],
            [ '\n', [ "unix\n", "windows\r\n", "os9\rlast\n", "nonl" ] ],
            [ '\r\n', [ "unix\nwindows\r\n", "os9\rlast\nnonl" ] ],
            [ '\r', [ "unix\nwindows\r", "\nos9\r", "last\nnonl" ] ],
        ]
        encodings = (
            'utf-8', 'latin-1',
            'utf-16', 'utf-16-le', 'utf-16-be',
            'utf-32', 'utf-32-le', 'utf-32-be',
        )

        # Try a range of buffer sizes to test the case where \r is the last
        # character in TextIOWrapper._pending_line.
        for encoding in encodings:
            # XXX: str.encode() should return bytes
            data = bytes(''.join(input_lines).encode(encoding))
            for do_reads in (False, True):
                for bufsize in range(1, 10):
                    for newline, exp_lines in tests:
                        bufio = self.BufferedReader(self.BytesIO(data), bufsize)
                        textio = self.TextIOWrapper(bufio, newline=newline,
                                                  encoding=encoding)
                        if do_reads:
                            got_lines = []
                            while True:
                                c2 = textio.read(2)
                                if c2 == '':
                                    break
                                self.assertEqual(len(c2), 2)
                                got_lines.append(c2 + textio.readline())
                        else:
                            got_lines = list(textio)

                        for got_line, exp_line in zip(got_lines, exp_lines):
                            self.assertEqual(got_line, exp_line)
                        self.assertEqual(len(got_lines), len(exp_lines))

    def test_newlines_input(self):
        testdata = b"AAA\nBB\x00B\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
        normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
        for newline, expected in [
            (None, normalized.decode("ascii").splitlines(keepends=True)),
            ("", testdata.decode("ascii").splitlines(keepends=True)),
            ("\n", ["AAA\n", "BB\x00B\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
            ("\r\n", ["AAA\nBB\x00B\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
            ("\r", ["AAA\nBB\x00B\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
            ]:
            buf = self.BytesIO(testdata)
            txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
            self.assertEqual(txt.readlines(), expected)
            txt.seek(0)
            self.assertEqual(txt.read(), "".join(expected))

    def test_newlines_output(self):
        testdict = {
            "": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
            "\n": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
            "\r": b"AAA\rBBB\rCCC\rX\rY\r\rZ",
            "\r\n": b"AAA\r\nBBB\r\nCCC\r\nX\rY\r\r\nZ",
            }
        tests = [(None, testdict[os.linesep])] + sorted(testdict.items())
        for newline, expected in tests:
            buf = self.BytesIO()
            txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
            txt.write("AAA\nB")
            txt.write("BB\nCCC\n")
            txt.write("X\rY\r\nZ")
            txt.flush()
            self.assertEqual(buf.closed, False)
            self.assertEqual(buf.getvalue(), expected)

    def test_destructor(self):
        l = []
        base = self.BytesIO
        class MyBytesIO(base):
            def close(self):
                # Record the buffered contents before the buffer goes away.
                l.append(self.getvalue())
                base.close(self)
        b = MyBytesIO()
        t = self.TextIOWrapper(b, encoding="ascii")
        t.write("abc")
        del t
        support.gc_collect()
        self.assertEqual([b"abc"], l)

    def test_override_destructor(self):
        record = []
        class MyTextIO(self.TextIOWrapper):
            def __del__(self):
                record.append(1)
                try:
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(2)
                super().close()
            def flush(self):
                record.append(3)
                super().flush()
        b = self.BytesIO()
        t = MyTextIO(b, encoding="ascii")
        del t
        support.gc_collect()
        self.assertEqual(record, [1, 2, 3])

    def test_error_through_destructor(self):
        # Test that the exception state is not modified by a destructor,
        # even if close() fails.
        rawio = self.CloseFailureIO()
        def f():
            self.TextIOWrapper(rawio).xyzzy
        with support.captured_output("stderr") as s:
            self.assertRaises(AttributeError, f)
        s = s.getvalue().strip()
        if s:
            # The destructor *may* have printed an unraisable error, check it
            self.assertEqual(len(s.splitlines()), 1)
            self.assertTrue(s.startswith("Exception OSError: "), s)
            self.assertTrue(s.endswith(" ignored"), s)

    # Systematic tests of the text I/O API

    def test_basic_io(self):
        for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
            for enc in "ascii", "latin-1", "utf-8" :# , "utf-16-be", "utf-16-le":
                f = self.open(support.TESTFN, "w+", encoding=enc)
                f._CHUNK_SIZE = chunksize
                self.assertEqual(f.write("abc"), 3)
                f.close()
                f = self.open(support.TESTFN, "r+", encoding=enc)
                f._CHUNK_SIZE = chunksize
                self.assertEqual(f.tell(), 0)
                self.assertEqual(f.read(), "abc")
                cookie = f.tell()
                self.assertEqual(f.seek(0), 0)
                self.assertEqual(f.read(None), "abc")
                f.seek(0)
                self.assertEqual(f.read(2), "ab")
                self.assertEqual(f.read(1), "c")
                self.assertEqual(f.read(1), "")
                self.assertEqual(f.read(), "")
                self.assertEqual(f.tell(), cookie)
                self.assertEqual(f.seek(0), 0)
                self.assertEqual(f.seek(0, 2), cookie)
                self.assertEqual(f.write("def"), 3)
                self.assertEqual(f.seek(cookie), cookie)
                self.assertEqual(f.read(), "def")
                if enc.startswith("utf"):
                    self.multi_line_test(f, enc)
                f.close()

    def multi_line_test(self, f, enc):
        # Helper for test_basic_io: write lines of varying lengths built
        # from multi-byte sample characters, then read them back, checking
        # that tell() positions recorded while writing match while reading.
        f.seek(0)
        f.truncate()
        sample = "s\xff\u0fff\uffff"
        wlines = []
        for size in (0, 1, 2, 3, 4, 5, 30, 31, 32, 33, 62, 63, 64, 65, 1000):
            chars = []
            for i in range(size):
                chars.append(sample[i % len(sample)])
            line = "".join(chars) + "\n"
            wlines.append((f.tell(), line))
            f.write(line)
        f.seek(0)
        rlines = []
        while True:
            pos = f.tell()
            line = f.readline()
            if not line:
                break
            rlines.append((pos, line))
        self.assertEqual(rlines, wlines)

    def test_telling(self):
        f = self.open(support.TESTFN, "w+", encoding="utf-8")
        p0 = f.tell()
        f.write("\xff\n")
        p1 = f.tell()
        f.write("\xff\n")
        p2 = f.tell()
        f.seek(0)
        self.assertEqual(f.tell(), p0)
        self.assertEqual(f.readline(), "\xff\n")
        self.assertEqual(f.tell(), p1)
        self.assertEqual(f.readline(), "\xff\n")
        self.assertEqual(f.tell(), p2)
        f.seek(0)
        for line in f:
            self.assertEqual(line, "\xff\n")
            # tell() is not allowed during iteration
            self.assertRaises(OSError, f.tell)
        self.assertEqual(f.tell(), p2)
        f.close()

    def test_seeking(self):
        chunk_size = _default_chunk_size()
        prefix_size = chunk_size - 2
        u_prefix = "a" * prefix_size
        prefix = bytes(u_prefix.encode("utf-8"))
        self.assertEqual(len(u_prefix), len(prefix))
        u_suffix = "\u8888\n"
        suffix = bytes(u_suffix.encode("utf-8"))
        line = prefix + suffix
        with self.open(support.TESTFN, "wb") as f:
            f.write(line*2)
        with self.open(support.TESTFN, "r", encoding="utf-8") as f:
            s = f.read(prefix_size)
            self.assertEqual(s, str(prefix, "ascii"))
            self.assertEqual(f.tell(), prefix_size)
            self.assertEqual(f.readline(), u_suffix)

    def test_seeking_too(self):
        # Regression test for a specific bug
        data = b'\xe0\xbf\xbf\n'
        with self.open(support.TESTFN, "wb") as f:
            f.write(data)
        with self.open(support.TESTFN, "r", encoding="utf-8") as f:
            f._CHUNK_SIZE  # Just test that it exists
            f._CHUNK_SIZE = 2
            f.readline()
            f.tell()

    def test_seek_and_tell(self):
        #Test seek/tell using the StatefulIncrementalDecoder.
        # Make test faster by doing smaller seeks
        CHUNK_SIZE = 128

        def test_seek_and_tell_with_data(data, min_pos=0):
            """Tell/seek to various points within a data stream and ensure
            that the decoded data returned by read() is consistent."""
            f = self.open(support.TESTFN, 'wb')
            f.write(data)
            f.close()
            f = self.open(support.TESTFN, encoding='test_decoder')
            f._CHUNK_SIZE = CHUNK_SIZE
            decoded = f.read()
            f.close()

            for i in range(min_pos, len(decoded) + 1): # seek positions
                for j in [1, 5, len(decoded) - i]: # read lengths
                    f = self.open(support.TESTFN, encoding='test_decoder')
                    self.assertEqual(f.read(i), decoded[:i])
                    cookie = f.tell()
                    self.assertEqual(f.read(j), decoded[i:i + j])
                    f.seek(cookie)
                    self.assertEqual(f.read(), decoded[i:])
                    f.close()

        # Enable the test decoder.
        StatefulIncrementalDecoder.codecEnabled = 1

        # Run the tests.
        try:
            # Try each test case.
            for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
                test_seek_and_tell_with_data(input)

            # Position each test case so that it crosses a chunk boundary.
            for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
                offset = CHUNK_SIZE - len(input)//2
                prefix = b'.'*offset
                # Don't bother seeking into the prefix (takes too long).
                min_pos = offset*2
                test_seek_and_tell_with_data(prefix + input, min_pos)

        # Ensure our test decoder won't interfere with subsequent tests.
        finally:
            StatefulIncrementalDecoder.codecEnabled = 0

    def test_encoded_writes(self):
        data = "1234567890"
        tests = ("utf-16",
                 "utf-16-le",
                 "utf-16-be",
                 "utf-32",
                 "utf-32-le",
                 "utf-32-be")
        for encoding in tests:
            buf = self.BytesIO()
            f = self.TextIOWrapper(buf, encoding=encoding)
            # Check if the BOM is written only once (see issue1753).
            f.write(data)
            f.write(data)
            f.seek(0)
            self.assertEqual(f.read(), data * 2)
            f.seek(0)
            self.assertEqual(f.read(), data * 2)
            self.assertEqual(buf.getvalue(), (data * 2).encode(encoding))

    def test_unreadable(self):
        class UnReadable(self.BytesIO):
            def readable(self):
                return False
        txt = self.TextIOWrapper(UnReadable())
        self.assertRaises(OSError, txt.read)

    def test_read_one_by_one(self):
        txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB"))
        reads = ""
        while True:
            c = txt.read(1)
            if not c:
                break
            reads += c
        self.assertEqual(reads, "AA\nBB")

    def test_readlines(self):
        txt = self.TextIOWrapper(self.BytesIO(b"AA\nBB\nCC"))
        self.assertEqual(txt.readlines(), ["AA\n", "BB\n", "CC"])
        txt.seek(0)
        self.assertEqual(txt.readlines(None), ["AA\n", "BB\n", "CC"])
        txt.seek(0)
        self.assertEqual(txt.readlines(5), ["AA\n", "BB\n"])

    # read in amounts equal to TextIOWrapper._CHUNK_SIZE which is 128.
    def test_read_by_chunk(self):
        # make sure "\r\n" straddles 128 char boundary.
        txt = self.TextIOWrapper(self.BytesIO(b"A" * 127 + b"\r\nB"))
        reads = ""
        while True:
            c = txt.read(128)
            if not c:
                break
            reads += c
        self.assertEqual(reads, "A"*127+"\nB")

    def test_writelines(self):
        l = ['ab', 'cd', 'ef']
        buf = self.BytesIO()
        txt = self.TextIOWrapper(buf)
        txt.writelines(l)
        txt.flush()
        self.assertEqual(buf.getvalue(), b'abcdef')

    def test_writelines_userlist(self):
        l = UserList(['ab', 'cd', 'ef'])
        buf = self.BytesIO()
        txt = self.TextIOWrapper(buf)
        txt.writelines(l)
        txt.flush()
        self.assertEqual(buf.getvalue(), b'abcdef')

    def test_writelines_error(self):
        txt = self.TextIOWrapper(self.BytesIO())
        self.assertRaises(TypeError, txt.writelines, [1, 2, 3])
        self.assertRaises(TypeError, txt.writelines, None)
        self.assertRaises(TypeError, txt.writelines, b'abc')

    def test_issue1395_1(self):
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")

        # read one char at a time
        reads = ""
        while True:
            c = txt.read(1)
            if not c:
                break
            reads += c
        self.assertEqual(reads, self.normalized)

    def test_issue1395_2(self):
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        txt._CHUNK_SIZE = 4

        reads = ""
        while True:
            c = txt.read(4)
            if not c:
                break
            reads += c
        self.assertEqual(reads, self.normalized)

    def test_issue1395_3(self):
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        txt._CHUNK_SIZE = 4

        reads = txt.read(4)
        reads += txt.read(4)
        reads += txt.readline()
        reads += txt.readline()
        reads += txt.readline()
        self.assertEqual(reads, self.normalized)

    def test_issue1395_4(self):
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        txt._CHUNK_SIZE = 4

        reads = txt.read(4)
        reads += txt.read()
        self.assertEqual(reads, self.normalized)

    def test_issue1395_5(self):
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        txt._CHUNK_SIZE = 4

        reads = txt.read(4)
        pos = txt.tell()
        txt.seek(0)
        txt.seek(pos)
        self.assertEqual(txt.read(4), "BBB\n")

    def test_issue2282(self):
        buffer = self.BytesIO(self.testdata)
        txt = self.TextIOWrapper(buffer, encoding="ascii")

        self.assertEqual(buffer.seekable(), txt.seekable())

    def test_append_bom(self):
        # The BOM is not written again when appending to a non-empty file
        filename = support.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
                pos = f.tell()
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'aaa'.encode(charset))

            with self.open(filename, 'a', encoding=charset) as f:
                f.write('xxx')
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'aaaxxx'.encode(charset))

    def test_seek_bom(self):
        # Same test, but when seeking manually
        filename = support.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
                pos = f.tell()
            with self.open(filename, 'r+', encoding=charset) as f:
                f.seek(pos)
                f.write('zzz')
                f.seek(0)
                f.write('bbb')
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'bbbzzz'.encode(charset))

    def test_seek_append_bom(self):
        # Same test, but first seek to the start and then to the end
        filename = support.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
            with self.open(filename, 'a', encoding=charset) as f:
                f.seek(0)
                f.seek(0, self.SEEK_END)
                f.write('xxx')
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'aaaxxx'.encode(charset))

    def test_errors_property(self):
        with self.open(support.TESTFN, "w") as f:
            self.assertEqual(f.errors, "strict")
        with self.open(support.TESTFN, "w", errors="replace") as f:
            self.assertEqual(f.errors, "replace")

    @support.no_tracing
    def test_threads_write(self):
        # Issue6750: concurrent writes could duplicate data
        event = threading.Event()
        with self.open(support.TESTFN, "w", buffering=1) as f:
            def run(n):
                text = "Thread%03d\n" % n
                event.wait()
                f.write(text)
            threads = [threading.Thread(target=run, args=(x,))
                       for x in range(20)]
            with support.start_threads(threads, event.set):
                time.sleep(0.02)
        with self.open(support.TESTFN) as f:
            content = f.read()
            for n in range(20):
                self.assertEqual(content.count("Thread%03d\n" % n), 1)

    def test_flush_error_on_close(self):
        # Test that text file is closed despite failed flush
        # and that flush() is called before file closed.
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        closed = []
        def bad_flush():
            closed[:] = [txt.closed, txt.buffer.closed]
            raise OSError()
        txt.flush = bad_flush
        self.assertRaises(OSError, txt.close) # exception not swallowed
        self.assertTrue(txt.closed)
        self.assertTrue(txt.buffer.closed)
        self.assertTrue(closed)      # flush() called
        self.assertFalse(closed[0])  # flush() called before file closed
        self.assertFalse(closed[1])
        txt.flush = lambda: None  # break reference loop

    def test_close_error_on_close(self):
        buffer = self.BytesIO(self.testdata)
        def bad_flush():
            raise OSError('flush')
        def bad_close():
            raise OSError('close')
        buffer.close = bad_close
        txt = self.TextIOWrapper(buffer, encoding="ascii")
        txt.flush = bad_flush
        with self.assertRaises(OSError) as err: # exception not swallowed
            txt.close()
        self.assertEqual(err.exception.args, ('close',))
        self.assertIsInstance(err.exception.__context__, OSError)
        self.assertEqual(err.exception.__context__.args, ('flush',))
        self.assertFalse(txt.closed)

    def test_nonnormalized_close_error_on_close(self):
        # Issue #21677
        buffer = self.BytesIO(self.testdata)
        def bad_flush():
            raise non_existing_flush
        def bad_close():
            raise non_existing_close
        buffer.close = bad_close
        txt = self.TextIOWrapper(buffer, encoding="ascii")
        txt.flush = bad_flush
        with self.assertRaises(NameError) as err: # exception not swallowed
            txt.close()
        self.assertIn('non_existing_close', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('non_existing_flush', str(err.exception.__context__))
        self.assertFalse(txt.closed)

    def test_multi_close(self):
        # Closing more than once must be a no-op, not an error.
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        txt.close()
        txt.close()
        txt.close()
        self.assertRaises(ValueError, txt.flush)

    def test_unseekable(self):
        txt = self.TextIOWrapper(self.MockUnseekableIO(self.testdata))
        self.assertRaises(self.UnsupportedOperation, txt.tell)
        self.assertRaises(self.UnsupportedOperation, txt.seek, 0)

    def
test_readonly_attributes(self): txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii") buf = self.BytesIO(self.testdata) with self.assertRaises(AttributeError): txt.buffer = buf def test_rawio(self): # Issue #12591: TextIOWrapper must work with raw I/O objects, so # that subprocess.Popen() can have the required unbuffered # semantics with universal_newlines=True. raw = self.MockRawIO([b'abc', b'def', b'ghi\njkl\nopq\n']) txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n') # Reads self.assertEqual(txt.read(4), 'abcd') self.assertEqual(txt.readline(), 'efghi\n') self.assertEqual(list(txt), ['jkl\n', 'opq\n']) def test_rawio_write_through(self): # Issue #12591: with write_through=True, writes don't need a flush raw = self.MockRawIO([b'abc', b'def', b'ghi\njkl\nopq\n']) txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n', write_through=True) txt.write('1') txt.write('23\n4') txt.write('5') self.assertEqual(b''.join(raw._write_stack), b'123\n45') def test_bufio_write_through(self): # Issue #21396: write_through=True doesn't force a flush() # on the underlying binary buffered object. 
        flush_called, write_called = [], []
        # Subclass that records every flush()/write() so the test can assert
        # which of the two write_through actually triggers.
        class BufferedWriter(self.BufferedWriter):
            def flush(self, *args, **kwargs):
                flush_called.append(True)
                return super().flush(*args, **kwargs)
            def write(self, *args, **kwargs):
                write_called.append(True)
                return super().write(*args, **kwargs)

        rawio = self.BytesIO()
        data = b"a"
        # Buffer capacity is twice the payload, so a single write fits
        # without forcing a flush to the raw stream.
        bufio = BufferedWriter(rawio, len(data)*2)
        textio = self.TextIOWrapper(bufio, encoding='ascii',
                                    write_through=True)
        # write to the buffered io but don't overflow the buffer
        text = data.decode('ascii')
        textio.write(text)

        # buffer.flush is not called with write_through=True
        self.assertFalse(flush_called)
        # buffer.write *is* called with write_through=True
        self.assertTrue(write_called)
        self.assertEqual(rawio.getvalue(), b"") # no flush

        write_called = [] # reset
        textio.write(text * 10) # total content is larger than bufio buffer
        self.assertTrue(write_called)
        self.assertEqual(rawio.getvalue(), data * 11) # all flushed

    def test_reconfigure_write_through(self):
        # reconfigure(write_through=...) toggling: True implies a flush,
        # None/no-arg keeps the current value.
        raw = self.MockRawIO([])
        t = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        t.write('1')
        t.reconfigure(write_through=True)  # implied flush
        self.assertEqual(t.write_through, True)
        self.assertEqual(b''.join(raw._write_stack), b'1')
        t.write('23')
        self.assertEqual(b''.join(raw._write_stack), b'123')
        t.reconfigure(write_through=False)
        self.assertEqual(t.write_through, False)
        t.write('45')
        t.flush()
        self.assertEqual(b''.join(raw._write_stack), b'12345')
        # Keeping default value
        t.reconfigure()
        t.reconfigure(write_through=None)
        self.assertEqual(t.write_through, False)
        t.reconfigure(write_through=True)
        t.reconfigure()
        t.reconfigure(write_through=None)
        self.assertEqual(t.write_through, True)

    def test_read_nonbytes(self):
        # Issue #17106
        # Crash when underlying read() returns non-bytes
        t = self.TextIOWrapper(self.StringIO('a'))
        self.assertRaises(TypeError, t.read, 1)
        t = self.TextIOWrapper(self.StringIO('a'))
        self.assertRaises(TypeError, t.readline)
        t = self.TextIOWrapper(self.StringIO('a'))
        self.assertRaises(TypeError, t.read)

    def test_illegal_encoder(self):
        # Issue 31271: Calling write() while the return value of encoder's
        # encode() is invalid shouldn't cause an assertion failure.
        # rot13 is a bytes-to-bytes codec; faking _is_text_encoding lets it
        # be installed where a text encoder is expected.
        rot13 = codecs.lookup("rot13")
        with support.swap_attr(rot13, '_is_text_encoding', True):
            t = io.TextIOWrapper(io.BytesIO(b'foo'), encoding="rot13")
        self.assertRaises(TypeError, t.write, 'bar')

    def test_illegal_decoder(self):
        # Issue #17106
        # Bypass the early encoding check added in issue 20404
        def _make_illegal_wrapper():
            quopri = codecs.lookup("quopri")
            quopri._is_text_encoding = True
            try:
                t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'),
                                       newline='\n', encoding="quopri")
            finally:
                # Always restore the flag so other tests see the real codec.
                quopri._is_text_encoding = False
            return t
        # Crash when decoder returns non-string
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.read, 1)
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.readline)
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.read)

        # Issue 31243: calling read() while the return value of decoder's
        # getstate() is invalid should neither crash the interpreter nor
        # raise a SystemError.
        def _make_very_illegal_wrapper(getstate_ret_val):
            class BadDecoder:
                def getstate(self):
                    return getstate_ret_val
            def _get_bad_decoder(dummy):
                return BadDecoder()
            quopri = codecs.lookup("quopri")
            with support.swap_attr(quopri, 'incrementaldecoder',
                                   _get_bad_decoder):
                return _make_illegal_wrapper()
        # getstate() must return a (buffered_bytes, flags) pair; exercise an
        # int, an empty tuple and a tuple with wrong element types.
        t = _make_very_illegal_wrapper(42)
        self.assertRaises(TypeError, t.read, 42)
        t = _make_very_illegal_wrapper(())
        self.assertRaises(TypeError, t.read, 42)
        t = _make_very_illegal_wrapper((1, 2))
        self.assertRaises(TypeError, t.read, 42)

    def _check_create_at_shutdown(self, **kwargs):
        # Issue #20037: creating a TextIOWrapper at shutdown
        # shouldn't crash the interpreter.
        iomod = self.io.__name__
        # Run a subprocess whose only TextIOWrapper is created inside a
        # __del__ during interpreter shutdown; "ok" on stdout means success.
        code = """if 1:
            import codecs
            import {iomod} as io

            # Avoid looking up codecs at shutdown
            codecs.lookup('utf-8')

            class C:
                def __init__(self):
                    self.buf = io.BytesIO()
                def __del__(self):
                    io.TextIOWrapper(self.buf, **{kwargs})
                    print("ok")
            c = C()
            """.format(iomod=iomod, kwargs=kwargs)
        return assert_python_ok("-c", code)

    @support.requires_type_collecting
    def test_create_at_shutdown_without_encoding(self):
        rc, out, err = self._check_create_at_shutdown()
        if err:
            # Can error out with a RuntimeError if the module state
            # isn't found.
            self.assertIn(self.shutdown_error, err.decode())
        else:
            self.assertEqual("ok", out.decode().strip())

    @support.requires_type_collecting
    def test_create_at_shutdown_with_encoding(self):
        rc, out, err = self._check_create_at_shutdown(encoding='utf-8',
                                                      errors='strict')
        self.assertFalse(err)
        self.assertEqual("ok", out.decode().strip())

    def test_read_byteslike(self):
        r = MemviewBytesIO(b'Just some random string\n')
        t = self.TextIOWrapper(r, 'utf-8')

        # TextIOwrapper will not read the full string, because
        # we truncate it to a multiple of the native int size
        # so that we can construct a more complex memoryview.
        bytes_val = _to_memoryview(r.getvalue()).tobytes()
        self.assertEqual(t.read(200), bytes_val.decode('utf-8'))

    def test_issue22849(self):
        # Issue #22849: construction must not crash on an object that claims
        # readability but (initially) has no tell() method.
        class F(object):
            def readable(self): return True
            def writable(self): return True
            def seekable(self): return True

        for i in range(10):
            try:
                self.TextIOWrapper(F(), encoding='utf-8')
            except Exception:
                pass

        # Once tell() exists, construction must succeed.
        F.tell = lambda x: 0
        t = self.TextIOWrapper(F(), encoding='utf-8')

    def test_reconfigure_encoding_read(self):
        # latin1 -> utf8
        # (latin1 can decode utf-8 encoded string)
        data = 'abc\xe9\n'.encode('latin1') + 'd\xe9f\n'.encode('utf8')
        raw = self.BytesIO(data)
        txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
        self.assertEqual(txt.readline(), 'abc\xe9\n')
        # Changing encoding/newline after reading has started is refused.
        with self.assertRaises(self.UnsupportedOperation):
            txt.reconfigure(encoding='utf-8')
        with self.assertRaises(self.UnsupportedOperation):
            txt.reconfigure(newline=None)

    def test_reconfigure_write_fromascii(self):
        # ascii has a specific encodefunc in the C implementation,
        # but utf-8-sig has not. Make sure that we get rid of the
        # cached encodefunc when we switch encoders.
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('foo\n')
        txt.reconfigure(encoding='utf-8-sig')
        txt.write('\xe9\n')
        txt.flush()
        # '\xe9' must come out UTF-8 encoded, with no stale ascii encodefunc.
        self.assertEqual(raw.getvalue(), b'foo\n\xc3\xa9\n')

    def test_reconfigure_write(self):
        # latin -> utf8
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
        txt.write('abc\xe9\n')
        txt.reconfigure(encoding='utf-8')
        # reconfigure() flushes pending latin1 output before switching.
        self.assertEqual(raw.getvalue(), b'abc\xe9\n')
        txt.write('d\xe9f\n')
        txt.flush()
        self.assertEqual(raw.getvalue(), b'abc\xe9\nd\xc3\xa9f\n')

        # ascii -> utf-8-sig: ensure that no BOM is written in the middle of
        # the file
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('abc\n')
        txt.reconfigure(encoding='utf-8-sig')
        txt.write('d\xe9f\n')
        txt.flush()
        self.assertEqual(raw.getvalue(), b'abc\nd\xc3\xa9f\n')

    def test_reconfigure_write_non_seekable(self):
        raw = self.BytesIO()
        raw.seekable = lambda: False
        raw.seek = None
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('abc\n')
        txt.reconfigure(encoding='utf-8-sig')
        txt.write('d\xe9f\n')
        txt.flush()

        # If the raw stream is not seekable, there'll be a BOM
        self.assertEqual(raw.getvalue(),  b'abc\n\xef\xbb\xbfd\xc3\xa9f\n')

    def test_reconfigure_defaults(self):
        # Passing None (or omitting an argument) keeps the current setting;
        # changing the encoding resets errors to 'strict'.
        txt = self.TextIOWrapper(self.BytesIO(), 'ascii', 'replace', '\n')
        txt.reconfigure(encoding=None)
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'replace')
        txt.write('LF\n')

        txt.reconfigure(newline='\r\n')
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'replace')

        txt.reconfigure(errors='ignore')
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'ignore')
        txt.write('CRLF\n')

        txt.reconfigure(encoding='utf-8', newline=None)
        self.assertEqual(txt.errors, 'strict')
        txt.seek(0)
        self.assertEqual(txt.read(), 'LF\nCRLF\n')

        self.assertEqual(txt.detach().getvalue(), b'LF\nCRLF\r\n')

    def test_reconfigure_newline(self):
        raw = self.BytesIO(b'CR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline=None)
        # Universal-newline mode translates the bare '\r' to '\n'.
        self.assertEqual(txt.readline(), 'CR\n')
        raw = self.BytesIO(b'CR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline='')
        # newline='' recognizes all line endings but translates none.
        self.assertEqual(txt.readline(), 'CR\r')
        raw = self.BytesIO(b'CR\rLF\nEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
        txt.reconfigure(newline='\n')
        self.assertEqual(txt.readline(), 'CR\rLF\n')
        raw = self.BytesIO(b'LF\nCR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline='\r')
        self.assertEqual(txt.readline(), 'LF\nCR\r')
        raw = self.BytesIO(b'CR\rCRLF\r\nEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
        txt.reconfigure(newline='\r\n')
        self.assertEqual(txt.readline(), 'CR\rCRLF\r\n')

        # On output: newline=None maps '\n' to os.linesep, '' and '\n' leave
        # it alone, anything else substitutes that string.
        txt = self.TextIOWrapper(self.BytesIO(), 'ascii', newline='\r')
        txt.reconfigure(newline=None)
        txt.write('linesep\n')
        txt.reconfigure(newline='')
        txt.write('LF\n')
        txt.reconfigure(newline='\n')
        txt.write('LF\n')
        txt.reconfigure(newline='\r')
        txt.write('CR\n')
        txt.reconfigure(newline='\r\n')
        txt.write('CRLF\n')
        expected = 'linesep' + os.linesep + 'LF\nLF\nCR\rCRLF\r\n'
        self.assertEqual(txt.detach().getvalue().decode('ascii'), expected)

    def test_issue25862(self):
        # Assertion failures occurred in tell() after read() and write().
        t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
        t.read(1)
        t.read()
        t.tell()

        t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
        t.read(1)
        t.write('x')
        t.tell()


class MemviewBytesIO(io.BytesIO):
    '''A BytesIO object whose read method returns memoryviews
       rather than bytes'''

    def read1(self, len_):
        return _to_memoryview(super().read1(len_))

    def read(self, len_):
        return _to_memoryview(super().read(len_))

def _to_memoryview(buf):
    '''Convert bytes-object *buf* to a non-trivial memoryview'''
    # Truncate to a multiple of the int item size so the bytes can be viewed
    # as an array of native ints (itemsize > 1 makes the view non-trivial).
    arr = array.array('i')
    idx = len(buf) - len(buf) % arr.itemsize
    arr.frombytes(buf[:idx])
    return memoryview(arr)


class CTextIOWrapperTest(TextIOWrapperTest):
    # Run the shared TextIOWrapperTest suite against the C implementation.
    io = io
    shutdown_error = "RuntimeError: could not find io module state"

    def test_initialization(self):
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        # A failed re-__init__ must leave the wrapper unusable, not crashed.
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
        self.assertRaises(ValueError, t.read)

        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        self.assertRaises(Exception, repr, t)

    def test_garbage_collection(self):
        # C TextIOWrapper objects are collected, and collecting them flushes
        # all data to disk.
        # The Python version has __del__, so it ends in gc.garbage instead.
        with support.check_warnings(('', ResourceWarning)):
            rawio = io.FileIO(support.TESTFN, "wb")
            b = self.BufferedWriter(rawio)
            t = self.TextIOWrapper(b, encoding="ascii")
            t.write("456def")
            t.x = t  # self-reference cycle, only the GC can reclaim it
            wr = weakref.ref(t)
            del t
            support.gc_collect()
        self.assertIsNone(wr(), wr)
        with self.open(support.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"456def")

    def test_rwpair_cleared_before_textio(self):
        # Issue 13070: TextIOWrapper's finalization would crash when called
        # after the reference to the underlying BufferedRWPair's writer got
        # cleared by the GC.
for i in range(1000): b1 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO()) t1 = self.TextIOWrapper(b1, encoding="ascii") b2 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO()) t2 = self.TextIOWrapper(b2, encoding="ascii") # circular references t1.buddy = t2 t2.buddy = t1 support.gc_collect() class PyTextIOWrapperTest(TextIOWrapperTest): io = pyio shutdown_error = "LookupError: unknown encoding: ascii" class IncrementalNewlineDecoderTest(unittest.TestCase): def check_newline_decoding_utf8(self, decoder): # UTF-8 specific tests for a newline decoder def _check_decode(b, s, **kwargs): # We exercise getstate() / setstate() as well as decode() state = decoder.getstate() self.assertEqual(decoder.decode(b, **kwargs), s) decoder.setstate(state) self.assertEqual(decoder.decode(b, **kwargs), s) _check_decode(b'\xe8\xa2\x88', "\u8888") _check_decode(b'\xe8', "") _check_decode(b'\xa2', "") _check_decode(b'\x88', "\u8888") _check_decode(b'\xe8', "") _check_decode(b'\xa2', "") _check_decode(b'\x88', "\u8888") _check_decode(b'\xe8', "") self.assertRaises(UnicodeDecodeError, decoder.decode, b'', final=True) decoder.reset() _check_decode(b'\n', "\n") _check_decode(b'\r', "") _check_decode(b'', "\n", final=True) _check_decode(b'\r', "\n", final=True) _check_decode(b'\r', "") _check_decode(b'a', "\na") _check_decode(b'\r\r\n', "\n\n") _check_decode(b'\r', "") _check_decode(b'\r', "\n") _check_decode(b'\na', "\na") _check_decode(b'\xe8\xa2\x88\r\n', "\u8888\n") _check_decode(b'\xe8\xa2\x88', "\u8888") _check_decode(b'\n', "\n") _check_decode(b'\xe8\xa2\x88\r', "\u8888") _check_decode(b'\n', "\n") def check_newline_decoding(self, decoder, encoding): result = [] if encoding is not None: encoder = codecs.getincrementalencoder(encoding)() def _decode_bytewise(s): # Decode one byte at a time for b in encoder.encode(s): result.append(decoder.decode(bytes([b]))) else: encoder = None def _decode_bytewise(s): # Decode one char at a time for c in s: 
result.append(decoder.decode(c)) self.assertEqual(decoder.newlines, None) _decode_bytewise("abc\n\r") self.assertEqual(decoder.newlines, '\n') _decode_bytewise("\nabc") self.assertEqual(decoder.newlines, ('\n', '\r\n')) _decode_bytewise("abc\r") self.assertEqual(decoder.newlines, ('\n', '\r\n')) _decode_bytewise("abc") self.assertEqual(decoder.newlines, ('\r', '\n', '\r\n')) _decode_bytewise("abc\r") self.assertEqual("".join(result), "abc\n\nabcabc\nabcabc") decoder.reset() input = "abc" if encoder is not None: encoder.reset() input = encoder.encode(input) self.assertEqual(decoder.decode(input), "abc") self.assertEqual(decoder.newlines, None) def test_newline_decoder(self): encodings = ( # None meaning the IncrementalNewlineDecoder takes unicode input # rather than bytes input None, 'utf-8', 'latin-1', 'utf-16', 'utf-16-le', 'utf-16-be', 'utf-32', 'utf-32-le', 'utf-32-be', ) for enc in encodings: decoder = enc and codecs.getincrementaldecoder(enc)() decoder = self.IncrementalNewlineDecoder(decoder, translate=True) self.check_newline_decoding(decoder, enc) decoder = codecs.getincrementaldecoder("utf-8")() decoder = self.IncrementalNewlineDecoder(decoder, translate=True) self.check_newline_decoding_utf8(decoder) self.assertRaises(TypeError, decoder.setstate, 42) def test_newline_bytes(self): # Issue 5433: Excessive optimization in IncrementalNewlineDecoder def _check(dec): self.assertEqual(dec.newlines, None) self.assertEqual(dec.decode("\u0D00"), "\u0D00") self.assertEqual(dec.newlines, None) self.assertEqual(dec.decode("\u0A00"), "\u0A00") self.assertEqual(dec.newlines, None) dec = self.IncrementalNewlineDecoder(None, translate=False) _check(dec) dec = self.IncrementalNewlineDecoder(None, translate=True) _check(dec) def test_translate(self): # issue 35062 for translate in (-2, -1, 1, 2): decoder = codecs.getincrementaldecoder("utf-8")() decoder = self.IncrementalNewlineDecoder(decoder, translate) self.check_newline_decoding_utf8(decoder) decoder = 
codecs.getincrementaldecoder("utf-8")() decoder = self.IncrementalNewlineDecoder(decoder, translate=0) self.assertEqual(decoder.decode(b"\r\r\n"), "\r\r\n") class CIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest): pass class PyIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest): pass # XXX Tests for open() class MiscIOTest(unittest.TestCase): def tearDown(self): support.unlink(support.TESTFN) def test___all__(self): for name in self.io.__all__: obj = getattr(self.io, name, None) self.assertIsNotNone(obj, name) if name == "open": continue elif "error" in name.lower() or name == "UnsupportedOperation": self.assertTrue(issubclass(obj, Exception), name) elif not name.startswith("SEEK_"): self.assertTrue(issubclass(obj, self.IOBase)) def test_attributes(self): f = self.open(support.TESTFN, "wb", buffering=0) self.assertEqual(f.mode, "wb") f.close() with support.check_warnings(('', DeprecationWarning)): f = self.open(support.TESTFN, "U") self.assertEqual(f.name, support.TESTFN) self.assertEqual(f.buffer.name, support.TESTFN) self.assertEqual(f.buffer.raw.name, support.TESTFN) self.assertEqual(f.mode, "U") self.assertEqual(f.buffer.mode, "rb") self.assertEqual(f.buffer.raw.mode, "rb") f.close() f = self.open(support.TESTFN, "w+") self.assertEqual(f.mode, "w+") self.assertEqual(f.buffer.mode, "rb+") # Does it really matter? 
self.assertEqual(f.buffer.raw.mode, "rb+") g = self.open(f.fileno(), "wb", closefd=False) self.assertEqual(g.mode, "wb") self.assertEqual(g.raw.mode, "wb") self.assertEqual(g.name, f.fileno()) self.assertEqual(g.raw.name, f.fileno()) f.close() g.close() def test_io_after_close(self): for kwargs in [ {"mode": "w"}, {"mode": "wb"}, {"mode": "w", "buffering": 1}, {"mode": "w", "buffering": 2}, {"mode": "wb", "buffering": 0}, {"mode": "r"}, {"mode": "rb"}, {"mode": "r", "buffering": 1}, {"mode": "r", "buffering": 2}, {"mode": "rb", "buffering": 0}, {"mode": "w+"}, {"mode": "w+b"}, {"mode": "w+", "buffering": 1}, {"mode": "w+", "buffering": 2}, {"mode": "w+b", "buffering": 0}, ]: f = self.open(support.TESTFN, **kwargs) f.close() self.assertRaises(ValueError, f.flush) self.assertRaises(ValueError, f.fileno) self.assertRaises(ValueError, f.isatty) self.assertRaises(ValueError, f.__iter__) if hasattr(f, "peek"): self.assertRaises(ValueError, f.peek, 1) self.assertRaises(ValueError, f.read) if hasattr(f, "read1"): self.assertRaises(ValueError, f.read1, 1024) self.assertRaises(ValueError, f.read1) if hasattr(f, "readall"): self.assertRaises(ValueError, f.readall) if hasattr(f, "readinto"): self.assertRaises(ValueError, f.readinto, bytearray(1024)) if hasattr(f, "readinto1"): self.assertRaises(ValueError, f.readinto1, bytearray(1024)) self.assertRaises(ValueError, f.readline) self.assertRaises(ValueError, f.readlines) self.assertRaises(ValueError, f.readlines, 1) self.assertRaises(ValueError, f.seek, 0) self.assertRaises(ValueError, f.tell) self.assertRaises(ValueError, f.truncate) self.assertRaises(ValueError, f.write, b"" if "b" in kwargs['mode'] else "") self.assertRaises(ValueError, f.writelines, []) self.assertRaises(ValueError, next, f) def test_blockingioerror(self): # Various BlockingIOError issues class C(str): pass c = C("") b = self.BlockingIOError(1, c) c.b = b b.c = c wr = weakref.ref(c) del c, b support.gc_collect() self.assertIsNone(wr(), wr) def 
test_abcs(self): # Test the visible base classes are ABCs. self.assertIsInstance(self.IOBase, abc.ABCMeta) self.assertIsInstance(self.RawIOBase, abc.ABCMeta) self.assertIsInstance(self.BufferedIOBase, abc.ABCMeta) self.assertIsInstance(self.TextIOBase, abc.ABCMeta) def _check_abc_inheritance(self, abcmodule): with self.open(support.TESTFN, "wb", buffering=0) as f: self.assertIsInstance(f, abcmodule.IOBase) self.assertIsInstance(f, abcmodule.RawIOBase) self.assertNotIsInstance(f, abcmodule.BufferedIOBase) self.assertNotIsInstance(f, abcmodule.TextIOBase) with self.open(support.TESTFN, "wb") as f: self.assertIsInstance(f, abcmodule.IOBase) self.assertNotIsInstance(f, abcmodule.RawIOBase) self.assertIsInstance(f, abcmodule.BufferedIOBase) self.assertNotIsInstance(f, abcmodule.TextIOBase) with self.open(support.TESTFN, "w") as f: self.assertIsInstance(f, abcmodule.IOBase) self.assertNotIsInstance(f, abcmodule.RawIOBase) self.assertNotIsInstance(f, abcmodule.BufferedIOBase) self.assertIsInstance(f, abcmodule.TextIOBase) def test_abc_inheritance(self): # Test implementations inherit from their respective ABCs self._check_abc_inheritance(self) def test_abc_inheritance_official(self): # Test implementations inherit from the official ABCs of the # baseline "io" module. 
self._check_abc_inheritance(io) def _check_warn_on_dealloc(self, *args, **kwargs): f = open(*args, **kwargs) r = repr(f) with self.assertWarns(ResourceWarning) as cm: f = None support.gc_collect() self.assertIn(r, str(cm.warning.args[0])) def test_warn_on_dealloc(self): self._check_warn_on_dealloc(support.TESTFN, "wb", buffering=0) self._check_warn_on_dealloc(support.TESTFN, "wb") self._check_warn_on_dealloc(support.TESTFN, "w") def _check_warn_on_dealloc_fd(self, *args, **kwargs): fds = [] def cleanup_fds(): for fd in fds: try: os.close(fd) except OSError as e: if e.errno != errno.EBADF: raise self.addCleanup(cleanup_fds) r, w = os.pipe() fds += r, w self._check_warn_on_dealloc(r, *args, **kwargs) # When using closefd=False, there's no warning r, w = os.pipe() fds += r, w with support.check_no_resource_warning(self): open(r, *args, closefd=False, **kwargs) def test_warn_on_dealloc_fd(self): self._check_warn_on_dealloc_fd("rb", buffering=0) self._check_warn_on_dealloc_fd("rb") self._check_warn_on_dealloc_fd("r") def test_pickling(self): # Pickling file objects is forbidden for kwargs in [ {"mode": "w"}, {"mode": "wb"}, {"mode": "wb", "buffering": 0}, {"mode": "r"}, {"mode": "rb"}, {"mode": "rb", "buffering": 0}, {"mode": "w+"}, {"mode": "w+b"}, {"mode": "w+b", "buffering": 0}, ]: for protocol in range(pickle.HIGHEST_PROTOCOL + 1): with self.open(support.TESTFN, **kwargs) as f: self.assertRaises(TypeError, pickle.dumps, f, protocol) def test_nonblock_pipe_write_bigbuf(self): self._test_nonblock_pipe_write(16*1024) def test_nonblock_pipe_write_smallbuf(self): self._test_nonblock_pipe_write(1024) @unittest.skipUnless(hasattr(os, 'set_blocking'), 'os.set_blocking() required for this test') def _test_nonblock_pipe_write(self, bufsize): sent = [] received = [] r, w = os.pipe() os.set_blocking(r, False) os.set_blocking(w, False) # To exercise all code paths in the C implementation we need # to play with buffer sizes. 
For instance, if we choose a # buffer size less than or equal to _PIPE_BUF (4096 on Linux) # then we will never get a partial write of the buffer. rf = self.open(r, mode='rb', closefd=True, buffering=bufsize) wf = self.open(w, mode='wb', closefd=True, buffering=bufsize) with rf, wf: for N in 9999, 73, 7574: try: i = 0 while True: msg = bytes([i % 26 + 97]) * N sent.append(msg) wf.write(msg) i += 1 except self.BlockingIOError as e: self.assertEqual(e.args[0], errno.EAGAIN) self.assertEqual(e.args[2], e.characters_written) sent[-1] = sent[-1][:e.characters_written] received.append(rf.read()) msg = b'BLOCKED' wf.write(msg) sent.append(msg) while True: try: wf.flush() break except self.BlockingIOError as e: self.assertEqual(e.args[0], errno.EAGAIN) self.assertEqual(e.args[2], e.characters_written) self.assertEqual(e.characters_written, 0) received.append(rf.read()) received += iter(rf.read, None) sent, received = b''.join(sent), b''.join(received) self.assertEqual(sent, received) self.assertTrue(wf.closed) self.assertTrue(rf.closed) def test_create_fail(self): # 'x' mode fails if file is existing with self.open(support.TESTFN, 'w'): pass self.assertRaises(FileExistsError, self.open, support.TESTFN, 'x') def test_create_writes(self): # 'x' mode opens for writing with self.open(support.TESTFN, 'xb') as f: f.write(b"spam") with self.open(support.TESTFN, 'rb') as f: self.assertEqual(b"spam", f.read()) def test_open_allargs(self): # there used to be a buffer overflow in the parser for rawmode self.assertRaises(ValueError, self.open, support.TESTFN, 'rwax+') class CMiscIOTest(MiscIOTest): io = io def test_readinto_buffer_overflow(self): # Issue #18025 class BadReader(self.io.BufferedIOBase): def read(self, n=-1): return b'x' * 10**6 bufio = BadReader() b = bytearray(2) self.assertRaises(ValueError, bufio.readinto, b) def check_daemon_threads_shutdown_deadlock(self, stream_name): # Issue #23309: deadlocks at shutdown should be avoided when a # daemon thread and the main 
thread both write to a file. code = """if 1: import sys import time import threading from test.support import SuppressCrashReport file = sys.{stream_name} def run(): while True: file.write('.') file.flush() crash = SuppressCrashReport() crash.__enter__() # don't call __exit__(): the crash occurs at Python shutdown thread = threading.Thread(target=run) thread.daemon = True thread.start() time.sleep(0.5) file.write('!') file.flush() """.format_map(locals()) res, _ = run_python_until_end("-c", code) err = res.err.decode() if res.rc != 0: # Failure: should be a fatal error self.assertIn("Fatal Python error: could not acquire lock " "for <_io.BufferedWriter name='<{stream_name}>'> " "at interpreter shutdown, possibly due to " "daemon threads".format_map(locals()), err) else: self.assertFalse(err.strip('.!')) def test_daemon_threads_shutdown_stdout_deadlock(self): self.check_daemon_threads_shutdown_deadlock('stdout') def test_daemon_threads_shutdown_stderr_deadlock(self): self.check_daemon_threads_shutdown_deadlock('stderr') class PyMiscIOTest(MiscIOTest): io = pyio @unittest.skipIf(os.name == 'nt', 'POSIX signals required for this test.') class SignalsTest(unittest.TestCase): def setUp(self): self.oldalrm = signal.signal(signal.SIGALRM, self.alarm_interrupt) def tearDown(self): signal.signal(signal.SIGALRM, self.oldalrm) def alarm_interrupt(self, sig, frame): 1/0 def check_interrupted_write(self, item, bytes, **fdopen_kwargs): """Check that a partial write, when it gets interrupted, properly invokes the signal handler, and bubbles up the exception raised in the latter.""" read_results = [] def _read(): if hasattr(signal, 'pthread_sigmask'): signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGALRM]) s = os.read(r, 1) read_results.append(s) t = threading.Thread(target=_read) t.daemon = True r, w = os.pipe() fdopen_kwargs["closefd"] = False large_data = item * (support.PIPE_MAX_SIZE // len(item) + 1) try: wio = self.io.open(w, **fdopen_kwargs) t.start() # Fill the pipe 
enough that the write will be blocking. # It will be interrupted by the timer armed above. Since the # other thread has read one byte, the low-level write will # return with a successful (partial) result rather than an EINTR. # The buffered IO layer must check for pending signal # handlers, which in this case will invoke alarm_interrupt(). signal.alarm(1) try: self.assertRaises(ZeroDivisionError, wio.write, large_data) finally: signal.alarm(0) t.join() # We got one byte, get another one and check that it isn't a # repeat of the first one. read_results.append(os.read(r, 1)) self.assertEqual(read_results, [bytes[0:1], bytes[1:2]]) finally: os.close(w) os.close(r) # This is deliberate. If we didn't close the file descriptor # before closing wio, wio would try to flush its internal # buffer, and block again. try: wio.close() except OSError as e: if e.errno != errno.EBADF: raise def test_interrupted_write_unbuffered(self): self.check_interrupted_write(b"xy", b"xy", mode="wb", buffering=0) def test_interrupted_write_buffered(self): self.check_interrupted_write(b"xy", b"xy", mode="wb") def test_interrupted_write_text(self): self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii") @support.no_tracing def check_reentrant_write(self, data, **fdopen_kwargs): def on_alarm(*args): # Will be called reentrantly from the same thread wio.write(data) 1/0 signal.signal(signal.SIGALRM, on_alarm) r, w = os.pipe() wio = self.io.open(w, **fdopen_kwargs) try: signal.alarm(1) # Either the reentrant call to wio.write() fails with RuntimeError, # or the signal handler raises ZeroDivisionError. 
with self.assertRaises((ZeroDivisionError, RuntimeError)) as cm: while 1: for i in range(100): wio.write(data) wio.flush() # Make sure the buffer doesn't fill up and block further writes os.read(r, len(data) * 100) exc = cm.exception if isinstance(exc, RuntimeError): self.assertTrue(str(exc).startswith("reentrant call"), str(exc)) finally: signal.alarm(0) wio.close() os.close(r) def test_reentrant_write_buffered(self): self.check_reentrant_write(b"xy", mode="wb") def test_reentrant_write_text(self): self.check_reentrant_write("xy", mode="w", encoding="ascii") def check_interrupted_read_retry(self, decode, **fdopen_kwargs): """Check that a buffered read, when it gets interrupted (either returning a partial result or EINTR), properly invokes the signal handler and retries if the latter returned successfully.""" r, w = os.pipe() fdopen_kwargs["closefd"] = False def alarm_handler(sig, frame): os.write(w, b"bar") signal.signal(signal.SIGALRM, alarm_handler) try: rio = self.io.open(r, **fdopen_kwargs) os.write(w, b"foo") signal.alarm(1) # Expected behaviour: # - first raw read() returns partial b"foo" # - second raw read() returns EINTR # - third raw read() returns b"bar" self.assertEqual(decode(rio.read(6)), "foobar") finally: signal.alarm(0) rio.close() os.close(w) os.close(r) def test_interrupted_read_retry_buffered(self): self.check_interrupted_read_retry(lambda x: x.decode('latin1'), mode="rb") def test_interrupted_read_retry_text(self): self.check_interrupted_read_retry(lambda x: x, mode="r") def check_interrupted_write_retry(self, item, **fdopen_kwargs): """Check that a buffered write, when it gets interrupted (either returning a partial result or EINTR), properly invokes the signal handler and retries if the latter returned successfully.""" select = support.import_module("select") # A quantity that exceeds the buffer size of an anonymous pipe's # write end. 
N = support.PIPE_MAX_SIZE r, w = os.pipe() fdopen_kwargs["closefd"] = False # We need a separate thread to read from the pipe and allow the # write() to finish. This thread is started after the SIGALRM is # received (forcing a first EINTR in write()). read_results = [] write_finished = False error = None def _read(): try: while not write_finished: while r in select.select([r], [], [], 1.0)[0]: s = os.read(r, 1024) read_results.append(s) except BaseException as exc: nonlocal error error = exc t = threading.Thread(target=_read) t.daemon = True def alarm1(sig, frame): signal.signal(signal.SIGALRM, alarm2) signal.alarm(1) def alarm2(sig, frame): t.start() large_data = item * N signal.signal(signal.SIGALRM, alarm1) try: wio = self.io.open(w, **fdopen_kwargs) signal.alarm(1) # Expected behaviour: # - first raw write() is partial (because of the limited pipe buffer # and the first alarm) # - second raw write() returns EINTR (because of the second alarm) # - subsequent write()s are successful (either partial or complete) written = wio.write(large_data) self.assertEqual(N, written) wio.flush() write_finished = True t.join() self.assertIsNone(error) self.assertEqual(N, sum(len(x) for x in read_results)) finally: signal.alarm(0) write_finished = True os.close(w) os.close(r) # This is deliberate. If we didn't close the file descriptor # before closing wio, wio would try to flush its internal # buffer, and could block (in case of failure). try: wio.close() except OSError as e: if e.errno != errno.EBADF: raise def test_interrupted_write_retry_buffered(self): self.check_interrupted_write_retry(b"x", mode="wb") def test_interrupted_write_retry_text(self): self.check_interrupted_write_retry("x", mode="w", encoding="latin1") class CSignalsTest(SignalsTest): io = io class PySignalsTest(SignalsTest): io = pyio # Handling reentrancy issues would slow down _pyio even more, so the # tests are disabled. 
test_reentrant_write_buffered = None test_reentrant_write_text = None def load_tests(*args): tests = (CIOTest, PyIOTest, APIMismatchTest, CBufferedReaderTest, PyBufferedReaderTest, CBufferedWriterTest, PyBufferedWriterTest, CBufferedRWPairTest, PyBufferedRWPairTest, CBufferedRandomTest, PyBufferedRandomTest, StatefulIncrementalDecoderTest, CIncrementalNewlineDecoderTest, PyIncrementalNewlineDecoderTest, CTextIOWrapperTest, PyTextIOWrapperTest, CMiscIOTest, PyMiscIOTest, CSignalsTest, PySignalsTest, ) # Put the namespaces of the IO module we are testing and some useful mock # classes in the __dict__ of each test. mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO, MockNonBlockWriterIO, MockUnseekableIO, MockRawIOWithoutRead, SlowFlushRawIO) all_members = io.__all__ + ["IncrementalNewlineDecoder"] c_io_ns = {name : getattr(io, name) for name in all_members} py_io_ns = {name : getattr(pyio, name) for name in all_members} globs = globals() c_io_ns.update((x.__name__, globs["C" + x.__name__]) for x in mocks) py_io_ns.update((x.__name__, globs["Py" + x.__name__]) for x in mocks) # Avoid turning open into a bound method. py_io_ns["open"] = pyio.OpenWrapper for test in tests: if test.__name__.startswith("C"): for name, obj in c_io_ns.items(): setattr(test, name, obj) elif test.__name__.startswith("Py"): for name, obj in py_io_ns.items(): setattr(test, name, obj) suite = unittest.TestSuite([unittest.makeSuite(test) for test in tests]) return suite if __name__ == "__main__": unittest.main()
py
b41141e41ab5de61459f90f43611b139debc06b5
# -*- coding: utf-8 -*-
from tests import base

from girder.models.setting import Setting
from girder_google_analytics.settings import PluginSettings


def setUpModule():
    """Enable the google_analytics plugin and start the test server."""
    base.enabledPlugins.append('google_analytics')
    base.startServer()


def tearDownModule():
    """Stop the test server started in setUpModule."""
    base.stopServer()


class GoogleAnalyticsTest(base.TestCase):
    def testGetAnalytics(self):
        """The tracking-id endpoint is public and reflects the stored setting."""
        # test without set
        resp = self.request('/google_analytics/id')
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['google_analytics_id'], '')

        # set tracking id
        Setting().set(PluginSettings.TRACKING_ID, 'testing-tracking-id')

        # verify we can get the tracking id without being authenticated.
        resp = self.request('/google_analytics/id')
        self.assertStatusOk(resp)
        # assertEquals is a deprecated alias (removed in Python 3.12); use
        # assertEqual, consistent with the assertion above.
        self.assertEqual(resp.json['google_analytics_id'], 'testing-tracking-id')
py
b41142dbf25baa8196a8652abeae6e4bde4f39c4
from django.urls import path

from blog.feeds import CategoryFeed, EntriesFeed

# Reversal namespace for these routes, e.g. reverse("blog:entries").
app_name = "blog"

urlpatterns = [
    # Feed of blog entries (EntriesFeed is presumably a syndication Feed
    # class — see blog/feeds.py).
    path("entries/", EntriesFeed(), name="entries"),
    # Per-category feed, selected by the category's slug.
    path("categories/<slug:slug>/", CategoryFeed(), name="category"),
]
py
b411430a148b1487df47007f97ef1a2707cfaac8
# Copyright 2019 The Magenta Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. r"""Utilities for splitting wav files and labels into smaller chunks.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import bisect import math import librosa from magenta.music import audio_io from magenta.music import sequences_lib from magenta.protobuf import music_pb2 import numpy as np import tensorflow as tf def find_inactive_ranges(note_sequence): """Returns ranges where no notes are active in the note_sequence.""" start_sequence = sorted( note_sequence.notes, key=lambda note: note.start_time, reverse=True) end_sequence = sorted( note_sequence.notes, key=lambda note: note.end_time, reverse=True) notes_active = 0 time = start_sequence[-1].start_time inactive_ranges = [] if time > 0: inactive_ranges.append(0.) 
inactive_ranges.append(time) start_sequence.pop() notes_active += 1 # Iterate through all note on events while start_sequence or end_sequence: if start_sequence and (start_sequence[-1].start_time < end_sequence[-1].end_time): if notes_active == 0: time = start_sequence[-1].start_time inactive_ranges.append(time) notes_active += 1 start_sequence.pop() else: notes_active -= 1 if notes_active == 0: time = end_sequence[-1].end_time inactive_ranges.append(time) end_sequence.pop() # if the last note is the same time as the end, don't add it # remove the start instead of creating a sequence with 0 length if inactive_ranges[-1] < note_sequence.total_time: inactive_ranges.append(note_sequence.total_time) else: inactive_ranges.pop() assert len(inactive_ranges) % 2 == 0 inactive_ranges = [(inactive_ranges[2 * i], inactive_ranges[2 * i + 1]) for i in range(len(inactive_ranges) // 2)] return inactive_ranges def _last_zero_crossing(samples, start, end): """Returns the last zero crossing in the window [start, end).""" samples_greater_than_zero = samples[start:end] > 0 samples_less_than_zero = samples[start:end] < 0 samples_greater_than_equal_zero = samples[start:end] >= 0 samples_less_than_equal_zero = samples[start:end] <= 0 # use np instead of python for loop for speed xings = np.logical_or( np.logical_and(samples_greater_than_zero[:-1], samples_less_than_equal_zero[1:]), np.logical_and(samples_less_than_zero[:-1], samples_greater_than_equal_zero[1:])).nonzero()[0] return xings[-1] + start if xings.size > 0 else None def find_split_points(note_sequence, samples, sample_rate, min_length, max_length): """Returns times at which there are no notes. The general strategy employed is to first check if there are places in the sustained pianoroll where no notes are active within the max_length window; if so the middle of the last gap is chosen as the split point. 
If not, then it checks if there are places in the pianoroll without sustain where no notes are active and then finds last zero crossing of the wav file and chooses that as the split point. If neither of those is true, then it chooses the last zero crossing within the max_length window as the split point. If there are no zero crossings in the entire window, then it basically gives up and advances time forward by max_length. Args: note_sequence: The NoteSequence to split. samples: The audio file as samples. sample_rate: The sample rate (samples/second) of the audio file. min_length: Minimum number of seconds in a split. max_length: Maximum number of seconds in a split. Returns: A list of split points in seconds from the beginning of the file. """ if not note_sequence.notes: return [] end_time = note_sequence.total_time note_sequence_sustain = sequences_lib.apply_sustain_control_changes( note_sequence) ranges_nosustain = find_inactive_ranges(note_sequence) ranges_sustain = find_inactive_ranges(note_sequence_sustain) nosustain_starts = [x[0] for x in ranges_nosustain] sustain_starts = [x[0] for x in ranges_sustain] nosustain_ends = [x[1] for x in ranges_nosustain] sustain_ends = [x[1] for x in ranges_sustain] split_points = [0.] 
while end_time - split_points[-1] > max_length: max_advance = split_points[-1] + max_length # check for interval in sustained sequence pos = bisect.bisect_right(sustain_ends, max_advance) if pos < len(sustain_starts) and max_advance > sustain_starts[pos]: split_points.append(max_advance) # if no interval, or we didn't fit, try the unmodified sequence elif pos == 0 or sustain_starts[pos - 1] <= split_points[-1] + min_length: # no splits available, use non sustain notes and find close zero crossing pos = bisect.bisect_right(nosustain_ends, max_advance) if pos < len(nosustain_starts) and max_advance > nosustain_starts[pos]: # we fit, great, try to split at a zero crossing zxc_start = nosustain_starts[pos] zxc_end = max_advance last_zero_xing = _last_zero_crossing( samples, int(math.floor(zxc_start * sample_rate)), int(math.ceil(zxc_end * sample_rate))) if last_zero_xing: last_zero_xing = float(last_zero_xing) / sample_rate split_points.append(last_zero_xing) else: # give up and just return where there are at least no notes split_points.append(max_advance) else: # there are no good places to cut, so just pick the last zero crossing # check the entire valid range for zero crossings start_sample = int( math.ceil((split_points[-1] + min_length) * sample_rate)) + 1 end_sample = start_sample + (max_length - min_length) * sample_rate last_zero_xing = _last_zero_crossing(samples, start_sample, end_sample) if last_zero_xing: last_zero_xing = float(last_zero_xing) / sample_rate split_points.append(last_zero_xing) else: # give up and advance by max amount split_points.append(max_advance) else: # only advance as far as max_length new_time = min(np.mean(ranges_sustain[pos - 1]), max_advance) split_points.append(new_time) if split_points[-1] != end_time: split_points.append(end_time) # ensure that we've generated a valid sequence of splits for prev, curr in zip(split_points[:-1], split_points[1:]): assert curr > prev assert curr - prev <= max_length + 1e-8 if curr < end_time: 
assert curr - prev >= min_length - 1e-8 assert end_time - split_points[-1] < max_length return split_points def create_example(example_id, ns, wav_data, velocity_range=None): """Creates a tf.train.Example proto for training or testing.""" if velocity_range is None: velocities = [note.velocity for note in ns.notes] velocity_max = np.max(velocities) velocity_min = np.min(velocities) velocity_range = music_pb2.VelocityRange(min=velocity_min, max=velocity_max) example = tf.train.Example( features=tf.train.Features( feature={ 'id': tf.train.Feature( bytes_list=tf.train.BytesList( value=[example_id.encode('utf-8')])), 'sequence': tf.train.Feature( bytes_list=tf.train.BytesList( value=[ns.SerializeToString()])), 'audio': tf.train.Feature( bytes_list=tf.train.BytesList(value=[wav_data])), 'velocity_range': tf.train.Feature( bytes_list=tf.train.BytesList( value=[velocity_range.SerializeToString()])), })) return example def process_record(wav_data, ns, example_id, min_length=5, max_length=20, sample_rate=16000): """Split a record into chunks and create an example proto. To use the full length audio and notesequence, set min_length=0 and max_length=-1. Args: wav_data: audio data in WAV format. ns: corresponding NoteSequence. example_id: id for the example proto min_length: minimum length in seconds for audio chunks. max_length: maximum length in seconds for audio chunks. sample_rate: desired audio sample rate. Yields: Example protos. 
""" samples = audio_io.wav_data_to_samples(wav_data, sample_rate) samples = librosa.util.normalize(samples, norm=np.inf) if max_length == min_length: splits = np.arange(0, ns.total_time, max_length) elif max_length > 0: splits = find_split_points(ns, samples, sample_rate, min_length, max_length) else: splits = [0, ns.total_time] velocities = [note.velocity for note in ns.notes] velocity_max = np.max(velocities) velocity_min = np.min(velocities) velocity_range = music_pb2.VelocityRange(min=velocity_min, max=velocity_max) for start, end in zip(splits[:-1], splits[1:]): if end - start < min_length: continue if start == 0 and end == ns.total_time: new_ns = ns else: new_ns = sequences_lib.extract_subsequence(ns, start, end) if not new_ns.notes: tf.logging.warning('skipping empty sequence') continue if start == 0 and end == ns.total_time: new_samples = samples else: # the resampling that happen in crop_wav_data is really slow # and we've already done it once, avoid doing it twice new_samples = audio_io.crop_samples(samples, sample_rate, start, end - start) new_wav_data = audio_io.samples_to_wav_data(new_samples, sample_rate) yield create_example( example_id, new_ns, new_wav_data, velocity_range=velocity_range)
py
b41143264069e91d23fc03666c10685c459c87e4
import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch


class TestQ(unittest.TestCase):
    """Runs the `solution` script against canned stdin and checks its stdout."""

    # Each element is returned by one input() call inside solution
    # (presumably a graph problem: "6 5 3" then edges — confirm against
    # the solution module).
    @patch('builtins.input', side_effect=[
        '6 5 3',
        '1 3',
        '2 3',
        '3 4',
        '4 5',
        '3 6',
        '1 5',
        '1',
        '2 6',
    ])
    def test_case_0(self, input_mock=None):
        # Capture everything the solution prints.
        text_trap = io.StringIO()
        with redirect_stdout(text_trap):
            # Importing executes the script top-level; note that module
            # caching means it only runs on the first import in a process.
            import solution
        self.assertEqual(text_trap.getvalue(), '1\n')


if __name__ == '__main__':
    unittest.main()
py
b41143342eb9550c99e259f3c12577aeb0a5bf5d
# coding: utf-8 """ Kubernetes No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) OpenAPI spec version: v1.12.1 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class V1beta1Role(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'api_version': 'str', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'rules': 'list[V1beta1PolicyRule]' } attribute_map = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'rules': 'rules' } def __init__(self, api_version=None, kind=None, metadata=None, rules=None): """ V1beta1Role - a model defined in Swagger """ self._api_version = None self._kind = None self._metadata = None self._rules = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata self.rules = rules @property def api_version(self): """ Gets the api_version of this V1beta1Role. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources :return: The api_version of this V1beta1Role. :rtype: str """ return self._api_version @api_version.setter def api_version(self, api_version): """ Sets the api_version of this V1beta1Role. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources :param api_version: The api_version of this V1beta1Role. :type: str """ self._api_version = api_version @property def kind(self): """ Gets the kind of this V1beta1Role. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds :return: The kind of this V1beta1Role. :rtype: str """ return self._kind @kind.setter def kind(self, kind): """ Sets the kind of this V1beta1Role. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds :param kind: The kind of this V1beta1Role. :type: str """ self._kind = kind @property def metadata(self): """ Gets the metadata of this V1beta1Role. Standard object's metadata. :return: The metadata of this V1beta1Role. :rtype: V1ObjectMeta """ return self._metadata @metadata.setter def metadata(self, metadata): """ Sets the metadata of this V1beta1Role. Standard object's metadata. :param metadata: The metadata of this V1beta1Role. :type: V1ObjectMeta """ self._metadata = metadata @property def rules(self): """ Gets the rules of this V1beta1Role. Rules holds all the PolicyRules for this Role :return: The rules of this V1beta1Role. :rtype: list[V1beta1PolicyRule] """ return self._rules @rules.setter def rules(self, rules): """ Sets the rules of this V1beta1Role. Rules holds all the PolicyRules for this Role :param rules: The rules of this V1beta1Role. 
:type: list[V1beta1PolicyRule] """ if rules is None: raise ValueError("Invalid value for `rules`, must not be `None`") self._rules = rules def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, V1beta1Role): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
py
b41144776c9dd3fb6164eb67a1c6e16c1cf31707
# coding=utf-8 """Update checking module for Sopel. This is separated from version.py, so that it can be easily overridden by distribution packagers, and they can check their repositories rather than the Sopel website. """ # Copyright 2014, Elsie Powell, embolalia.com # Licensed under the Eiffel Forum License 2. from __future__ import unicode_literals, absolute_import, print_function, division import json import sopel import sopel.module import requests import sopel.tools wait_time = 24 * 60 * 60 # check once per day startup_check_run = False version_url = 'http://sopel.chat/latest.json' message = ( 'A new Sopel version, {}, is available. I am running {}. Please update ' 'me. Full release notes at {}' ) unstable_message = ( 'A new pre-release version, {}, is available. I am running {}. Please ' 'update me. {}' ) @sopel.module.event(sopel.tools.events.RPL_LUSERCLIENT) @sopel.module.rule('.*') def startup_version_check(bot, trigger): global startup_check_run if not startup_check_run: startup_check_run = True check_version(bot) @sopel.module.interval(wait_time) def check_version(bot): version = sopel.version_info info = requests.get(version_url).json() if version.releaselevel == 'final': latest = info['version'] notes = info['release_notes'] else: latest = info['unstable'] notes = info.get('unstable_notes', '') if notes: notes = 'Full release notes at ' + notes latest_version = sopel._version_info(latest) msg = message.format(latest, sopel.__version__, notes) if version < latest_version: bot.msg(bot.config.core.owner, msg)
py
b411451fae4450220e8715290a3f02ebab084239
from rlkit.launchers.experiments.murtaza.multiworld import her_td3_experiment import rlkit.misc.hyperparameter as hyp from multiworld.envs.mujoco.cameras import sawyer_pusher_camera_upright_v2 from multiworld.envs.pygame.point2d import Point2DWallEnv from rlkit.launchers.launcher_util import run_experiment from rlkit.launchers.arglauncher import run_variants import numpy as np if __name__ == "__main__": # noinspection PyTypeChecker variant = dict( algo_kwargs=dict( base_kwargs=dict( num_epochs=101, num_steps_per_epoch=1000, num_steps_per_eval=1000, max_path_length=100, num_updates_per_env_step=4, batch_size=128, discount=0.99, min_num_steps_before_training=4000, reward_scale=1.0, render=False, collection_mode='online', tau=1e-2, parallel_env_params=dict( num_workers=1, ), ), her_kwargs=dict( observation_key='state_observation', desired_goal_key='state_desired_goal', ), td3_kwargs=dict(), ), replay_buffer_kwargs=dict( max_size=int(1E6), fraction_goals_rollout_goals=0.1, fraction_goals_env_goals=0.5, ob_keys_to_save=[], ), qf_kwargs=dict( hidden_sizes=[400, 300], ), policy_kwargs=dict( hidden_sizes=[400, 300], ), algorithm='HER-TD3', version='normal', es_kwargs=dict( max_sigma=.2, ), exploration_type='ou', observation_key='state_observation', desired_goal_key='state_desired_goal', init_camera=sawyer_pusher_camera_upright_v2, do_state_exp=True, save_video=True, imsize=84, snapshot_mode='gap_and_last', snapshot_gap=10, env_class=Point2DWallEnv, env_kwargs=dict( render_onscreen=False, ball_radius=1, images_are_rgb=True, show_goal=False, ), num_exps_per_instance=1, ) search_space = { # 'env_id': ['SawyerPushAndReacherXYEnv-v0', ], 'seedid': range(5), 'algo_kwargs.base_kwargs.num_updates_per_env_step': [4, ], 'replay_buffer_kwargs.fraction_goals_rollout_goals': [0.1, ], 'replay_buffer_kwargs.fraction_goals_env_goals': [0.5, ], } sweeper = hyp.DeterministicHyperparameterSweeper( search_space, default_parameters=variant, ) # n_seeds = 1 # mode = 'local' # exp_prefix = 
'test' n_seeds = 1 mode = 'ec2' exp_prefix = 'sawyer_pusher_state_final' variants = [] for variant in sweeper.iterate_hyperparameters(): variants.append(variant) run_variants(her_td3_experiment, variants, run_id=1)
py
b411454cb765ed75ea11f7429b0b070073c5e59a
from ElevatorBot.backgroundEvents.base import BaseEvent
from ElevatorBot.discordEvents.base import ElevatorSnake
from ElevatorBot.misc.cache import descend_cache
from ElevatorBot.networking.errors import BackendException
from ElevatorBot.static.descendOnlyIds import descend_channels


class MemberCountUpdater(BaseEvent):
    """This updates the member count for descend"""

    def __init__(self):
        # Refresh every half hour via the BaseEvent interval scheduler.
        interval_minutes = 30
        super().__init__(scheduler_type="interval", interval_minutes=interval_minutes)

    async def run(self, client: ElevatorSnake) -> None:
        """Rename the cached member-count channel to the current guild size."""
        # get the channel if exists and update that message
        try:
            channel = await descend_cache.get_member_count()
        except BackendException:
            # No member-count channel is configured/cached; nothing to update.
            return

        # update the name
        await channel.edit(
            name=f"Members|{descend_channels.guild.member_count}",
            reason="Member Count Update",
        )
py
b41145858fe930cc45a4675840d776593d538054
# NOTE: vendored copy of sentry_sdk's atexit integration (see the
# reloadium.vendored import paths); keep behavior identical to upstream.
from __future__ import absolute_import

import os
import sys
import atexit

from reloadium.vendored.sentry_sdk.hub import Hub
from reloadium.vendored.sentry_sdk.utils import logger
from reloadium.vendored.sentry_sdk.integrations import Integration

from reloadium.vendored.sentry_sdk._types import MYPY

if MYPY:
    from typing import Any
    from typing import Optional


def default_callback(pending, timeout):
    # type: (int, int) -> None
    """This is the default shutdown callback that is set on the options.
    It prints out a message to stderr that informs the user that some events
    are still pending and the process is waiting for them to flush out.
    """

    def echo(msg):
        # type: (str) -> None
        sys.stderr.write(msg + "\n")

    echo("Sentry is attempting to send %i pending error messages" % pending)
    echo("Waiting up to %s seconds" % timeout)
    # Windows terminals interrupt with Ctrl-Break rather than Ctrl-C.
    echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
    sys.stderr.flush()


class AtexitIntegration(Integration):
    """Flushes pending Sentry events at interpreter shutdown via atexit."""

    identifier = "atexit"

    def __init__(self, callback=None):
        # type: (Optional[Any]) -> None
        # callback(pending, timeout) is invoked while the client drains its
        # queue; defaults to the stderr progress message above.
        if callback is None:
            callback = default_callback

        self.callback = callback

    @staticmethod
    def setup_once():
        # type: () -> None
        @atexit.register
        def _shutdown():
            # type: () -> None
            logger.debug("atexit: got shutdown signal")
            hub = Hub.main
            integration = hub.get_integration(AtexitIntegration)
            if integration is not None:
                logger.debug("atexit: shutting down client")

                # If there is a session on the hub, close it now.
                hub.end_session()

                # If an integration is there, a client has to be there.
                client = hub.client  # type: Any
                client.close(callback=integration.callback)
py
b411472d2d2dfab9518c55f1e02aee39e3420211
"""Allow users to set and activate scenes.""" from __future__ import annotations from collections import namedtuple import logging from typing import Any import voluptuous as vol from homeassistant import config as conf_util from homeassistant.components.light import ATTR_TRANSITION from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, STATES, Scene from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_STATE, CONF_ENTITIES, CONF_ICON, CONF_ID, CONF_NAME, CONF_PLATFORM, SERVICE_RELOAD, STATE_OFF, STATE_ON, ) from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant, State, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( config_per_platform, config_validation as cv, entity_platform, ) from homeassistant.helpers.state import async_reproduce_state from homeassistant.loader import async_get_integration def _convert_states(states): """Convert state definitions to State objects.""" result = {} for entity_id, info in states.items(): entity_id = cv.entity_id(entity_id) if isinstance(info, dict): entity_attrs = info.copy() state = entity_attrs.pop(ATTR_STATE, None) attributes = entity_attrs else: state = info attributes = {} # YAML translates 'on' to a boolean # http://yaml.org/type/bool.html if isinstance(state, bool): state = STATE_ON if state else STATE_OFF elif not isinstance(state, str): raise vol.Invalid(f"State for {entity_id} should be a string") result[entity_id] = State(entity_id, state, attributes) return result def _ensure_no_intersection(value): """Validate that entities and snapshot_entities do not overlap.""" if ( CONF_SNAPSHOT not in value or CONF_ENTITIES not in value or all( entity_id not in value[CONF_SNAPSHOT] for entity_id in value[CONF_ENTITIES] ) ): return value raise vol.Invalid("entities and snapshot_entities must not overlap") CONF_SCENE_ID = "scene_id" CONF_SNAPSHOT = "snapshot_entities" DATA_PLATFORM = "homeassistant_scene" EVENT_SCENE_RELOADED = "scene_reloaded" 
STATES_SCHEMA = vol.All(dict, _convert_states) PLATFORM_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): HA_DOMAIN, vol.Required(STATES): vol.All( cv.ensure_list, [ vol.Schema( { vol.Optional(CONF_ID): cv.string, vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ICON): cv.icon, vol.Required(CONF_ENTITIES): STATES_SCHEMA, } ) ], ), }, extra=vol.ALLOW_EXTRA, ) CREATE_SCENE_SCHEMA = vol.All( cv.has_at_least_one_key(CONF_ENTITIES, CONF_SNAPSHOT), _ensure_no_intersection, vol.Schema( { vol.Required(CONF_SCENE_ID): cv.slug, vol.Optional(CONF_ENTITIES, default={}): STATES_SCHEMA, vol.Optional(CONF_SNAPSHOT, default=[]): cv.entity_ids, } ), ) SERVICE_APPLY = "apply" SERVICE_CREATE = "create" SCENECONFIG = namedtuple("SceneConfig", [CONF_ID, CONF_NAME, CONF_ICON, STATES]) _LOGGER = logging.getLogger(__name__) @callback def scenes_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]: """Return all scenes that reference the entity.""" if DATA_PLATFORM not in hass.data: return [] platform = hass.data[DATA_PLATFORM] return [ scene_entity.entity_id for scene_entity in platform.entities.values() if entity_id in scene_entity.scene_config.states ] @callback def entities_in_scene(hass: HomeAssistant, entity_id: str) -> list[str]: """Return all entities in a scene.""" if DATA_PLATFORM not in hass.data: return [] platform = hass.data[DATA_PLATFORM] entity = platform.entities.get(entity_id) if entity is None: return [] return list(entity.scene_config.states) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up Safegate Pro scene entries.""" _process_scenes_config(hass, async_add_entities, config) # This platform can be loaded multiple times. Only first time register the service. if hass.services.has_service(SCENE_DOMAIN, SERVICE_RELOAD): return # Store platform for later. 
platform = hass.data[DATA_PLATFORM] = entity_platform.async_get_current_platform() async def reload_config(call): """Reload the scene config.""" try: conf = await conf_util.async_hass_config_yaml(hass) except HomeAssistantError as err: _LOGGER.error(err) return integration = await async_get_integration(hass, SCENE_DOMAIN) conf = await conf_util.async_process_component_config(hass, conf, integration) if not (conf and platform): return await platform.async_reset() # Extract only the config for the Safegate Pro platform, ignore the rest. for p_type, p_config in config_per_platform(conf, SCENE_DOMAIN): if p_type != HA_DOMAIN: continue _process_scenes_config(hass, async_add_entities, p_config) hass.bus.async_fire(EVENT_SCENE_RELOADED, context=call.context) hass.helpers.service.async_register_admin_service( SCENE_DOMAIN, SERVICE_RELOAD, reload_config ) async def apply_service(call): """Apply a scene.""" reproduce_options = {} if ATTR_TRANSITION in call.data: reproduce_options[ATTR_TRANSITION] = call.data.get(ATTR_TRANSITION) await async_reproduce_state( hass, call.data[CONF_ENTITIES].values(), context=call.context, reproduce_options=reproduce_options, ) hass.services.async_register( SCENE_DOMAIN, SERVICE_APPLY, apply_service, vol.Schema( { vol.Optional(ATTR_TRANSITION): vol.All( vol.Coerce(float), vol.Clamp(min=0, max=6553) ), vol.Required(CONF_ENTITIES): STATES_SCHEMA, } ), ) async def create_service(call): """Create a scene.""" snapshot = call.data[CONF_SNAPSHOT] entities = call.data[CONF_ENTITIES] for entity_id in snapshot: state = hass.states.get(entity_id) if state is None: _LOGGER.warning( "Entity %s does not exist and therefore cannot be snapshotted", entity_id, ) continue entities[entity_id] = State(entity_id, state.state, state.attributes) if not entities: _LOGGER.warning("Empty scenes are not allowed") return scene_config = SCENECONFIG(None, call.data[CONF_SCENE_ID], None, entities) entity_id = f"{SCENE_DOMAIN}.{scene_config.name}" old = 
platform.entities.get(entity_id) if old is not None: if not old.from_service: _LOGGER.warning("The scene %s already exists", entity_id) return await platform.async_remove_entity(entity_id) async_add_entities([HomeAssistantScene(hass, scene_config, from_service=True)]) hass.services.async_register( SCENE_DOMAIN, SERVICE_CREATE, create_service, CREATE_SCENE_SCHEMA ) def _process_scenes_config(hass, async_add_entities, config): """Process multiple scenes and add them.""" scene_config = config[STATES] # Check empty list if not scene_config: return async_add_entities( HomeAssistantScene( hass, SCENECONFIG( scene.get(CONF_ID), scene[CONF_NAME], scene.get(CONF_ICON), scene[CONF_ENTITIES], ), ) for scene in scene_config ) class HomeAssistantScene(Scene): """A scene is a group of entities and the states we want them to be.""" def __init__(self, hass, scene_config, from_service=False): """Initialize the scene.""" self.hass = hass self.scene_config = scene_config self.from_service = from_service @property def name(self): """Return the name of the scene.""" return self.scene_config.name @property def icon(self): """Return the icon of the scene.""" return self.scene_config.icon @property def unique_id(self): """Return unique ID.""" return self.scene_config.id @property def extra_state_attributes(self): """Return the scene state attributes.""" attributes = {ATTR_ENTITY_ID: list(self.scene_config.states)} unique_id = self.unique_id if unique_id is not None: attributes[CONF_ID] = unique_id return attributes async def async_activate(self, **kwargs: Any) -> None: """Activate scene. Try to get entities into requested state.""" await async_reproduce_state( self.hass, self.scene_config.states.values(), context=self._context, reproduce_options=kwargs, )
py
b41148088d4f768424e7c0fa7420a92c5855753a
from typing import Dict, Optional

import pandas as pd
import pygeos as geos

from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.exceptions import InvalidExpectationConfigurationError
from great_expectations.execution_engine import (
    ExecutionEngine,
    PandasExecutionEngine,
    SparkDFExecutionEngine,
    SqlAlchemyExecutionEngine,
)
from great_expectations.expectations.expectation import ColumnExpectation
from great_expectations.expectations.metrics import (
    ColumnAggregateMetricProvider,
    column_aggregate_partial,
    column_aggregate_value,
)


# This class defines a Metric to support your Expectation.
# For most ColumnExpectations, the main business logic for calculation will live in this class.
class ColumnAggregateGeometryBoundingRadius(ColumnAggregateMetricProvider):

    # This is the id string that will be used to reference your Metric.
    metric_name = "column.geometry.minimum_bounding_radius"
    value_keys = (
        "column_shape_format",
        "diameter_flag",
    )

    # This method implements the core logic for the PandasExecutionEngine
    @column_aggregate_value(engine=PandasExecutionEngine)
    def _pandas(cls, column, **kwargs):
        """Return the radius of the minimum bounding circle of all geometries in *column*.

        The column is decoded according to ``column_shape_format`` ("wkt", "wkb"
        or "xy"), dissolved into a single geometry, and the radius of its
        minimum bounding circle is returned.
        """
        column_shape_format = kwargs.get("column_shape_format")

        # Load the column into a pygeos Geometry vector from numpy array (Series not supported).
        if column_shape_format == "wkt":
            shape_test = geos.from_wkt(column.to_numpy(), on_invalid="ignore")
        elif column_shape_format == "wkb":
            shape_test = geos.from_wkb(column.to_numpy(), on_invalid="ignore")
        elif column_shape_format == "xy":
            shape_df = pd.DataFrame(column.to_list(), columns=("x", "y"))
            # BUG FIX: the frame is created with columns ("x", "y"); the
            # previous code read ``shape_df.lon`` / ``shape_df.lat``, which
            # raised AttributeError for every "xy" input.
            shape_test = geos.points(shape_df.x, y=shape_df.y)
        else:
            raise NotImplementedError("Column values shape format not implemented.")

        # Dissolve all geometries into one, then measure its minimum bounding circle.
        shape_test = geos.union_all(shape_test)
        radius = geos.minimum_bounding_radius(shape_test)

        return radius

    # This method defines the business logic for evaluating your Metric when using a SqlAlchemyExecutionEngine
    # @column_aggregate_partial(engine=SqlAlchemyExecutionEngine)
    # def _sqlalchemy(cls, column, _dialect, **kwargs):
    #     raise NotImplementedError
    #
    # This method defines the business logic for evaluating your Metric when using a SparkDFExecutionEngine
    # @column_aggregate_partial(engine=SparkDFExecutionEngine)
    # def _spark(cls, column, **kwargs):
    #     raise NotImplementedError


# This class defines the Expectation itself
class ExpectColumnMininumBoundingRadiusToBeBetween(ColumnExpectation):
    """Expect that column values as geometry points to be contained within a bounding circle with a given radius (or diameter).

    expect_column_values_minimum_bounding_radius_to_be_between is a \
    :func:`column_expectation <great_expectations.dataset.dataset.MetaDataset.column_expectation>`.

    Args:
        column (str): \
            The column name.
            Column values must be provided in WKT or WKB format, which are common formats for GIS Database formats.
            WKT can be accessed through the ST_AsText() or ST_AsBinary() functions in queries for PostGIS and MSSQL.
            Column values can alternately be given in x,y tuple or list pairs.
            The user is responsible for the coordinate reference system and the units.
            e.g. values may be given in easting-northing pairs.
        min_value (float or None): \
            The minimum radius (or diameter) that bounds all geometries in the column
        max_value (float or None): \
            The maximum radius (or diameter) that bounds all geometries in the column
        strict_min (boolean): \
            If True, the minimal radius must be strictly larger than min_value, Default: False
        strict_max (boolean): \
            If True, the maximal radius must be strictly smaller than max_value, Default: False

    Keyword Args:
        column_shape_format: str
            Geometry format for 'column' (wkt, wkb, xy).
            Column values can be provided in WKT or WKB format, which are common formats for GIS Database formats.
            xy also supports tuple pairs or list pairs for points only
            WKT can be accessed through the ST_AsText() or ST_AsBinary() functions in queries for PostGIS and MSSQL.
            Must be one of: [wkt, wkb, xy]
            Default: wkt
        diameter_flag (boolean): \
            If True, the user can specify a diameter as opposed to a radius, Default: False

    Returns:
        An ExpectationSuiteValidationResult

    Notes:
        These fields in the result object are customized for this expectation:
        ::
            {
                "observed_value": (list) The actual bounding radius (or diameter)
            }

        * min_value and max_value are both inclusive unless strict_min or strict_max are set to True.
        * If min_value is None, then max_value is treated as an upper bound
        * If max_value is None, then min_value is treated as a lower bound
    """

    # These examples will be shown in the public gallery.
    # They will also be executed as unit tests for your Expectation.
    examples = [
        {
            "data": {
                "points_only": [
                    "POINT(1 1)",
                    "POINT(2 2)",
                    "POINT(6 4)",
                    "POINT(3 9)",
                    "POINT(5 5)",
                ],
                "points_and_lines": [
                    "POINT(1 1)",
                    "POINT(2 2)",
                    "POINT(6 4)",
                    "POINT(3 9)",
                    "LINESTRING(5 5, 8 10)",
                ],
            },
            "tests": [
                {
                    "title": "positive_test_with_points",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_only",
                        "column_shape_format": "wkt",
                        "min_value": None,
                        "max_value": 5,
                        "strict_min": False,
                        "strict_max": False,
                        "diameter_flag": False,
                    },
                    "out": {
                        "success": True,
                        # "result":{"observed_value":4.123105625617661}
                    },
                },
                {
                    "title": "positive_test_with_points_and_lines",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_and_lines",
                        "column_shape_format": "wkt",
                        "min_value": 5,
                        "max_value": 10,
                        "strict_min": True,
                        "strict_max": True,
                        "diameter_flag": False,
                    },
                    "out": {
                        "success": True,
                        # "result":{"observed_value":5.70087712549569}
                    },
                },
                {
                    "title": "negative positive_test_with_points_and_lines",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_and_lines",
                        "column_shape_format": "wkt",
                        "min_value": 1,
                        "max_value": 10,
                        "strict_min": False,
                        "strict_max": True,
                        "diameter_flag": True,
                    },
                    "out": {
                        "success": False,
                        # "result":{"observed_value":11.40175425099138}
                    },
                },
            ],
        }
    ]

    # This is a tuple consisting of all Metrics necessary to evaluate the Expectation.
    metric_dependencies = ("column.geometry.minimum_bounding_radius",)

    # This a tuple of parameter names that can affect whether the Expectation evaluates to True or False.
    success_keys = (
        "diameter_flag",
        "column_shape_format",
        "min_value",
        "strict_min",
        "max_value",
        "strict_max",
    )

    # This dictionary contains default values for any parameters that should have default values.
    default_kwarg_values = {
        "diameter_flag": False,
        "column_shape_format": "wkt",
    }

    def validate_configuration(
        self, configuration: Optional[ExpectationConfiguration]
    ) -> None:
        """
        Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
        necessary configuration arguments have been provided for the validation of the expectation.

        Args:
            configuration (OPTIONAL[ExpectationConfiguration]): \
                An optional Expectation Configuration entry that will be used to configure the expectation
        Returns:
            None. Raises InvalidExpectationConfigurationError if the config is not validated successfully
        """
        super().validate_configuration(configuration)
        if configuration is None:
            configuration = self.configuration

        # # Check other things in configuration.kwargs and raise Exceptions if needed
        # try:
        #     assert (
        #         ...
        #     ), "message"
        #     assert (
        #         ...
        #     ), "message"
        # except AssertionError as e:
        #     raise InvalidExpectationConfigurationError(str(e))

    # This method performs a validation of your metrics against your success keys, returning a dict indicating the success or failure of the Expectation.
    def _validate(
        self,
        configuration: ExpectationConfiguration,
        metrics: Dict,
        runtime_configuration: dict = None,
        execution_engine: ExecutionEngine = None,
    ):
        """Compare the computed bounding radius (or diameter) against the configured bounds."""
        radius = metrics.get("column.geometry.minimum_bounding_radius")

        # Resolve the success kwargs once instead of five separate lookups.
        success_kwargs = self.get_success_kwargs(configuration)
        diameter_flag = success_kwargs.get("diameter_flag")
        min_value = success_kwargs.get("min_value")
        max_value = success_kwargs.get("max_value")
        strict_min = success_kwargs.get("strict_min")
        strict_max = success_kwargs.get("strict_max")

        # The metric always yields a radius; double it when the user configured diameters.
        if diameter_flag:
            distance = radius * 2
        else:
            distance = radius

        # Evaluate the between statement (from column_values_between.py)
        if min_value is None:
            if strict_max:
                success = distance < max_value
            else:
                success = distance <= max_value
        elif max_value is None:
            if strict_min:
                success = min_value < distance
            else:
                success = min_value <= distance
        else:
            if strict_min and strict_max:
                success = (min_value < distance) & (distance < max_value)
            elif strict_min:
                success = (min_value < distance) & (distance <= max_value)
            elif strict_max:
                success = (min_value <= distance) & (distance < max_value)
            else:
                success = (min_value <= distance) & (distance <= max_value)

        return {"success": success, "result": {"observed_value": distance}}

    # This object contains metadata for display in the public Gallery
    library_metadata = {
        "tags": ["hackathon-2022"],  # Tags for this Expectation in the Gallery
        "contributors": [  # Github handles for all contributors to this Expectation.
            "@pjdobson",  # Don't forget to add your github handle here!
        ],
        "requirements": ["pygeos"],
    }


if __name__ == "__main__":
    ExpectColumnMininumBoundingRadiusToBeBetween().print_diagnostic_checklist()
py
b4114833a58f6983d7689dcc787b20bde94bb65d
#!/usr/bin/env python3
"""Test helper: connect to the broker under test and publish one retained message.

Usage: helper.py <topic> <port>
Exits 0 on success; an unhandled exception (e.g. connect failure) exits non-zero.
"""

from mosq_test_helper import *


def _publish_retained(topic, port):
    """Connect as "test-helper", publish a retained QoS0 "message" on *topic*, disconnect."""
    connect_packet = mosq_test.gen_connect("test-helper", keepalive=60)
    connack_packet = mosq_test.gen_connack(rc=0)
    publish_packet = mosq_test.gen_publish(topic, qos=0, retain=True, payload="message")

    sock = mosq_test.do_client_connect(
        connect_packet, connack_packet, connack_error="helper connack", port=port
    )
    sock.send(publish_packet)
    sock.close()


_publish_retained(sys.argv[1], int(sys.argv[2]))
exit(0)
py
b411488221d2a5ee41976cb42d446761c6172ded
""" import logging from la.evaluate import BoardEvaluate from dinglinghui.Macro import BLANK, WIDTH, DEPTH, HEIGHT, WHITE from dinglinghui.Node import Node def search_line(board, row): # TODO:This is function isn't work well. node_list = [] for i in range(1, WIDTH, 2): if board.chessboard[row][i] != BLANK: if board.chessboard[row][i - 1] == BLANK: node = Node(i - 1, row) node_list.append(node) if board.chessboard[row][i - 2] == BLANK: node = Node(i - 2, row) node_list.append(node) return node_list def update_board(board, node_list): for i in node_list: board.chessboard[i.y][i.x] = i.kind return board def restore_board(board, node_list): for i in node_list: board.chessboard[i.y][i.x] = BLANK def search_node(board, node_list): nodes = [] board = update_board(board, node_list) for i in range(HEIGHT): nodes.extend(search_line(board, i)) restore_board(board, node_list) return nodes def evaluate_point(board, player): evalue = BoardEvaluate(board, player) return evalue.evaluate() def create_nodes(board, node, node_list, alpha, beta, max_value, min_value, player): logging.debug("enter create nodes function") logging.info("alpha: " + str(alpha)) # well, i can input chinese on my computer. # player is refer to the one who is player, possibly is the one chess. 
not player is the opponent if node.depth <= DEPTH & node.kind == player: node.create_children(search_node(board, node_list)) for i in node.children: node_list.append(i) i.beta = evaluate_point(board, player) restore_board(board, node_list) beta += i.beta if beta < min_value: i.beta = beta # sign to show this node is runnable create_nodes(board, i, node_list, alpha, beta, max_value, beta, not player) else: node_list.pop() elif node.depth <= DEPTH & node.kind != player: node.create_children(search_node(board, node_list)) for i in node.children: node_list.append(i) i.alpha = evaluate_point(board, player) restore_board(board, node_list) alpha += i.alpha if alpha > max_value: create_nodes(board, i, node_list, alpha, beta, alpha, min_value, not player) else: node_list.pop() def create_and_pure_tree(board, player): logging.debug("enter create and pure the tree function") logging.info("player: " + str(player)) node = Node(0, 0) node.kind = WHITE nodes = [node] create_nodes(board, node, nodes, 0, 0, 0, 1000, player) return nodes[0].x, nodes[0].y """ import time from dinglinghui.Macro import HEIGHT, WIDTH, SCORE_MIN, SCORE_MAX, SCORE_FIVE, MAP_ENTRY_TYPE, DEPTH, BLANK from la.evaluate import BoardEvaluate def isWin(board, turn): boardevaluate = BoardEvaluate(board, turn) return boardevaluate.evaluate() # get all positions that is empty def genmove(board): moves = [] for y in range(HEIGHT): for x in range(WIDTH): if board.board[y][x] == BLANK: score = board.pos_score[y][x] moves.append((score, x, y)) moves.sort(reverse=True) return moves def search(board, turn): moves = genmove(board) boardevaluate = BoardEvaluate(board, turn) bestmove = None max_score = -0x7fffffff for score, x, y in moves: board.board[y][x] = turn score = boardevaluate.evaluate() board.board[y][x] = 0 if score > max_score: max_score = score bestmove = (max_score, x, y) return bestmove def findBestChess(board, turn): time1 = time.time() score, x, y = search(board, turn) time2 = time.time() print('time[%f] 
(%d, %d), score[%d] save[%d]' % ((time2 - time1), x, y, score, turn)) return x, y # check if has a none empty position in it's radius range def hasNeighbor(board, x, y, radius): start_x, end_x = (x - radius), (x + radius) start_y, end_y = (y - radius), (y + radius) for i in range(start_y, end_y + 1): for j in range(start_x, end_x + 1): if 0 <= i < HEIGHT and 0 <= j < WIDTH: if board[i][j] != 0: return True return False def __search(board, turn, depth, alpha=SCORE_MIN, beta=SCORE_MAX): boardevaluate = BoardEvaluate(board, turn) score = boardevaluate.evaluate() if depth <= 0 or abs(score) >= SCORE_FIVE: return score moves = genmove(board) bestmove = None board.alpha += len(moves) # if there are no moves, just return the score if len(moves) == 0: return score for _, x, y in moves: board.board[y][x] = turn if turn == MAP_ENTRY_TYPE.MAP_PLAYER_ONE: op_turn = MAP_ENTRY_TYPE.MAP_PLAYER_TWO else: op_turn = MAP_ENTRY_TYPE.MAP_PLAYER_ONE score = __search(board, op_turn, depth - 1, -beta, -alpha) board.board[y][x] = 0 board.beta += 1 # alpha/beta pruning if score > alpha: alpha = score bestmove = (x, y) if alpha >= beta: break if depth == DEPTH and bestmove: board.bestmove = bestmove return alpha
py
b41148b2c78e7e551f1ae9ca231d7a47971e4040
#!/usr/bin/env python
# Four spaces as indentation [no tabs]

from PDDL import PDDL_Parser
import pickle


def convert(facts):
    """Return a hashable key for a state: a tuple of each fact's leading element.

    Renamed parameter (was ``list``) so the builtin is no longer shadowed;
    every call site in this file passes the argument positionally.
    NOTE(review): only element [0] of every fact is kept — states whose facts
    differ only in their arguments collapse to the same key. Confirm this is
    the intended key scheme.
    """
    return tuple(fact[0] for fact in facts)


class Constructor:

    #-----------------------------------------------
    # Construct
    #-----------------------------------------------

    def construct(self, domain, problem):
        """Parse *domain*/*problem* PDDL files and enumerate the reachable transition system.

        Returns [] when the goal already holds in the initial state, otherwise
        [transitions, initial_state] where ``transitions`` maps a state key to
        {action name -> successor state key}.
        """
        # Parser
        parser = PDDL_Parser()
        parser.parse_domain(domain)
        parser.parse_problem(problem)
        # Parsed data
        state = parser.state
        initial_state = convert(state)
        goal_pos = parser.positive_goals
        goal_not = parser.negative_goals
        # Do nothing if the goal is already satisfied
        if self.applicable(state, goal_pos, goal_not):
            return []
        # Grounding process: instantiate every action schema over the problem objects
        ground_actions = []
        for action in parser.actions:
            for act in action.groundify(parser.objects):
                ground_actions.append(act)
        # Breadth-first search over the full state space
        visited = [state]
        need_visit = [state]
        transitions = dict()
        while need_visit:
            state = need_visit.pop(0)
            transitions[convert(state)] = dict()
            for act in ground_actions:
                if self.applicable(state, act.positive_preconditions, act.negative_preconditions):
                    new_state = self.apply(state, act.add_effects, act.del_effects)
                    if new_state not in visited:
                        visited.append(new_state)
                        need_visit.append(new_state)
                    transitions[convert(state)][act.name] = convert(new_state)
        return [transitions, initial_state]

    #-----------------------------------------------
    # Applicable
    #-----------------------------------------------

    def applicable(self, state, positive, negative):
        """True iff all *positive* facts hold in *state* and no *negative* fact does."""
        for i in positive:
            if i not in state:
                return False
        for i in negative:
            if i in state:
                return False
        return True

    #-----------------------------------------------
    # Apply
    #-----------------------------------------------

    def apply(self, state, positive, negative):
        """Return the successor of *state*: drop *negative* facts, then add *positive* ones."""
        new_state = []
        for i in state:
            if i not in negative:
                new_state.append(i)
        for i in positive:
            if i not in new_state:
                new_state.append(i)
        return new_state


# ==========================================
# Main
# ==========================================
if __name__ == '__main__':
    import sys, time

    start_time = time.time()
    domain = sys.argv[1]
    problem = sys.argv[2]
    # domain = '../graphs/0/domain.pddl'
    # problem = '../graphs/0/problem.pddl'
    constructor = Constructor()
    [transitions, initial_state] = constructor.construct(domain, problem)
    print('\nThe total number of states: ', '\t\t', len(transitions.keys()))
    print('\nTime: ', '\t\t', str(time.time() - start_time) + 's')
    # count the transitions
    edge_count = 0
    for s in transitions:
        for a in transitions[s]:
            n_s = transitions[s][a]
            edge_count += 1
    print('\nThe total number of transitions: ', '\t\t', edge_count)
    with open('transitions.pickle', 'wb') as handle:
        pickle.dump([transitions, initial_state], handle, protocol=pickle.HIGHEST_PROTOCOL)
py
b41148faa562a9b6ff9350031845c59c7a13f8b1
from passlib.context import CryptContext

# Single shared bcrypt context; deprecated="auto" marks hashes produced by
# outdated schemes as needing a re-hash.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


def hash(password):
    # NOTE(review): this name shadows the builtin `hash`; kept as-is because
    # callers import it by this name.
    """Return a bcrypt hash of *password*."""
    return pwd_context.hash(password)


def verify_password(plain_password, hashed_password):
    """Return True iff *plain_password* matches the stored *hashed_password*."""
    return pwd_context.verify(plain_password, hashed_password)
py
b4114a4f68a56a991e1b307ce07ca191942123ee
# An Abstract Base Class for fast / approximate nearest neighbour wrapper.
# Don't instantiate, but subclass.

from abc import ABCMeta, abstractmethod
from sklearn.utils.extmath import softmax
import numpy as np


def id_to_class_func(ids, num_classes, id_class_array):
    r'''Map point ids to their classes.

    *id_class_array* holds the class of every point id at the id's index
    position; returns ``(num_classes, classes_of_ids)``.
    '''
    return num_classes, id_class_array[ids]


class NNBase(object, metaclass = ABCMeta):
    r'''Base wrapper class for fast / approximate nearest neighbours.'''

    @abstractmethod
    def __init__(self, *args, **kwargs):
        r'''A list and dictionary of parameters.'''
        pass

    @abstractmethod
    def add_batch(self, vectors, **kwargs):
        r'''Add vectors in the given batch to the NN object.'''
        pass

    @abstractmethod
    def build(self, **kwargs):
        r'''build the NN object (probably if it has C / C++ backend)'''
        pass

    @abstractmethod
    def save(self, save_path, **kwargs):
        r'''Method to save the NN to save path provided.'''
        pass

    @abstractmethod
    def load(self, load_path, **kwargs):
        r'''Method to load the NN to save path provided.'''
        pass

    @abstractmethod
    def get_knn(self, queries, k=1, query_as_vector=True, **kwargs):
        r'''Method to return nearest neighbours for a given query which can either be
        an iterable of ids already indexed or an iterable of query vectors.
        If the query is for already indexed ids then set query_as_vector as False.
        '''
        pass

    def knn_classify(self, queries, k=1, query_as_vector=True, smoothening=0.001,
                     omit_first_match=False, avg_over_queries=False,
                     id_to_class_func=id_to_class_func, **fkwargs):
        r'''Return per-class probabilities for *queries* based on their k nearest neighbours.

        Neighbour distances are turned into weights with a softmax over the
        negated distances, summed per class, smoothed by *smoothening*, and
        normalised to probabilities of shape (n_queries, num_classes).

        omit_first_match should be set as True if the queries are of items from
        the indexed dataset and hence the first (self) match needs to be omitted.
        '''
        if query_as_vector is True:
            # Ensure a 2-D (n_queries, dim) array even for a single vector.
            queries = np.atleast_2d(queries)
        ids, distances = self.get_knn(queries, k, query_as_vector, include_distances=True)
        if omit_first_match is True:
            # Drop the closest match (assumed to be the query item itself).
            ids, distances = ids[:,1:], distances[:,1:]
        num_classes, id_classes = id_to_class_func(np.ravel(ids), **fkwargs)
        id_classes = id_classes.reshape(ids.shape)
        # Smaller distance -> larger weight; softmax is taken row-wise per query.
        probabilities_wrt_ids = softmax(-distances)
        # Start every class at the smoothening value (additive smoothing).
        proba = np.full((len(queries), num_classes), smoothening)
        # obtain class wise boolean in id_classes to sum up the probabilities
        # (broadcasts to shape (num_classes, n_queries, k))
        class_bool = (id_classes == np.arange(num_classes)[:,None,None])
        instance_class_proba = class_bool*probabilities_wrt_ids
        #sum up the class-wise probabilities and add then up to proba
        proba += instance_class_proba.sum(axis=2).transpose()
        # obtain final smoothened probabilities
        proba /= proba.sum(axis=1, keepdims=True)
        if avg_over_queries is True:
            # Collapse to a single (num_classes,) vector averaged over all queries.
            proba = proba.mean(axis=0)
        return proba
py
b4114a74ebf4912a35f6a9719eb8482d660e36f5
#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

#
# Test the BIP66 changeover logic
#

from test_framework import BitcoinTestFramework
from tucoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os
import shutil


class BIP66Test(BitcoinTestFramework):
    """Exercise the BIP66 (strict DER signature) soft-fork switchover.

    Node 0 runs with defaults, node 1 mines version=2 (pre-BIP66) blocks and
    node 2 mines version=3 (BIP66) blocks; the test checks acceptance at the
    75% and 95% supermajority thresholds.
    """

    def setup_network(self):
        """Start three nodes with different -blockversion settings and connect them."""
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, []))
        self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=2"]))
        self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=3"]))
        connect_nodes(self.nodes[1], 0)
        connect_nodes(self.nodes[2], 0)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        cnt = self.nodes[0].getblockcount()

        # Mine some old-version blocks
        self.nodes[1].setgenerate(True, 100)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 100):
            raise AssertionError("Failed to mine 100 version=2 blocks")

        # Mine 750 new-version blocks
        for i in xrange(15):
            self.nodes[2].setgenerate(True, 50)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 850):
            raise AssertionError("Failed to mine 750 version=3 blocks")

        # TODO: check that new DERSIG rules are not enforced

        # Mine 1 new-version block
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 851):
            # BUG FIX: previously raised the undefined name "AssertionFailure",
            # which surfaced as a NameError and masked the real test failure.
            raise AssertionError("Failed to mine a version=3 blocks")

        # TODO: check that new DERSIG rules are enforced

        # Mine 198 new-version blocks
        for i in xrange(2):
            self.nodes[2].setgenerate(True, 99)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1049):
            raise AssertionError("Failed to mine 198 version=3 blocks")

        # Mine 1 old-version block
        self.nodes[1].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1050):
            raise AssertionError("Failed to mine a version=2 block after 949 version=3 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Failed to mine a version=3 block")

        # Mine 1 old-version blocks
        try:
            self.nodes[1].setgenerate(True, 1)
            raise AssertionError("Succeeded to mine a version=2 block after 950 version=3 blocks")
        except JSONRPCException:
            pass
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Accepted a version=2 block after 950 version=3 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1052):
            raise AssertionError("Failed to mine a version=3 block")


if __name__ == '__main__':
    BIP66Test().main()
py
b4114a937c263ae4f521909f3073b31a9c02d060
import pytest

from datetime import datetime

from hn import utils


def test_parse_date():
    """parse_date accepts datetime objects (passthrough) and several string layouts."""
    # A datetime instance is returned unchanged.
    assert utils.parse_date(datetime(2018, 1, 1)) == datetime(2018, 1, 1)
    # Date-only and full date-time strings.
    assert utils.parse_date('2018-07-01') == datetime(2018, 7, 1)
    # BUG FIX: this assertion was duplicated verbatim; the redundant copy is removed.
    assert utils.parse_date('2018-09-03 18:45:11') == datetime(2018, 9, 3, 18, 45, 11)
    # ISO-8601 with milliseconds and a Zulu (UTC) suffix.
    assert utils.parse_date('2018-11-03T14:53:42.000Z') == datetime(
        2018, 11, 3, 14, 53, 42)
    # Partial dates default the missing month/day to 1.
    assert utils.parse_date('2018') == datetime(2018, 1, 1)
    assert utils.parse_date('2018-09') == datetime(2018, 9, 1)
    # An invalid month must raise.
    with pytest.raises(ValueError):
        utils.parse_date('2018-13')
py
b4114aa2364021b538b652a0be06e4b61cf03454
from structs import Vulnerability
from tools.common.command_task import CommandTask


class Enum4linuxTask(CommandTask):
    """Command task that runs enum4linux against a node with SMB credentials."""

    def __init__(self, username, password, domain, *args, **kwargs):
        """Store the SMB credentials and workgroup/domain, then defer to CommandTask."""
        self.username = username
        self.password = password
        self.domain = domain
        super(Enum4linuxTask, self).__init__(*args, **kwargs)

    def get_vulnerabilities(self, results):
        """Wrap the raw command output in a single Vulnerability record."""
        return Vulnerability(exploit=self.exploit, output=str(results), port=self.port,
                             context=self.context, scan=self.scan)

    def prepare_args(self):
        """
        Prepare aguments for command execution

        Returns:
            list

        """
        args = ['-u', self.username, '-p', self.password, '-w', self.domain]
        args.append(str(self.port.node.ip))
        return args
py
b4114abdea139b368dc17a5f3aba0c6833915255
# importando o módulo string import string # O alfabeto a = string.ascii_letters # Rodando o alfabeto um caractere para a esquerda b = a[1:] + a[0] # A função maketrans() cria uma tabela de tradução # entre os caracteres das duas strings que ela # recebeu como parâmetro. # Os caracteres ausentes nas tabelas serão # copiados para a saída. tab = str.maketrans(a, b) # A mensagem... msg = '''Esse texto será traduzido.. Vai ficar bem estranho. ''' # A função translate() usa a tabela de tradução # criada pela maketrans() para traduzir uma string print (msg.translate(tab))
py
b41150962c66f53ecab3c9622d09e285d8180088
# Generated by Django 2.1.5 on 2019-01-22 13:31 from django.db import migrations, models import uuid class Migration(migrations.Migration): initial = True dependencies = [ ('auth', '0009_alter_user_last_name_max_length'), ] operations = [ migrations.CreateModel( name='User', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('password', models.CharField(max_length=128, verbose_name='password')), ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), ('email', models.EmailField(max_length=255, null=True, unique=True, verbose_name='Email')), ('mobile', models.CharField(blank=True, max_length=10, null=True, unique=True, verbose_name='Mobile')), ('full_name', models.CharField(default='Unknown User', max_length=30, verbose_name='Full Name')), ('avatar', models.ImageField(blank=True, upload_to='', verbose_name='Avatar Setting')), ('token', models.UUIDField(default=uuid.uuid4, editable=False, verbose_name='Token')), ('is_active', models.BooleanField(default=True, verbose_name='Active')), ('is_staff', models.BooleanField(default=False, verbose_name='Staff')), ('registered_at', models.DateTimeField(auto_now_add=True, verbose_name='Registered At')), ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), ], options={ 'verbose_name': 'User', 'verbose_name_plural': 'Users', }, ), ]
py
b41150cf0b3c325d4889a498649e954febc91256
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='RegistrationProfile', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('activation_key', models.CharField(max_length=40)), ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)), ], ), ]
py
b411518df19d21f539888b16c328321ca04d1868
# -*- coding: utf-8 -*- """ This file is part of Urban Mediator software. Copyright (c) 2008 University of Art and Design Helsinki See the file LICENSE.txt for copying permission. Script used to fetch data given URL and timeout. !!! UM config not used for this one. Config done below. Should have exact same values as main config. """ import signal, time, sys, os, sha, datetime from urllib2 import urlopen from DocStorage import FSStorage ########################## Config # storing files STORAGE_DIR = os.environ["HOME"] + "/.urban_mediator" file_storage = FSStorage("file://" + STORAGE_DIR, STORAGE_DIR) # # XXX !!! move to config ########################## def request_digest(url): return sha.new(url).hexdigest() # !!! move to config TIMEOUT = 25 # default timeout, seconds AGED = 3600*24 # default aging threshold, seconds #from ACPN Python cookbook class TimedOutExc(Exception): def __init__(self, value = "Timed Out"): self.value = value def __str__(self): return repr(self.value) def TimedOutFn(f, timeout, *args, **kwargs): def handler(signum, frame): raise TimedOutExc() old = signal.signal(signal.SIGALRM, handler) signal.alarm(timeout) try: result = f(*args, **kwargs) finally: signal.signal(signal.SIGALRM, old) signal.alarm(0) return result def timed_out(timeout): def decorate(f): def handler(signum, frame): raise TimedOutExc() def new_f(*args, **kwargs): old = signal.signal(signal.SIGALRM, handler) signal.alarm(timeout) try: result = f(*args, **kwargs) finally: signal.signal(signal.SIGALRM, old) signal.alarm(0) return result new_f.func_name = f.func_name return new_f return decorate def _doReadFeed(url): uo = urlopen(url) return uo.headers.items(), uo def readFeed(url=None, cache_dir="CACHE/", aged=AGED, custom_timeout=None): req_dig = request_digest(url) req_dig_key = cache_dir + req_dig req_dig_md_key = file_storage.metadata(req_dig_key) custom_timeout = custom_timeout or TIMEOUT aged_in_cache = False in_cache = False now = datetime.datetime.now() try: md = 
file_storage.getItem(req_dig_md_key) in_cache = True if now - md["datetime"] > datetime.timedelta(seconds=aged): aged_in_cache = True except: pass if in_cache and not aged_in_cache: try: return file_storage.getItem(req_dig_key, mode="file") except: file_storage.delItem(req_dig_md_key) in_cache = False try: headers, data = timed_out(custom_timeout)(_doReadFeed)(url) file_storage.setItem(req_dig_md_key, {"datetime": now, "headers": headers}) file_storage.setItem(req_dig_key, data) except: raise if aged_in_cache: return file_storage.getItem(req_dig_key, mode="file") return "None" return file_storage.getItem(req_dig_key, mode="file") # Its safe to read stdin instead of having URL in arguments list try: cache_dir = sys.argv[1] except: cache_dir = "CACHE/" try: aged = sys.argv[2] if aged == "None": aged = None else: aged = int(aged) except: aged = AGED try: custom_timeout = sys.argv[3] if custom_timeout == "None": custom_timeout = None else: custom_timeout = int(custom_timeout) except: custom_timeout = TIMEOUT url = sys.stdin.read().strip() t1 = time.time() data = readFeed(url, cache_dir=cache_dir, aged=aged, custom_timeout=custom_timeout) # sys.stdout.write(data.read())
py
b411518e56daa74956c450cc54e55f2fbba6431a
# Convenience re-export (presumably the package __init__): lets callers write
# `from gerrit_review_robot import GerritReviewRobot` instead of the full
# submodule path.
from gerrit_review_robot.gerrit_review_robot import GerritReviewRobot
py
b41152c92a700cd1dc98e14ea9dee7f2b8e11313
import packerlicious.post_processor as post_processor


class TestManifestPostProcessor(object):
    """Tests for the Packer "manifest" post-processor wrapper."""

    def test_no_required_fields(self):
        """A bare Manifest has no required fields, so serialization must not raise."""
        b = post_processor.Manifest()
        b.to_dict()
py
b41153b3d7440ad3e535373a60ee443a68e205f5
#!/usr/bin/env python
# coding: utf-8

# # Error Functions
#
# A small library of standard error functions, implemented for use in
# perceptron networks and to better understand how they are calculated.
#
# **General Use** for API (reminder on how to build for me):
#
# * All error functions take an observed vector (y_observed) and a
#   prediction vector (y_pred).

# Library Imports
import numpy as np


# ## Absolute Error
#
# $${y_0 - y}$$

def absolute_error(y_pred: np.array, y_observed: np.array, absolute_values=False):
    """Return the element-wise error between observations and predictions.

    Computes ``y_observed - y_pred``; when ``absolute_values`` is True the
    element-wise magnitude of that difference is returned instead.
    """
    residual = y_observed - y_pred
    return abs(residual) if absolute_values else residual
py
b41153cc3ecc827f43b689ae7066e1601c600343
"""Implementation of the spawnpoint command.""" from mcipc.rcon.client import Client from mcipc.rcon.types import Vec3 __all__ = ['spawnpoint'] def spawnpoint(self: Client, player: str = None, spawn_pos: Vec3 = None) -> str: """Sets the spawn point for a player.""" return self.run('spawnpoint', player, spawn_pos)
py
b41153fae450fd7867fbf4a611c319190661a3b7
from gym_adserver.envs.adserver import Ad from gym_adserver.envs.adserver import AdServerEnv
py
b4115584890a84af9f5a920a18f9ac0e298b04ff
# TODO: Add an appropriate license to your skill before publishing. See # the LICENSE file for more information. # Below is the list of outside modules you'll be using in your skill. # They might be built-in to Python, from mycroft-core or from external # libraries. If you use an external library, be sure to include it # in the requirements.txt file so the library is installed properly # when the skill gets installed later by a user. from adapt.intent import IntentBuilder from mycroft.skills.core import MycroftSkill, intent_handler from mycroft.util.log import LOG # Each skill is contained within its own class, which inherits base methods # from the MycroftSkill class. You extend this class as shown below. # TODO: Change "Template" to a unique name for your skill class CookieJarSkill(MycroftSkill): # The constructor of the skill, which calls MycroftSkill's constructor def __init__(self): super(CookieJarSkill, self).__init__(name="CookieJarSkill") # Initialize working variables used within the skill. self.count = 0 # The "handle_xxxx_intent" function is triggered by Mycroft when the # skill's intent is matched. The intent is defined by the IntentBuilder() # pieces, and is triggered when the user's utterance matches the pattern # defined by the keywords. In this case, the match occurs when one word # is found from each of the files: # vocab/en-us/Hello.voc # vocab/en-us/World.voc # In this example that means it would match on utterances like: # 'Hello world' # 'Howdy you great big world' # 'Greetings planet earth' @intent_handler(IntentBuilder("").require("Hello").require("World")) def handle_hello_world_intent(self, message): # In this case, respond by simply speaking a canned response. 
# Mycroft will randomly speak one of the lines from the file # dialogs/en-us/hello.world.dialog self.speak_dialog("hello.world") @intent_handler(IntentBuilder("".require("Crow").require("Midnight")) def handle_password_intent(self, message): self.speak_dialog("code.reply") @intent_handler(IntentBuilder("").require("Count").require("Dir")) def handle_count_intent(self, message): if message.data["Dir"] == "up": self.count += 1 else: # assume "down" self.count -= 1 self.speak_dialog("count.is.now", data={"count": self.count}) # The "stop" method defines what Mycroft does when told to stop during # the skill's execution. In this case, since the skill's functionality # is extremely simple, there is no need to override it. If you DO # need to implement stop, you should return True to indicate you handled # it. # # def stop(self): # return False # The "create_skill()" method is used to create an instance of the skill. # Note that it's outside the class itself. def create_skill(): return TemplateSkill()
py
b411560d673b523d31626e66da4191b05e8b23db
from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.flatpages import views from django.contrib.auth.decorators import login_required as auth from django.contrib.staticfiles.urls import staticfiles_urlpatterns from links.views import HomeListView, SearchJaryanaks from links.views import LinkCreateView, LinkDetailView, LinkUpdateView, LinkDeleteView from links.views import TextCreateView, TextDetailView, TextUpdateView, TextDeleteView from links.views import JaryanakCreateView, JaryanakListView, JaryanakUpdateView, JaryanakDetailView, JReportsView from links.views import UserProfileDetailView, UserProfileEditView, UserProfileInvitations, UserProfileReports, FollowView from links.views import AllReportsView from links.models import Link, Text from comments.models import ThreadedComment # from votes.views import vote_on_object, vote_on_comment urlpatterns = [ url(r'^$', HomeListView.as_view(), name='home'), url(r'^admin/', include(admin.site.urls)), url(r'^manageeallreports/$', auth(AllReportsView.as_view()), name='all_reports_manager'), url(r'^accounts/', include('registration.backends.simple.urls')), url(r'^users/(?P<slug>\w+)/$', UserProfileDetailView.as_view(), name="profile"), url(r'^profile/edit/$', auth(UserProfileEditView.as_view()), name="edit_profile"), url(r'^profile/(?P<slug>\w+)/invitations/$', auth(UserProfileInvitations.as_view()), name="profile_invites"), url(r'^profile/(?P<slug>\w+)/reports/$', auth(UserProfileReports.as_view()), name="profile_reports"), url(r'^login/$', "django.contrib.auth.views.login", {"template_name": "login.html"}, name="login"), url(r'^logout/$', "django.contrib.auth.views.logout_then_login", name="logout"), url(r'^j/list/$', JaryanakListView.as_view(), name='jaryanak'), url(r'^j/create/$', auth(JaryanakCreateView.as_view()), name='jaryanak_create'), url(r'^j/(?P<pk>\d+)/(?P<slug>[-\w\d\_]+)/$', 
JaryanakDetailView.as_view(), name='jaryanak_detail'), url(r'^j/update/(?P<pk>\d+)/$', auth(JaryanakUpdateView.as_view()), name='jaryanak_update'), url(r'^j/(?P<pk>\d+)/reports$', auth(JReportsView.as_view()), name='j_reports'), url(r'^links/create/$', auth(LinkCreateView.as_view()), name='link_create'), url(r'^links/(?P<pk>\d+)/(?P<slug>[-\w\d\_]+)/$', LinkDetailView.as_view(), name='link_detail'), url(r'^links/update/(?P<pk>\d+)/$', auth(LinkUpdateView.as_view()), name='link_update'), url(r'^links/delete/(?P<pk>\d+)/$', auth(LinkDeleteView.as_view()), name='link_delete'), url(r'^texts/create/$', auth(TextCreateView.as_view()), name='text_create'), url(r'^texts/(?P<pk>\d+)/(?P<slug>[-\w\d\_]+)/$', TextDetailView.as_view(), name='text_detail'), url(r'^texts/update/(?P<pk>\d+)/$', auth(TextUpdateView.as_view()), name='text_update'), url(r'^texts/delete/(?P<pk>\d+)/$', auth(TextDeleteView.as_view()), name='text_delete'), # url(r'^vote/$', vote_on_object, name='vote_link'), # url(r'^texts/(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$', vote_on_object, text_dict, name='vote_text'), # url(r'^comments/(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$', vote_on_comment, comment_dict, name='comment-vote'), # url(r'^vote/$', vote_on_object, widget_dict, name='vote'), # url(r'^vote/$', auth(VoteFormView.as_view()), {"template_name": None}, name='vote'), url(r'^invite/', include('invitations.urls')), url(r'^comments/', include('comments.urls')), url(r'^report/', include('flags.urls')), url(r'^vote/', include('votes.urls')), url(r'^follow/jaryank/(?P<pk>\d+)/(?P<slug>[-\w\d\_]+)/$', FollowView, name='follow'), url(r'^blog/', include('blog.urls')), url(r'^markdown/', include('django_markdown.urls')), url(r'^search/$', SearchJaryanaks, name='search'), url(r'^about/$', views.flatpage, {'url': '/about/'}, name='about'), url(r'^rules/$', views.flatpage, {'url': '/rules/'}, name='rules'), # url(r'^tags/$', TagsListView.as_view(), name='tags'), # 
url(r'^tag/(?P<slug>[-\w\d]+)/$', TagsDetailView.as_view(), name='tag_detail'), # url(r'^category/(?P<slug>[-\w\d]+)/$', CategoryDetailView.as_view(), name='category_detail'), ] urlpatterns += staticfiles_urlpatterns() urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # link_dict = { # 'model': Link, # } # text_dict = { # 'model': Text, # 'template_object_name': None, # 'allow_xmlhttprequest': True, # } # comment_dict = { # 'model': ThreadedComment, # 'template_object_name': 'home', # 'allow_xmlhttprequest': True, # }
py
b411560e0bab03873e3a43e076286714d9dd7857
from collections import namedtuple from django import forms from django.utils import timezone from django.utils.translation import gettext_lazy as _, pgettext from taggit.models import Tag import django_filters from elasticsearch_dsl.query import Q from froide.account.models import User from froide.publicbody.models import PublicBody, Category, Jurisdiction from froide.campaign.models import Campaign from froide.helper.search.filters import BaseSearchFilterSet from froide.helper.widgets import DateRangeWidget from .models import FoiRequest from .widgets import DropDownFilterWidget def resolution_filter(x): return Q("term", resolution=x) def status_filter(x): return Q("term", status=x) FILTER_ORDER = ("jurisdiction", "publicbody", "status", "category", "tag") SUB_FILTERS = {"jurisdiction": ("status", "category", "tag", "publicbody")} FoiRequestFilter = namedtuple("FoiRequestFilter", "slug filter key label description") def make_filter(slug, filter_func, key): return FoiRequestFilter( slug=slug, filter=filter_func, key=key, label=key.label, description=FoiRequest.STATUS_RESOLUTION_DICT[key].description, ) FOIREQUEST_FILTERS = [ make_filter( pgettext("URL part", "awaiting-classification"), status_filter, FoiRequest.STATUS.AWAITING_CLASSIFICATION, ), make_filter( pgettext("URL part", "successful"), resolution_filter, FoiRequest.RESOLUTION.SUCCESSFUL, ), make_filter( pgettext("URL part", "partially-successful"), resolution_filter, FoiRequest.RESOLUTION.PARTIALLY_SUCCESSFUL, ), make_filter( pgettext("URL part", "refused"), resolution_filter, FoiRequest.RESOLUTION.REFUSED, ), make_filter( pgettext("URL part", "withdrawn"), resolution_filter, FoiRequest.RESOLUTION.USER_WITHDREW, ), make_filter( pgettext("URL part", "withdrawn-costs"), resolution_filter, FoiRequest.RESOLUTION.USER_WITHDREW_COSTS, ), make_filter( pgettext("URL part", "awaiting-response"), status_filter, FoiRequest.STATUS.AWAITING_RESPONSE, ), make_filter( pgettext("URL part", "overdue"), ( lambda x: 
Q("range", due_date={"lt": timezone.now()}) & Q("term", status="awaiting_response") ), FoiRequest.FILTER_STATUS.OVERDUE, ), make_filter( pgettext("URL part", "asleep"), status_filter, FoiRequest.STATUS.ASLEEP ), make_filter( pgettext("URL part", "not-held"), resolution_filter, FoiRequest.RESOLUTION.NOT_HELD, ), FoiRequestFilter( slug=pgettext("URL part", "has-fee"), filter=lambda x: Q("range", costs={"gt": 0}), key=None, label=_("Fee charged"), description=_("This request is connected with a fee."), ), ] FOIREQUEST_FILTER_CHOICES = [(x.slug, x.label) for x in FOIREQUEST_FILTERS] FOIREQUEST_FILTER_DICT = dict([(x.slug, x) for x in FOIREQUEST_FILTERS]) REVERSE_FILTER_DICT = dict([(str(x.key), x) for x in FOIREQUEST_FILTERS]) FOIREQUEST_LIST_FILTER_CHOICES = [ x for x in FOIREQUEST_FILTER_CHOICES if x[0] not in {pgettext("URL part", "awaiting-classification")} ] def get_active_filters(data): for key in FILTER_ORDER: if not data.get(key): continue yield key sub_filters = SUB_FILTERS.get(key, ()) for sub_key in sub_filters: if data.get(sub_key): yield sub_key break break def get_filter_data(filter_kwargs, data): query = {} for key in get_active_filters(filter_kwargs): query[key] = filter_kwargs[key] data.update(query) return data class DropDownStatusFilterWidget(DropDownFilterWidget): def create_option( self, name, value, label, selected, index, subindex=None, attrs=None ): option = super(DropDownStatusFilterWidget, self).create_option( name, value, label, selected, index, subindex=subindex, attrs=attrs ) if value: status = FOIREQUEST_FILTER_DICT[value].key option["icon"] = "status-%s" % status return option class BaseFoiRequestFilterSet(BaseSearchFilterSet): query_fields = ["title^5", "description^3", "content"] q = django_filters.CharFilter( method="auto_query", widget=forms.TextInput( attrs={"placeholder": _("Search requests"), "class": "form-control"} ), ) FOIREQUEST_FILTER_DICT = FOIREQUEST_FILTER_DICT status = django_filters.ChoiceFilter( 
choices=FOIREQUEST_LIST_FILTER_CHOICES, label=_("status"), empty_label=_("any status"), widget=DropDownStatusFilterWidget( attrs={"label": _("status"), "class": "form-control"} ), method="filter_status", ) jurisdiction = django_filters.ModelChoiceFilter( queryset=Jurisdiction.objects.get_visible(), to_field_name="slug", empty_label=_("all jurisdictions"), widget=forms.Select( attrs={"label": _("jurisdiction"), "class": "form-control"} ), method="filter_jurisdiction", ) category = django_filters.ModelChoiceFilter( queryset=Category.objects.get_category_list(), to_field_name="slug", empty_label=_("all categories"), widget=forms.Select(attrs={"label": _("category"), "class": "form-control"}), method="filter_category", ) campaign = django_filters.ModelChoiceFilter( queryset=Campaign.objects.get_filter_list(), to_field_name="slug", null_value="-", empty_label=_("all/no campaigns"), null_label=_("no campaign"), widget=forms.Select(attrs={"label": _("campaign"), "class": "form-control"}), method="filter_campaign", ) tag = django_filters.ModelChoiceFilter( queryset=Tag.objects.all(), to_field_name="slug", method="filter_tag", widget=forms.HiddenInput(), ) publicbody = django_filters.ModelChoiceFilter( queryset=PublicBody._default_manager.all(), to_field_name="slug", method="filter_publicbody", widget=forms.HiddenInput(), ) user = django_filters.ModelChoiceFilter( queryset=User.objects.get_public_profiles(), to_field_name="username", method="filter_user", widget=forms.HiddenInput(), ) first = django_filters.DateFromToRangeFilter( method="filter_first", widget=DateRangeWidget, ) last = django_filters.DateFromToRangeFilter( method="filter_last", widget=DateRangeWidget ) sort = django_filters.ChoiceFilter( choices=[ ("-last", _("last message (newest first)")), ("last", _("last message (oldest first)")), ("-first", _("request date (newest first)")), ("first", _("request date (oldest first)")), ], label=_("sort"), empty_label=_("default sort"), 
widget=forms.Select(attrs={"label": _("sort"), "class": "form-control"}), method="add_sort", ) class Meta: model = FoiRequest fields = [ "q", "status", "jurisdiction", "campaign", "category", "tag", "publicbody", "first", ] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self.view is not None: self.filters["status"].field.widget.get_url = self.view.make_filter_url def auto_query(self, qs, name, value): if value: return qs.set_query( Q( "simple_query_string", query=value, fields=self.query_fields, default_operator="and", lenient=True, ) ) return qs def filter_status(self, qs, name, value): entry = self.FOIREQUEST_FILTER_DICT[value] return qs.filter(entry.filter(entry.key)) def filter_jurisdiction(self, qs, name, value): return qs.filter(jurisdiction=value.id) def filter_campaign(self, qs, name, value): if value == "-": return qs.filter(Q("bool", must_not={"exists": {"field": "campaign"}})) return qs.filter(campaign=value.id) def filter_category(self, qs, name, value): return qs.filter(categories=value.id) def filter_tag(self, qs, name, value): return qs.filter(tags=value.id) def filter_publicbody(self, qs, name, value): return qs.filter(publicbody=value.id) def filter_user(self, qs, name, value): return qs.filter(user=value.id) def filter_first(self, qs, name, value): range_kwargs = {} if value.start is not None: range_kwargs["gte"] = value.start if value.stop is not None: range_kwargs["lte"] = value.stop return qs.filter(Q("range", first_message=range_kwargs)) def filter_last(self, qs, name, value): range_kwargs = {} if value.start is not None: range_kwargs["gte"] = value.start if value.stop is not None: range_kwargs["lte"] = value.stop return qs.filter(Q("range", last_message=range_kwargs)) def add_sort(self, qs, name, value): if value: return qs.add_sort("%s_message" % value) return qs class FoiRequestFilterSet(BaseFoiRequestFilterSet): pass
py
b4115740479b24eb80767e53a56510f71c991dd0
import logging from rest_framework import serializers from api.conf.settings import BACKGROUND_TASK_ENABLED from api.documents.tasks import scan_document_for_viruses def process_document(document): if BACKGROUND_TASK_ENABLED: scan_document_for_viruses(str(document.id)) else: try: scan_document_for_viruses.now(str(document.id), scheduled_as_background_task=False) except Exception as e: logging.error(e) raise serializers.ValidationError({"document": e})
py
b41157e7af587dec661eaff1c258653fc28ecbfd
# # Copyright (c) 2021, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import abc from typing import Dict, Protocol, Union, runtime_checkable import tensorflow as tf from ..typing import TabularData class LossMixin(abc.ABC): """Mixin to use for Keras Layers that can calculate a loss.""" def compute_loss( self, inputs: Union[tf.Tensor, TabularData], targets: Union[tf.Tensor, TabularData], compute_metrics=True, training: bool = False, **kwargs, ) -> tf.Tensor: """Compute the loss on a batch of data. Parameters ---------- inputs: Union[torch.Tensor, TabularData] TODO targets: Union[torch.Tensor, TabularData] TODO training: bool, default=False """ raise NotImplementedError() class MetricsMixin(abc.ABC): """Mixin to use for Keras Layers that can calculate metrics.""" def calculate_metrics( self, inputs: Union[tf.Tensor, TabularData], targets: Union[tf.Tensor, TabularData], mode: str = "val", forward=True, **kwargs, ) -> Dict[str, Union[Dict[str, tf.Tensor], tf.Tensor]]: """Calculate metrics on a batch of data, each metric is stateful and this updates the state. The state of each metric can be retrieved by calling the `metric_results` method. Parameters ---------- inputs: Union[tf.Tensor, TabularData] TODO targets: Union[tf.Tensor, TabularData] TODO forward: bool, default True mode: str, default="val" """ raise NotImplementedError() def metric_results(self, mode: str = None) -> Dict[str, Union[float, tf.Tensor]]: """Returns the current state of each metric. 
The state is typically updated each batch by calling the `calculate_metrics` method. Parameters ---------- mode: str, default="val" Returns ------- Dict[str, Union[float, tf.Tensor]] """ raise NotImplementedError() def reset_metrics(self): """Reset all metrics.""" raise NotImplementedError() @runtime_checkable class ModelLikeBlock(Protocol): def compute_loss( self, inputs: Union[tf.Tensor, TabularData], targets: Union[tf.Tensor, TabularData], compute_metrics=True, training: bool = False, **kwargs, ) -> tf.Tensor: ... def calculate_metrics( self, inputs: Union[tf.Tensor, TabularData], targets: Union[tf.Tensor, TabularData], mode: str = "val", forward=True, **kwargs, ) -> Dict[str, Union[Dict[str, tf.Tensor], tf.Tensor]]: ... def metric_results(self, mode: str = None) -> Dict[str, Union[float, tf.Tensor]]: ... def _set_context(self, context): ...
py
b411580ce949222edff9f73c7056edf8e723ff16
from datetime import datetime import scipy.cluster.hierarchy as sch import numpy as np import pandas as pd from scipy.stats import norm import quantstats as qs from pyportlib.utils.time_series import TimeSeriesInterface from pyportlib.utils import time_series def skew(pos: TimeSeriesInterface, lookback: str = None, date: datetime = None, **kwargs) -> float: """ Compute the skew of the returns distribution from a TimeSeries object :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) return returns.skew() def kurtosis(pos: TimeSeriesInterface, lookback: str, date: datetime = None, **kwargs) -> float: """ Compute the kurtosis of the returns distribution from a TimeSeries object :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) return returns.kurtosis() def beta(pos: TimeSeriesInterface, benchmark: TimeSeriesInterface, lookback: str = None, date: datetime = None, **kwargs) -> float: """ Compute the beta of the returns distribution from a TimeSeries object on a benchmark on the specified time period. :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param benchmark: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series on which to compute Beta :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. 
:param date: Date to lookback from :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) benchmark = time_series.prep_returns(ts=benchmark, lookback=lookback, date=date) returns, benchmark = time_series.match_index(returns, benchmark) matrix = np.cov(returns, benchmark) return round(matrix[0, 1] / matrix[1, 1], 2) def alpha(pos: TimeSeriesInterface, benchmark: TimeSeriesInterface, lookback: str = None, date: datetime = None, **kwargs) -> float: """ Compute the alpha of the returns distribution from a TimeSeries object on a benchmark on the specified time period. :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param benchmark: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series on which to compute Beta :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) benchmark = time_series.prep_returns(ts=benchmark, lookback=lookback, date=date) returns, benchmark = time_series.match_index(returns, benchmark) matrix = np.cov(returns, benchmark) bet = matrix[0, 1] / matrix[1, 1] alph = returns.mean() - (bet * benchmark.mean()) return alph*len(returns) def rolling_alpha(pos: TimeSeriesInterface, benchmark: TimeSeriesInterface, lookback: str = None, date: datetime = None, rolling_period: int = 252, **kwargs) -> pd.Series: """ Compute the gaussian rolling value at risk of the returns distribution from a TimeSeries object with a specified quantile. :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param benchmark: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series on which to compute Beta :param lookback: String: ex. "1y", "15m". 
Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param rolling_period: Number of trading days for the rolling period :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) benchmark = time_series.prep_returns(ts=benchmark, lookback=lookback, date=date) returns, benchmark = time_series.match_index(returns, benchmark) df = pd.DataFrame(data={"returns": returns, "benchmark": benchmark}) corr = df.rolling(int(rolling_period)).corr().unstack()['returns']['benchmark'] std = df.rolling(int(rolling_period)).std() rolling_b = corr * std['returns'] / std['benchmark'] rolling_alph = returns.rolling(int(rolling_period)).mean() - (rolling_b * benchmark.rolling(int(rolling_period)).mean()) return rolling_alph * rolling_period def annualized_volatility(pos: TimeSeriesInterface, lookback: str = None, date: datetime = None, **kwargs) -> float: """ Compute the annualized volatility of the returns distribution from a TimeSeries object :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) return qs.stats.volatility(returns=returns, prepare_returns=False, annualize=True) def value_at_risk(pos, lookback: str, date: datetime = None, quantile=0.95, method: str = "gaussian", **kwargs) -> float: """ Compute the value at risk of the returns distribution from a TimeSeries object with a specified quantile and method :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. 
:param date: Date to lookback from :param quantile: Quantile on which to compite VaR :param method: VaR compute method. 'gaussian' and 'historical' are implemented :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) if method == "gaussian": var = qs.stats.value_at_risk(returns=returns, confidence=quantile, prepare_returns=False) return abs(var) if method == "historical": var = returns.quantile(q=1 - quantile) return abs(var) raise NotImplementedError(f"{method}") def rolling_var(pos, lookback: str = None, date: datetime = None, rolling_period: int = 252, quantile=0.95, **kwargs) -> pd.Series: """ Compute the gaussian rolling value at risk of the returns distribution from a TimeSeries object with a specified quantile. :param pos: TimeSeries Object (Portfolio, Position, Pandas DataFrame/Series :param lookback: String: ex. "1y", "15m". Only m and y is supported to generate look back. See date_window doc. :param date: Date to lookback from :param rolling_period: Number of trading days for the rolling period :param quantile: Quantile on which to compite VaR :param kwargs: Portfolio PnL or Position PnL kwargs :return: """ returns = time_series.prep_returns(ts=pos, lookback=lookback, date=date, **kwargs) mean = returns.rolling(rolling_period).mean() var = returns.rolling(rolling_period).std() stat = norm.ppf(1-quantile, mean, var) return pd.Series(stat, index=returns.index).dropna() * -1 def cluster_corr(corr_array, inplace=False): """ Rearranges the correlation matrix, corr_array, so that groups of highly correlated variables are next to eachother. 
https://wil.yegelwel.com/cluster-correlation-matrix/ :param corr_array: pandas.DataFrame or numpy.ndarray a NxN correlation matrix :param inplace: bool :return: """ pairwise_distances = sch.distance.pdist(corr_array) linkage = sch.linkage(pairwise_distances, method='complete') cluster_distance_threshold = pairwise_distances.max() / 2 idx_to_cluster_array = sch.fcluster(linkage, cluster_distance_threshold, criterion='distance') idx = np.argsort(idx_to_cluster_array) if not inplace: corr_array = corr_array.copy() if isinstance(corr_array, pd.DataFrame): return corr_array.iloc[idx, :].T.iloc[idx, :] return corr_array[idx, :][:, idx]
py
b41158b4e7584f4ecb79f597c955802cbc1e9cbe
# Copyright 2017 Battelle Energy Alliance, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ''' from wikipedia: dx/dt = sigma*(y-x) ; dy/dt = x*(rho-z)-y dz/dt = x*y-beta*z ; ''' import numpy as np import copy #import pylab as pyl #import random #import mpl_toolkits.mplot3d.axes3d as p3 def initialize(self,runInfoDict,inputFiles): print('Life is beautiful my friends. Do not waste it!') self.max_time = 0.03 self.t_step = 0.01 self.numberTimeSteps = int(self.max_time/self.t_step) self.x = np.zeros(self.numberTimeSteps) self.y = np.zeros(self.numberTimeSteps) self.z = np.zeros(self.numberTimeSteps) self.time = np.zeros(self.numberTimeSteps) self.cnt = 0.0 return def createNewInput(self,myInput,samplerType,**Kwargs): return Kwargs['SampledVars'] def run(self,Input): self.cnt = 1.0 self.x0 = 1.0 self.y0 = 1.0 self.z0 = 1.0 self.x01 = copy.deepcopy(self.cnt+Input['x0']) self.x02 = copy.deepcopy(self.cnt+Input['x0']) self.z01 = copy.deepcopy(self.cnt+Input['x0']) self.z02 = 101.0 - copy.deepcopy(self.cnt+Input['x0']) self.y01 = copy.deepcopy(Input['x0']) self.y02 = copy.deepcopy(Input['y0']) self.time[0]= 0 self.x[0] = copy.deepcopy(self.cnt+Input['x0']) self.y[0] = copy.deepcopy(self.cnt+Input['y0']) self.z[0] = copy.deepcopy(self.cnt+Input['z0']) for t in range ( self.numberTimeSteps-1): self.time[t+1] = self.time[t] + self.t_step*(self.z[0]*0.01)*5.0 self.x[t+1] = self.x[t] + (self.y[t]-self.x[t])*self.t_step*(self.z[0]*0.01)*5.0 self.y[t+1] = self.y[t] + 
(self.x[t]*self.z[t]-self.y[t])*self.t_step*(self.z[0]*0.01)*5.0 self.z[t+1] = self.z[t] + (self.x[t]*self.y[t]-self.z[t])*self.t_step*(self.z[0]*0.01)*5.0
py
b4115ab78d81e5de171b34431f36379d79e4f529
# qubit number=4 # total number=13 import pyquil from pyquil.api import local_forest_runtime, QVMConnection from pyquil import Program, get_qc from pyquil.gates import * import numpy as np conn = QVMConnection() def make_circuit()-> Program: prog = Program() # circuit begin prog += H(0) # number=1 prog += Y(2) # number=9 prog += H(1) # number=2 prog += H(2) # number=3 prog += CNOT(0,2) # number=10 prog += X(2) # number=11 prog += CNOT(0,2) # number=12 prog += H(3) # number=4 prog += Y(3) # number=5 prog += Y(2) # number=7 prog += Y(2) # number=8 # circuit end return prog def summrise_results(bitstrings) -> dict: d = {} for l in bitstrings: if d.get(l) is None: d[l] = 1 else: d[l] = d[l] + 1 return d if __name__ == '__main__': prog = make_circuit() qvm = get_qc('4q-qvm') results = qvm.run_and_measure(prog,1024) bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T bitstrings = [''.join(map(str, l)) for l in bitstrings] writefile = open("../data/startPyquil296.csv","w") print(summrise_results(bitstrings),file=writefile) writefile.close()
py
b4115b1eebc0a3e598116e34d32249f841625780
from math import log2 _template="""\ \t\tHitOut{ways}:process(InState,Match_vec) \t\t\tvariable match: std_logic_vector({waybits} downto 0); \t\tbegin \t\t\tHitOutState <= InState; \t\t\tmatch := Match_vec; \t\t\tcase (match) is {}\t\t\t\twhen others => HitOutState <= (others => '-'); \t\t\tend case; \t\tend process; """ _templatecond="""\ \t\t\tif cond{bit} = b"{val}" then MissOutState({bit})<= not I({bit}); \t\t\telse MissOutState({bit})<= I({bit}); end if; """ _templateMiss="""\ \t\tMissOut{ways}:process(Instate) \t\t\talias I: std_logic_vector(WAYS-2 downto 0) is InState; {vars}\t\tbegin {v_asign}\t\t\tMissOutState(0) <= not I(0); \t\t\tMissOutState(1) <= I(0) xnor I(1); \t\t\tMissOutState(2) <= I(0) xor I(2); {conds}\t\tend process; """ _templateVictim="""\ \t\tVictim{ways}:process(InState) \t\t\talias I: std_logic_vector(WAYS-2 downto 0) is InState; \t\t\tvariable v{msb}: std_logic; {vars}\t\tbegin \t\t\tv{msb}<= I(0); {logic}{assign}\t\tend process; """ def lrumiss(state): flipmask = 0 bitcnt = 0 while bitcnt < nodebits: mask = 1<<bitcnt flipmask |= mask bitcnt *= 2 bitcnt += 1 if not state&mask else 2 mask = 1<<bitcnt flipmask |= mask toreplace = (bitcnt - nodebits)*2 toreplace += 0 if not state & mask else 1 state = state ^ flipmask return toreplace, state def lruhit(way,state): bitcnt = (way)//2 + nodebits mask = 0 andmask = (1&way)<<bitcnt while True: mask|= 1<<bitcnt tmp = bitcnt bitcnt = (bitcnt-1)//2 if bitcnt >=0: andmask|= (1&(~tmp))<<bitcnt else: break state = ((state | mask) & (~andmask)) & ((1<<(ways-1))-1) return state def getTouchedBits(way,ways): bitstouched = {} nodebits = ways - 1 - ways//2 bitcnt = (way)//2 + nodebits mask = 0 andmask = (1&way)<<bitcnt while True: mask|= 1<<bitcnt tmp = bitcnt bitcnt = (bitcnt-1)//2 if bitcnt >=0: andmask|= (1&(~tmp))<<bitcnt else: break for _ in range(ways-1): if mask & (1<<_): bitstouched[_] = 0 if andmask & (1<<_) else 1 return bitstouched def getbinary(size,num): n = bin(num)[2:] return 
('0'*(size-len(n))) + n def genprocessHitOut(nways,fstream): txt = "" for a in range(nways): dic = getTouchedBits(a,nways) txt += f"\t\t\t\twhen \"{getbinary(nways,1<<a)}\" =>\n" for bit, value in dic.items(): txt += f"\t\t\t\t\tHitOutState({bit}) <= '{value}';\n" print(_template.format(txt,waybits=nways-1,ways=nways),file=fstream) def getcondsforbit(bit,ways): for _ in range(ways): dic = getTouchedBits(_,ways) if bit in dic: break newdic= {} for d_bit, value in dic.items(): if d_bit < bit: newdic[d_bit] = value return newdic def grenprocessMissOut(nways,fstream): if nways == 4: print(_templateMiss.format(ways=4,vars='',v_asign='',\ conds=''),file=fstream) else: vars='' v_asign='' conds='' for statusbit in range(3,nways-1): dic = getcondsforbit(statusbit,nways) t_asign='' cond='' for bit in sorted(dic): t_asign+= f" I({bit}) &" cond+= f"{(~dic[bit])&1}" vars+= f"\t\t\tvariable cond{statusbit}:\ std_logic_vector({(len(dic)-1)} downto 0);\n" v_asign+= f"\t\t\tcond{statusbit}:={t_asign[:-2]};\n" conds += _templatecond.format(bit=statusbit,val=cond) print(_templateMiss.format(ways=nways,vars=vars,v_asign=v_asign,\ conds=conds),file=fstream) def genProcessVictim(nways,fstream): s = ('I({})\t','not I({})') treedepth = int(log2(nways)) logic = '' vardeclaration = '' assign = '' for lvl in range(1,treedepth): vardeclaration+= f"\t\t\tvariable v{treedepth-1-lvl}: std_logic:='0';\n" assign+= f'v{treedepth-1-lvl} & ' for bit in range((2**lvl)-1,(2**(lvl+1)-1)): conds = getcondsforbit(bit,nways) logic+= f'\t\t\tv{treedepth-1-lvl}:= v{treedepth-1-lvl} or ' for condbit in sorted(conds): logic+= s[conds[condbit]].format(condbit) + '\tand\t' logic+= f'I({bit});\n' logic+= '\n' assign = f'\t\t\tVictimWay <= v{treedepth-1} & {assign[:-3]};\n' print(_templateVictim.format(ways=nways,msb=treedepth-1,\ vars=vardeclaration,logic=logic,\ assign=assign),file=fstream) def gentestvector(ways,stream): from random import randint v = int(log2(ways-1)//4 +1) N = 12000 statemax = (1<<(ways-1))-1 
width = ways//4 for _ in range(N): InState = randint(0,statemax) w = randint(0,ways-1) MatchVec = 1<<w hitout = lruhit(w,InState) victim, missout = lrumiss(InState) txt = f"{InState:0{width}X} {MatchVec:0{width}X} {hitout:0{width}X} {missout:0{width}X} {victim:0{v}X}" print(txt,file=stream) if __name__ == "__main__": import sys if len(sys.argv) == 4: stream = open(sys.argv[3],"w") elif len(sys.argv) == 3: stream = sys.stdout else: print("lru.py t|g ways outputfile(if empty to stdout), t: testbench g:generate vhdl code") sys.exit(-1) ways = int(sys.argv[2]) nodebits = ways - 1 - ways//2 with stream: if sys.argv[1] == 'g': grenprocessMissOut(ways,stream) genprocessHitOut(ways,stream) genProcessVictim(ways,stream) elif sys.argv[1] == 't': gentestvector(ways,stream)
py
b4115c4807bfaec187b8646f00b3c5699df483e1
#!/usr/bin/env python3 """Generate a test model for frugally-deep. """ import numbers import sys import numpy as np from tensorflow.python import keras from tensorflow.python.keras.models import Model, load_model, Sequential from tensorflow.python.keras.layers import Input, Dense, Dropout, Flatten, Activation from tensorflow.python.keras.layers import Conv1D, ZeroPadding1D, Cropping1D from tensorflow.python.keras.layers import Conv2D, ZeroPadding2D, Cropping2D from tensorflow.python.keras.layers import MaxPooling1D, AveragePooling1D, UpSampling1D from tensorflow.python.keras.layers import MaxPooling2D, AveragePooling2D, UpSampling2D from tensorflow.python.keras.layers import GlobalAveragePooling1D, GlobalMaxPooling1D from tensorflow.python.keras.layers import GlobalAveragePooling2D, GlobalMaxPooling2D from tensorflow.python.keras.layers import SeparableConv2D, Conv2DTranspose from tensorflow.python.keras.layers import LeakyReLU, ELU from tensorflow.python.keras.layers import BatchNormalization from tensorflow.python.keras import backend as K __author__ = "Tobias Hermann" __copyright__ = "Copyright 2017, Tobias Hermann" __license__ = "MIT" __maintainer__ = "Tobias Hermann, https://github.com/Dobiasd/frugally-deep" __email__ = "[email protected]" def remove_sample_axis_from_shape(shape): """Remove first dimension from shape if not fixed.""" if len(shape) == 4: assert not isinstance(shape[0], numbers.Number) return shape[1:] if not isinstance(shape[0], numbers.Number): return shape[1:] return shape def get_shape_for_random_data(data_size, shape): """Include size of data to generate into shape.""" if len(shape) == 3: return (data_size, shape[0], shape[1], shape[2]) if len(shape) == 2: return (data_size, shape[0], shape[1]) if len(shape) == 1: return (data_size, shape[0]) assert False def generate_random_data(data_size, shape): """Random data for training.""" return np.random.random( size=get_shape_for_random_data(data_size, remove_sample_axis_from_shape(shape))) def 
generate_input_data(data_size, input_shapes): """Random input data for training.""" return [generate_random_data(data_size, input_shape) for input_shape in input_shapes] def generate_output_data(data_size, outputs): """Random output data for training.""" return [generate_random_data(data_size, output.shape) for output in outputs] def get_test_model_small(): """Returns a minimalistic test model.""" input_shapes = [ (17, 4), (16, 18, 3), (8,), ] inputs = [Input(shape=s) for s in input_shapes] outputs = [] outputs.append(Conv1D(2, 3, padding='valid')(inputs[0])) outputs.append(Conv2D(2, (5, 7), padding='valid')(inputs[1])) outputs.append(BatchNormalization()(inputs[0])) outputs.append(BatchNormalization()(inputs[1])) outputs.append(BatchNormalization()(inputs[2])) outputs.append(Activation('softmax')(inputs[0])) outputs.append(Activation('softmax')(inputs[1])) outputs.append(Activation('softmax')(inputs[2])) #outputs.append(Conv2DTranspose(2, (3, 3), padding='valid')(inputs[1])) model = Model(inputs=inputs, outputs=outputs, name='test_model_small') model.compile(loss='mse', optimizer='nadam') # fit to dummy data training_data_size = 1 data_in = generate_input_data(training_data_size, input_shapes) data_out = generate_output_data(training_data_size, outputs) model.fit(data_in, data_out, epochs=10) return model def get_test_model_sequential(): """Returns a typical (VGG-like) sequential test model.""" model = Sequential() model.add(Conv2D(8, (3, 3), activation='relu', input_shape=(32, 32, 3))) model.add(Conv2D(8, (3, 3), activation='relu')) model.add(MaxPooling2D(pool_size=(2, 2))) model.add(Dropout(0.25)) model.add(Conv2D(16, (3, 3), activation='elu')) model.add(Conv2D(16, (3, 3))) model.add(ELU()) model.add(MaxPooling2D(pool_size=(2, 2))) model.add(Dropout(0.25)) model.add(Flatten()) model.add(Dense(64, activation='sigmoid')) model.add(Dropout(0.5)) model.add(Dense(10, activation='softmax')) model.compile(loss='categorical_crossentropy', optimizer='sgd') # fit to dummy 
data training_data_size = 1 data_in = [np.random.random(size=(training_data_size, 32, 32, 3))] data_out = [np.random.random(size=(training_data_size, 10))] model.fit(data_in, data_out, epochs=10) return model def get_test_model_full(): """Returns a maximally complex test model, using all supported layer types with different parameter combination. """ input_shapes = [ (26, 28, 3), (4, 4, 3), (4, 4, 3), (4,), (2, 3), (27, 29, 1), (17, 1), (17, 4), ] inputs = [Input(shape=s) for s in input_shapes] outputs = [] for inp in inputs[6:8]: for padding in ['valid', 'same']: for s in range(1, 6): for out_channels in [1, 2]: for d in range(1, 4): outputs.append( Conv1D(out_channels, s, padding=padding, dilation_rate=d)(inp)) for padding_size in range(0, 5): outputs.append(ZeroPadding1D(padding_size)(inp)) for crop_left in range(0, 2): for crop_right in range(0, 2): outputs.append(Cropping1D((crop_left, crop_right))(inp)) for upsampling_factor in range(1, 5): outputs.append(UpSampling1D(upsampling_factor)(inp)) for padding in ['valid', 'same']: for pool_factor in range(1, 6): for s in range(1, 4): outputs.append( MaxPooling1D(pool_factor, strides=s, padding=padding)(inp)) outputs.append( AveragePooling1D(pool_factor, strides=s, padding=padding)(inp)) outputs.append(GlobalMaxPooling1D()(inp)) outputs.append(GlobalAveragePooling1D()(inp)) for inp in [inputs[0], inputs[5]]: for padding in ['valid', 'same']: for h in range(1, 6): for out_channels in [1, 2]: for d in range(1, 4): outputs.append( Conv2D(out_channels, (h, 1), padding=padding, dilation_rate=(d, 1))(inp)) outputs.append( SeparableConv2D(out_channels, (h, 1), padding=padding, dilation_rate=(d, 1))(inp)) for sy in range(1, 4): outputs.append( Conv2D(out_channels, (h, 1), strides=(1, sy), padding=padding)(inp)) outputs.append( SeparableConv2D(out_channels, (h, 1), strides=(sy, sy), padding=padding)(inp)) for sy in range(1, 4): outputs.append( MaxPooling2D((h, 1), strides=(1, sy), padding=padding)(inp)) for w in range(1, 
6): for out_channels in [1, 2]: for d in range(1, 4) if sy == 1 else [1]: outputs.append( Conv2D(out_channels, (1, w), padding=padding, dilation_rate=(1, d))(inp)) outputs.append( SeparableConv2D(out_channels, (1, w), padding=padding, dilation_rate=(1, d))(inp)) for sx in range(1, 4): outputs.append( Conv2D(out_channels, (1, w), strides=(sx, 1), padding=padding)(inp)) outputs.append( SeparableConv2D(out_channels, (1, w), strides=(sx, sx), padding=padding)(inp)) for sx in range(1, 4): outputs.append( MaxPooling2D((1, w), strides=(1, sx), padding=padding)(inp)) outputs.append(ZeroPadding2D(2)(inputs[0])) outputs.append(ZeroPadding2D((2, 3))(inputs[0])) outputs.append(ZeroPadding2D(((1, 2), (3, 4)))(inputs[0])) outputs.append(Cropping2D(2)(inputs[0])) outputs.append(Cropping2D((2, 3))(inputs[0])) outputs.append(Cropping2D(((1, 2), (3, 4)))(inputs[0])) for y in range(1, 3): for x in range(1, 3): outputs.append(UpSampling2D(size=(y, x))(inputs[0])) outputs.append(GlobalAveragePooling2D()(inputs[0])) outputs.append(GlobalMaxPooling2D()(inputs[0])) outputs.append(AveragePooling2D((2, 2))(inputs[0])) outputs.append(MaxPooling2D((2, 2))(inputs[0])) outputs.append(UpSampling2D((2, 2))(inputs[0])) outputs.append(keras.layers.concatenate([inputs[0], inputs[0]])) outputs.append(Dropout(0.5)(inputs[0])) outputs.append(BatchNormalization()(inputs[0])) outputs.append(BatchNormalization(center=False)(inputs[0])) outputs.append(BatchNormalization(scale=False)(inputs[0])) outputs.append(Conv2D(2, (3, 3), use_bias=True)(inputs[0])) outputs.append(Conv2D(2, (3, 3), use_bias=False)(inputs[0])) outputs.append(SeparableConv2D(2, (3, 3), use_bias=True)(inputs[0])) outputs.append(SeparableConv2D(2, (3, 3), use_bias=False)(inputs[0])) outputs.append(Dense(2, use_bias=True)(inputs[3])) outputs.append(Dense(2, use_bias=False)(inputs[3])) shared_conv = Conv2D(1, (1, 1), padding='valid', name='shared_conv', activation='relu') up_scale_2 = UpSampling2D((2, 2)) x1 = 
shared_conv(up_scale_2(inputs[1])) # (1, 8, 8) x2 = shared_conv(up_scale_2(inputs[2])) # (1, 8, 8) x3 = Conv2D(1, (1, 1), padding='valid')(up_scale_2(inputs[2])) # (1, 8, 8) x = keras.layers.concatenate([x1, x2, x3]) # (3, 8, 8) outputs.append(x) x = Conv2D(3, (1, 1), padding='same', use_bias=False)(x) # (3, 8, 8) outputs.append(x) x = Dropout(0.5)(x) outputs.append(x) x = keras.layers.concatenate([ MaxPooling2D((2, 2))(x), AveragePooling2D((2, 2))(x)]) # (6, 4, 4) outputs.append(x) x = Flatten()(x) # (1, 1, 96) x = Dense(4, use_bias=False)(x) outputs.append(x) x = Dense(3)(x) # (1, 1, 3) outputs.append(x) intermediate_input_shape = (3,) intermediate_in = Input(intermediate_input_shape) intermediate_x = intermediate_in intermediate_x = Dense(8)(intermediate_x) intermediate_x = Dense(5)(intermediate_x) intermediate_model = Model( inputs=[intermediate_in], outputs=[intermediate_x], name='intermediate_model') intermediate_model.compile(loss='mse', optimizer='nadam') x = intermediate_model(x) # (1, 1, 5) intermediate_model_2 = Sequential() intermediate_model_2.add(Dense(7, input_shape=(5,))) intermediate_model_2.add(Dense(5)) intermediate_model_2.compile(optimizer='rmsprop', loss='categorical_crossentropy') x = intermediate_model_2(x) # (1, 1, 5) x = Dense(3)(x) # (1, 1, 3) shared_activation = Activation('tanh') outputs = outputs + [ Activation('tanh')(inputs[3]), Activation('hard_sigmoid')(inputs[3]), Activation('selu')(inputs[3]), Activation('sigmoid')(inputs[3]), Activation('softplus')(inputs[3]), Activation('softmax')(inputs[3]), Activation('relu')(inputs[3]), LeakyReLU()(inputs[3]), ELU()(inputs[3]), shared_activation(inputs[3]), inputs[4], inputs[1], x, shared_activation(x), ] print('Model has {} outputs.'.format(len(outputs))) model = Model(inputs=inputs, outputs=outputs, name='test_model_full') model.compile(loss='mse', optimizer='nadam') # fit to dummy data training_data_size = 1 batch_size = 1 epochs = 10 data_in = generate_input_data(training_data_size, 
input_shapes) data_out = generate_output_data(training_data_size, outputs) model.fit(data_in, data_out, epochs=epochs, batch_size=batch_size) return model def main(): """Generate different test models and save them to the given directory.""" if len(sys.argv) != 3: print('usage: [model name] [destination file path]') sys.exit(1) else: model_name = sys.argv[1] dest_path = sys.argv[2] get_model_functions = { 'small': get_test_model_small, 'sequential': get_test_model_sequential, 'full': get_test_model_full } if not model_name in get_model_functions: print('unknown model name: ', model_name) sys.exit(2) assert K.backend() == "tensorflow" assert K.floatx() == "float32" assert K.image_data_format() == 'channels_last' np.random.seed(0) model_func = get_model_functions[model_name] model = model_func() model.save(dest_path, include_optimizer=False) # Make sure models can be loaded again, # see https://github.com/fchollet/keras/issues/7682 model = load_model(dest_path) print(model.summary()) if __name__ == "__main__": main()
py
b4115c7cc4d4a3389e9751c595cc763081eb51f9
import logging import string from collections import Counter import xlsxwriter from django.core.management.base import BaseCommand from django.db import models from django.db.models import Q, Sum, Count from django.utils.translation import activate from logs.models import ReportType, AccessLog, ManualDataUpload from sushi.models import SushiFetchAttempt logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Compares the `default` database to the `old` one from the settings and creates a report' def add_arguments(self, parser): parser.add_argument( '-l', dest='lang', default='cs', help="language to use for object names" ) parser.add_argument('outfile') def handle(self, *args, **options): self.ignored_rts = list( ReportType.objects.filter( Q(materialization_spec__isnull=False) | Q(short_name='interest') ).values_list('pk', flat=True) ) base_qs = AccessLog.objects.exclude(report_type_id__in=self.ignored_rts) activate(options['lang']) workbook = xlsxwriter.Workbook(options['outfile']) self.base_fmt_dict = {'font_name': 'Arial', 'font_size': 9} # , 'num_format': '#,##0'} self.base_fmt = workbook.add_format(self.base_fmt_dict) self.header_fmt = workbook.add_format({'bold': True, **self.base_fmt_dict}) self.ok_fmt = workbook.add_format({'bg_color': '#ddffdd', **self.base_fmt_dict}) self.warn_fmt = workbook.add_format({'bg_color': '#ffd0b0', **self.base_fmt_dict}) self.perc_fmt = workbook.add_format({'num_format': '0.000%', **self.base_fmt_dict}) self.ok_perc_fmt = workbook.add_format( {'num_format': '0.000%', 'bg_color': self.ok_fmt.bg_color, **self.base_fmt_dict} ) self.warn_perc_fmt = workbook.add_format( {'num_format': '0.000%', 'bg_color': self.warn_fmt.bg_color, **self.base_fmt_dict} ) # detail view - all months that have a change for spec in [ {'name': 'organization', 'key': ('organization',), 'match': 'show'}, {'name': 'platform', 'key': ('platform',), 'match': 'show'}, {'name': 'report_type', 'key': ('report_type',), 'match': 'show'}, {'name': 
'date', 'key': ('date',), 'match': 'show'}, # {'name': 'org-platform', 'key': ('organization', 'platform'), 'match': 'hide'}, { 'name': 'platform-org-report', 'key': ('platform', 'organization', 'report_type'), 'match': 'hide', }, { 'name': 'detail', 'key': ('platform', 'organization', 'report_type', 'date'), 'match': 'hide', 'extra': ['title_count', 'filenames'], }, ]: print("==", spec['name'], "==") sheet = workbook.add_worksheet(spec['name']) key = spec['key'] mappings = {} is_fk = {} header_row = [] for key_dim in key: mappings[key_dim] = {} field = AccessLog._meta.get_field(key_dim) if isinstance(field, models.ForeignKey): mappings[key_dim] = {obj.pk: obj for obj in field.related_model.objects.all()} is_fk[key_dim] = True header_row.append(f'{key_dim} id') header_row.append(key_dim) else: is_fk[key_dim] = False header_row.append(key_dim) header_row += ['before', 'after', 'diff', 'rel. diff'] if 'title_count' in spec.get('extra', []): header_row += ['titles before', 'titles after', 'IBs before'] # add notes column to make sure it is part of the auto-filter created later header_row += ['notes'] sheet.write_row(0, 0, header_row, self.header_fmt) query_key = tuple(f'{key_dim}_id' if is_fk[key_dim] else key_dim for key_dim in key) qs = base_qs.values(*query_key).annotate(sum=Sum('value')).order_by(*query_key) old = {tuple(rec[k] for k in query_key): rec['sum'] for rec in qs.using('old')} row_idx = 0 stats = Counter() max_lens = {key_dim: 0 for key_dim in key} seen_grp_ids = set() for rec in qs: grp_id = tuple(rec[k] for k in query_key) seen_grp_ids.add(grp_id) old_value = old.get(grp_id, 0) if self.process_row( row_idx, is_fk, key, mappings, max_lens, old_value, rec, sheet, spec, stats ): row_idx += 1 # process old stuff to see if something was missing in the new one for grp_id, old_value in old.items(): if grp_id not in seen_grp_ids: rec = { (f'{key_dim}_id' if is_fk[key_dim] else key_dim): grp_id[i] for i, key_dim in enumerate(key) } rec['sum'] = 0 if 
self.process_row( row_idx, is_fk, key, mappings, max_lens, old_value, rec, sheet, spec, stats, ): row_idx += 1 # adjust column widths col = 0 for key_dim in key: if is_fk[key_dim]: sheet.set_column(col, col, 4) sheet.set_column(col + 1, col + 1, 6 + int(0.7 * max_lens[key_dim])) col += 2 else: col += 1 # the notes column width sheet.set_column(len(header_row) - 1, len(header_row) - 1, 48) # add auto-filter sheet.autofilter(0, 0, row_idx, len(header_row) - 1) # add conditional formatting rel_diff_idx = header_row.index('rel. diff') sheet.conditional_format( 1, rel_diff_idx, row_idx, rel_diff_idx, { 'type': '3_color_scale', 'min_color': "#BB2222", 'mid_color': "#FFFFFF", 'max_color': "#22BB22", 'mid_type': 'num', 'mid_value': 0, }, ) print(" ", stats) workbook.close() def process_row(self, i, is_fk, key, mappings, max_lens, old_value, rec, sheet, spec, stats): new_value = rec['sum'] if old_value == new_value: stats['match'] += 1 if spec['match'] == 'hide': return False fmt = self.ok_fmt cur_perc_fmt = self.ok_perc_fmt else: fmt = self.warn_fmt if spec['match'] != 'hide' else self.base_fmt cur_perc_fmt = self.warn_perc_fmt if spec['match'] != 'hide' else self.perc_fmt stats['mismatch'] += 1 row = [] for key_dim in key: if is_fk[key_dim]: key_attr = f'{key_dim}_id' row.append(rec[key_attr]) s = str(mappings[key_dim].get(rec[key_attr], rec[key_attr])) row.append(s) max_lens[key_dim] = max(max_lens[key_dim], len(s)) else: row.append(str(rec[key_dim])) sheet.write_row( i + 1, 0, [*row, old_value, new_value], fmt, ) # writing formulas with empty value to force recalc in LibreOffice letter1 = string.ascii_letters[len(row) + 1] letter2 = string.ascii_letters[len(row)] letter3 = string.ascii_letters[len(row) + 2] sheet.write_formula(i + 1, len(row) + 2, f'={letter1}{i + 2}-{letter2}{i + 2}', fmt, '') sheet.write_formula( i + 1, len(row) + 3, f'={letter3}{i + 2}/{letter2}{i + 2}', cur_perc_fmt, '' ) last_col = len(row) + 3 fltr = dict(rec) del fltr['sum'] # write extra info if 
'title_count' in spec.get('extra', []): detail_new = ( AccessLog.objects.exclude(report_type_id__in=self.ignored_rts) .filter(**fltr) .aggregate( title_count=Count( 'target_id', distinct=True, filter=Q(target_id__isnull=False) ), ib_count=Count('import_batch_id', distinct=True), ) ) detail_old = ( AccessLog.objects.exclude(report_type_id__in=self.ignored_rts) .filter(**fltr) .using('old') .aggregate( title_count=Count( 'target_id', distinct=True, filter=Q(target_id__isnull=False) ), ib_count=Count('import_batch_id', distinct=True), ) ) sheet.write_row( i + 1, last_col + 1, [detail_old['title_count'], detail_new['title_count'], detail_old['ib_count']], self.base_fmt, ) last_col += 3 if 'filenames' in spec.get('extra', []): al_subq = ( AccessLog.objects.exclude(report_type_id__in=self.ignored_rts) .filter(**fltr) .values('import_batch_id') .distinct() ) fas = SushiFetchAttempt.objects.filter(import_batch_id__in=al_subq.using('old')).using( 'old' ) mdus = ManualDataUpload.objects.filter(import_batches__in=al_subq.using('old')).using( 'old' ) fnames = [fa.data_file.name for fa in [*fas, *mdus]] if len(fnames) == 0: # no files, try with current DB fas = SushiFetchAttempt.objects.filter(import_batch_id__in=al_subq) mdus = ManualDataUpload.objects.filter(import_batches__in=al_subq) fnames = [fa.data_file.name for fa in [*fas, *mdus]] sheet.write_comment( i + 1, last_col, "Filenames:\n\n" + '\n'.join(fnames), {'x_scale': 3.0} ) sheet.write_string(i + 1, last_col + 1, '', self.base_fmt) last_col += 1 return True
py
b4115c9a1290ed2e0a64cfd23a0a004630e7ff8d
class LifeClass(): def __init__(self, name: str): # do something self.name = name def can_breath(self) -> str: return f"{self.name} maybe can" class AnimalClass(LifeClass): def can_breath(self) -> str: return f"yes {self.name} can, i'm breathing oxygen" def walk(self) -> str: return "i'm walking on the road" life = LifeClass('blob') print(life.can_breath()) animal = AnimalClass('cat') print(animal.can_breath()) print(animal.walk())
py
b4115ccc193a5e5ba3331df13335eea7afac544a
import posixpath from urllib.parse import urlparse from .azure import AzureTree from .dropbox import DropboxTree from .gdrive import GDriveTree from .gs import GSTree from .hdfs import HDFSTree from .http import HTTPTree from .https import HTTPSTree from .local import LocalTree from .oss import OSSTree from .s3 import S3Tree from .ssh import SSHTree from .webdav import WebDAVTree from .webdavs import WebDAVSTree from .webhdfs import WebHDFSTree TREES = [ AzureTree, DropboxTree, GDriveTree, GSTree, HDFSTree, HTTPTree, HTTPSTree, S3Tree, SSHTree, OSSTree, WebDAVTree, WebDAVSTree, WebHDFSTree # NOTE: LocalTree is the default ] def get_tree_cls(remote_conf): for tree_cls in TREES: if tree_cls.supported(remote_conf): return tree_cls return LocalTree def get_tree_config(config, **kwargs): name = kwargs.get("name") if name: remote_conf = config["remote"][name.lower()] else: remote_conf = kwargs return _resolve_remote_refs(config, remote_conf) def _resolve_remote_refs(config, remote_conf): # Support for cross referenced remotes. # This will merge the settings, shadowing base ref with remote_conf. 
# For example, having: # # dvc remote add server ssh://localhost # dvc remote modify server user root # dvc remote modify server ask_password true # # dvc remote add images remote://server/tmp/pictures # dvc remote modify images user alice # dvc remote modify images ask_password false # dvc remote modify images password asdf1234 # # Results on a config dictionary like: # # { # "url": "ssh://localhost/tmp/pictures", # "user": "alice", # "password": "asdf1234", # "ask_password": False, # } parsed = urlparse(remote_conf["url"]) if parsed.scheme != "remote": return remote_conf base = get_tree_config(config, name=parsed.netloc) url = posixpath.join(base["url"], parsed.path.lstrip("/")) return {**base, **remote_conf, "url": url} def get_cloud_tree(repo, **kwargs): from dvc.config import ConfigError from dvc.config_schema import SCHEMA, Invalid remote_conf = get_tree_config(repo.config, **kwargs) try: remote_conf = SCHEMA["remote"][str](remote_conf) except Invalid as exc: raise ConfigError(str(exc)) from None return get_tree_cls(remote_conf)(repo, remote_conf)
py
b4115cecea2b9dfc4b0119113c73d5cb4784b455
# emacs: -*- mode: python; py-indent-offset: 4; tab-wstrth: 4; indent-tabs-mode: nil -*- # ex: set sts=4 ts=4 sw=4 et: # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # # See COPYING file distributed along with the datalad package for the # copyright and license terms. # # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """tests for UI switcher""" __docformat__ = 'restructuredtext' from unittest.mock import patch from ...tests.utils_pytest import ( assert_equal, assert_false, assert_not_equal, assert_raises, with_testsui, ) from .. import _UI_Switcher from ..dialog import ( ConsoleLog, DialogUI, IPythonUI, ) def test_ui_switcher(): ui = _UI_Switcher('dialog') assert(isinstance(ui.ui, DialogUI)) message_str = str(ui.message) assert_equal(message_str, str(ui._ui.message)) ui.set_backend('console') assert(isinstance(ui.ui, ConsoleLog)) assert_equal(str(ui.message), str(ui._ui.message)) assert_not_equal(message_str, str(ui._ui.message)) with assert_raises(AttributeError): ui.yesno ui.set_backend('annex') # Let's pretend we are under IPython class ZMQInteractiveShell(object): pass with patch('datalad.utils.get_ipython', lambda: ZMQInteractiveShell(), create=True): ui = _UI_Switcher() assert (isinstance(ui.ui, IPythonUI)) def test_tests_ui(): ui = _UI_Switcher('dialog') # Let's test our responses construct ui.set_backend('tests') with ui.add_responses('abc'): assert_equal(ui.question("text"), 'abc') with ui.add_responses(['a', 'bb']): assert_equal(ui.question("text"), 'a') assert_equal(ui.question("text"), 'bb') # should raise exception if not all responses were # used with assert_raises(AssertionError): with ui.add_responses(['a', 'bb']): assert_equal(ui.question("text"), 'a') # but clear it up assert_false(ui.get_responses()) # assure that still works with ui.add_responses('abc'): assert_equal(ui.question("text"), 'abc') # and if we switch back to some other backend -- we would loose *responses methods 
ui.set_backend('annex') assert_false(hasattr(ui, 'add_responses')) def test_with_testsui(): @with_testsui def nothing(x, k=1): assert_equal(x, 1) assert_equal(k, 2) nothing(1, k=2) @with_testsui(responses='a') def nothing(x, k=1): assert_equal(x, 1) assert_equal(k, 2) # responses were not used assert_raises(AssertionError, nothing, 1, k=2) from datalad.ui import ui @with_testsui(responses='a') def ask(): assert_equal(ui.question('what is a?'), 'a') ask()
py
b4115dd755bbe76f4a39f04ac41cfbe9e0b7b4bf
""" Access Denied Message Filter Transformer implementation. """ # TODO: Remove this file after REVE-52 lands and old-mobile-app traffic falls to < 5% of mobile traffic from openedx.core.djangoapps.content.block_structure.transformer import BlockStructureTransformer class AccessDeniedMessageFilterTransformer(BlockStructureTransformer): """ A transformer that removes any block from the course that has an authorization_denial_reason or an authorization_denial_message. """ WRITE_VERSION = 1 READ_VERSION = 1 @classmethod def name(cls): """ Unique identifier for the transformer's class; same identifier used in setup.py. """ return "access_denied_message_filter" @classmethod def collect(cls, block_structure): """ Collects any information that's necessary to execute this transformer's transform method. """ block_structure.request_xblock_fields('authorization_denial_reason', 'authorization_denial_message') def transform(self, usage_info, block_structure): def _filter(block_key): reason = block_structure.get_xblock_field(block_key, 'authorization_denial_reason') message = block_structure.get_xblock_field(block_key, 'authorization_denial_message') return reason and message block_structure.remove_block_traversal(_filter)
py
b4115e874e12939b5008b752809a43792035a009
"""DO NOT MODIFY. Auto-generated by script/fingerprint_frontend.""" FINGERPRINTS = { "compatibility.js": "8e4c44b5f4288cc48ec1ba94a9bec812", "core.js": "d4a7cb8c80c62b536764e0e81385f6aa", "frontend.html": "f170a7221615ca2839cb8fd51a82f50a", "mdi.html": "c92bd28c434865d6cabb34cd3c0a3e4c", "micromarkdown-js.html": "93b5ec4016f0bba585521cf4d18dec1a", "panels/ha-panel-automation.html": "4f98839bb082885657bbcd0ac04fc680", "panels/ha-panel-config.html": "76853de505d173e82249bf605eb73505", "panels/ha-panel-dev-event.html": "4886c821235492b1b92739b580d21c61", "panels/ha-panel-dev-info.html": "24e888ec7a8acd0c395b34396e9001bc", "panels/ha-panel-dev-service.html": "92c6be30b1af95791d5a6429df505852", "panels/ha-panel-dev-state.html": "8f1a27c04db6329d31cfcc7d0d6a0869", "panels/ha-panel-dev-template.html": "d33a55b937b50cdfe8b6fae81f70a139", "panels/ha-panel-hassio.html": "9474ba65077371622f21ed9a30cf5229", "panels/ha-panel-history.html": "35177e2046c9a4191c8f51f8160255ce", "panels/ha-panel-iframe.html": "d920f0aa3c903680f2f8795e2255daab", "panels/ha-panel-logbook.html": "7c45bd41c146ec38b9938b8a5188bb0d", "panels/ha-panel-map.html": "0ba605729197c4724ecc7310b08f7050", "panels/ha-panel-zwave.html": "2ea2223339d1d2faff478751c2927d11", "websocket_test.html": "575de64b431fe11c3785bf96d7813450" }
py
b4115f36c68c7bb50775b708392fe35c43fe7c1f
""" Copyright 2017 Fair Isaac Corp. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import cvxpy.settings as s from collections import namedtuple from cvxpy.problems.problem import Problem from cvxpy.utilities.deterministic import unique_list # Used in self._cached_data to check if problem's objective or constraints have # changed. CachedProblem = namedtuple('CachedProblem', ['objective', 'constraints']) # Used by pool.map to send solve result back. SolveResult = namedtuple( 'SolveResult', ['opt_value', 'status', 'primal_values', 'dual_values']) class XpressProblem (Problem): """A convex optimization problem associated with the Xpress Optimizer Attributes ---------- objective : Minimize or Maximize The expression to minimize or maximize. constraints : list The constraints on the problem variables. """ # The solve methods available. REGISTERED_SOLVE_METHODS = {} def __init__(self, objective, constraints=None): super(XpressProblem, self).__init__(objective, constraints) self._iis = None def _reset_iis(self): """Clears the iis information """ self._iis = None self._transferRow = None def _update_problem_state(self, results_dict, sym_data, solver): """Updates the problem state given the solver results. Updates problem.status, problem.value and value of primal and dual variables. Parameters ---------- results_dict : dict A dictionary containing the solver results. sym_data : SymData The symbolic data for the problem. solver : Solver The solver type used to obtain the results. 
""" super(XpressProblem, self)._update_problem_state(results_dict, sym_data, solver) self._iis = results_dict[s.XPRESS_IIS] self._transferRow = results_dict[s.XPRESS_TROW] def __repr__(self): return "XpressProblem(%s, %s)" % (repr(self.objective), repr(self.constraints)) def __neg__(self): return XpressProblem(-self.objective, self.constraints) def __add__(self, other): if other == 0: return self elif not isinstance(other, XpressProblem): return NotImplemented return XpressProblem(self.objective + other.objective, unique_list(self.constraints + other.constraints)) def __sub__(self, other): if not isinstance(other, XpressProblem): return NotImplemented return XpressProblem(self.objective - other.objective, unique_list(self.constraints + other.constraints)) def __mul__(self, other): if not isinstance(other, (int, float)): return NotImplemented return XpressProblem(self.objective * other, self.constraints) def __div__(self, other): if not isinstance(other, (int, float)): return NotImplemented return XpressProblem(self.objective * (1.0 / other), self.constraints)
py
b4115f714a9a712cf9b6ff43564a61253f302134
import os

import bpy
import bpy_extras

from ... import ops, plugin, plugin_prefs, registry, utils
from ...version_utils import assign_props, IS_28
from .. import imp
from . import utils as imp_utils, props


op_import_object_props = {
    'filter_glob': bpy.props.StringProperty(
        default='*.object', options={'HIDDEN'}
    ),
    'directory': bpy.props.StringProperty(subtype="DIR_PATH"),
    'files': bpy.props.CollectionProperty(
        type=bpy.types.OperatorFileListElement
    ),
    'import_motions': props.PropObjectMotionsImport(),
    'mesh_split_by_materials': props.PropObjectMeshSplitByMaterials(),
    'use_motion_prefix_name': props.PropObjectUseMotionPrefixName(),
    'shaped_bones': props.PropObjectBonesCustomShapes(),
    'fmt_version': plugin_prefs.PropSDKVersion()
}


@registry.module_thing
class OpImportObject(ops.BaseOperator, bpy_extras.io_utils.ImportHelper):
    """File-browser operator that imports X-Ray *.object files."""

    bl_idname = 'xray_import.object'
    bl_label = 'Import .object'
    bl_description = 'Imports X-Ray object'
    bl_options = {'UNDO', 'PRESET'}

    if not IS_28:
        # Blender < 2.80 requires properties declared on the class itself.
        for prop_name, prop_value in op_import_object_props.items():
            exec('{0} = op_import_object_props.get("{0}")'.format(prop_name))

    @utils.execute_with_logger
    @utils.set_cursor_state
    def execute(self, _context):
        """Import every selected *.object file using the add-on preferences."""
        preferences = plugin_prefs.get_preferences()
        textures_folder = preferences.textures_folder_auto
        objects_folder = preferences.objects_folder_auto
        if not textures_folder:
            self.report({'WARNING'}, 'No textures folder specified')
        if not self.files or (len(self.files) == 1 and not self.files[0].name):
            self.report({'ERROR'}, 'No files selected')
            return {'CANCELLED'}
        # Collect every per-import option into a single context object.
        import_context = imp_utils.ImportObjectContext()
        import_context.textures_folder = textures_folder
        import_context.soc_sgroups = self.fmt_version == 'soc'
        import_context.import_motions = self.import_motions
        import_context.split_by_materials = self.mesh_split_by_materials
        import_context.operator = self
        import_context.use_motion_prefix_name = self.use_motion_prefix_name
        import_context.objects_folder = objects_folder
        for selected in self.files:
            extension = os.path.splitext(selected.name)[-1].lower()
            if extension != '.object':
                self.report(
                    {'ERROR'},
                    'Format of "{}" not recognised'.format(selected.name)
                )
                continue
            import_context.before_import_file()
            imp.import_file(
                os.path.join(self.directory, selected.name),
                import_context
            )
        return {'FINISHED'}

    def draw(self, _context):
        """Draw the import-options panel of the file browser."""
        layout = self.layout

        row = layout.row()
        row.enabled = False
        row.label(text='%d items' % len(self.files))

        row = layout.split()
        row.label(text='Format Version:')
        row.row().prop(self, 'fmt_version', expand=True)

        layout.prop(self, 'import_motions')
        row = layout.row()
        # Prefix option only makes sense when motions are imported at all.
        row.active = self.import_motions
        row.prop(self, 'use_motion_prefix_name')
        layout.prop(self, 'mesh_split_by_materials')
        layout.prop(self, 'shaped_bones')

    def invoke(self, context, event):
        """Seed the dialog's options from the add-on preferences."""
        preferences = plugin_prefs.get_preferences()
        self.fmt_version = preferences.sdk_version
        self.import_motions = preferences.object_motions_import
        self.mesh_split_by_materials = preferences.object_mesh_split_by_mat
        self.shaped_bones = preferences.object_bones_custom_shapes
        self.use_motion_prefix_name = preferences.use_motion_prefix_name
        return super().invoke(context, event)


assign_props([
    (op_import_object_props, OpImportObject),
])


def menu_func_import(self, _context):
    """File > Import menu entry for X-Ray objects."""
    icon = plugin.get_stalker_icon()
    self.layout.operator(
        OpImportObject.bl_idname,
        text='X-Ray object (.object)',
        icon_value=icon
    )
py
b4116052d01404f2da9c532b0760e55e6f60f36a
from PIL import Image
import numpy as np
import doctest


def convert_image_to_mosaic(image, size, gradation_step):
    """Apply a grayscale mosaic filter to *image*, block by block, in place.

    param image: image as a numpy array (mutated in place and returned)
    param size: side length of each square mosaic block
    param gradation_step: step between allowed gray levels
    return image

    >>> convert_image_to_mosaic((np.ones((3, 3, 3)) * 200), 2, 15)
    array([[[195., 195., 195.],
            [195., 195., 195.],
            [ 90.,  90.,  90.]],
    <BLANKLINE>
           [[195., 195., 195.],
            [195., 195., 195.],
            [ 90.,  90.,  90.]],
    <BLANKLINE>
           [[ 90.,  90.,  90.],
            [ 90.,  90.,  90.],
            [ 45.,  45.,  45.]]])
    """
    for row in range(0, len(image), size):
        for col in range(0, len(image[0]), size):
            # Each block is collapsed to a single quantized brightness.
            image[row:row + size, col:col + size] = get_average_brightness(
                image[row:row + size, col:col + size], size, gradation_step
            )
    return image


def get_average_brightness(block, size, gradation_step):
    """Return the quantized average brightness of a mosaic block.

    param block: block of the image (numpy array, HxWx3)
    param size: nominal block side length used as the averaging divisor
    param gradation_step: step between allowed gray levels
    return int

    >>> get_average_brightness(np.ones((3, 3, 3)) * 200, 2, 15)
    195
    >>> get_average_brightness(np.ones((3, 3, 3)) * 100, 2, 15)
    90
    >>> get_average_brightness(np.ones((3, 3, 3)) * 100, 6, 6)
    24
    >>> get_average_brightness(np.ones((10, 10, 3)) * 100, 6, 6)
    96
    """
    window = block[:size, :size]
    # Sum over all channels, divide by 3 channels, then by the nominal
    # block area (size**2) -- edge blocks smaller than size*size are
    # deliberately divided by the full area (behavior pinned by doctests).
    mean_brightness = (window.sum() / 3) // size ** 2
    return int(mean_brightness // gradation_step) * gradation_step


def main():
    image_file = Image.open(input("Введите имя файла, которое хотите конвертировать: "))
    block_size = int(input("Введите размер блока: "))
    gradations_count = int(input("Введите количество градаций серого: "))
    image = np.array(image_file)
    gradation_step = 255 // gradations_count
    res = Image.fromarray(convert_image_to_mosaic(image, block_size, gradation_step))
    res.save(input("Введите имя файла, в которой хотите сохранить результат: "))


if __name__ == '__main__':
    main()
py
b41161c7f653243a2c8af3610769a482c26e269e
import numpy as np
import sys
from six import StringIO, b

from environments.mujoco.rand_param_envs.gym import utils
from environments.mujoco.rand_param_envs.gym.envs.toy_text import discrete

# Action indices.
LEFT = 0
DOWN = 1
RIGHT = 2
UP = 3

# Built-in lake layouts, selectable via map_name.
MAPS = {
    "4x4": ["SFFF", "FHFH", "FFFH", "HFFG"],
    "8x8": [
        "SFFFFFFF",
        "FFFFFFFF",
        "FFFHFFFF",
        "FFFFFHFF",
        "FFFHFFFF",
        "FHHFFFHF",
        "FHFFHFHF",
        "FFFHFFFG",
    ],
}


class FrozenLakeEnv(discrete.DiscreteEnv):
    """
    Winter is here. You and your friends were tossing around a frisbee at
    the park when you made a wild throw that left the frisbee out in the
    middle of the lake. The water is mostly frozen, but there are a few
    holes where the ice has melted. If you step into one of those holes,
    you'll fall into the freezing water. At this time, there's an
    international frisbee shortage, so it's absolutely imperative that you
    navigate across the lake and retrieve the disc. However, the ice is
    slippery, so you won't always move in the direction you intend.

    The surface is described using a grid like the following

        SFFF
        FHFH
        FFFH
        HFFG

    S : starting point, safe
    F : frozen surface, safe
    H : hole, fall to your doom
    G : goal, where the frisbee is located

    The episode ends when you reach the goal or fall in a hole.
    You receive a reward of 1 if you reach the goal, and zero otherwise.
    """

    metadata = {"render.modes": ["human", "ansi"]}

    def __init__(self, desc=None, map_name="4x4", is_slippery=True):
        """Build the tabular MDP (transition table P, initial dist isd).

        Parameters
        ----------
        desc : list of str, optional
            Explicit map rows; overrides map_name.
        map_name : str, optional
            Key into MAPS when desc is not given.
        is_slippery : bool
            If True, each action moves in the intended direction or one of
            the two perpendicular directions, each with probability 1/3.
        """
        if desc is None and map_name is None:
            raise ValueError("Must provide either desc or map_name")
        elif desc is None:
            desc = MAPS[map_name]
        self.desc = desc = np.asarray(desc, dtype="c")
        self.nrow, self.ncol = nrow, ncol = desc.shape

        nA = 4
        nS = nrow * ncol

        # Initial state distribution: uniform over 'S' cells.
        isd = np.array(desc == b"S").astype("float64").ravel()
        isd /= isd.sum()

        # P[state][action] -> list of (probability, nextstate, reward, done).
        P = {s: {a: [] for a in range(nA)} for s in range(nS)}

        def to_s(row, col):
            # Flatten (row, col) into a single state index.
            return row * ncol + col

        def inc(row, col, a):
            # Move one step in direction `a`, clamped to the grid edges.
            if a == 0:  # left
                col = max(col - 1, 0)
            elif a == 1:  # down
                row = min(row + 1, nrow - 1)
            elif a == 2:  # right
                col = min(col + 1, ncol - 1)
            elif a == 3:  # up
                row = max(row - 1, 0)
            return (row, col)

        for row in range(nrow):
            for col in range(ncol):
                s = to_s(row, col)
                for a in range(4):
                    li = P[s][a]
                    letter = desc[row, col]
                    if letter in b"GH":
                        # Terminal cells: self-loop with no reward.
                        li.append((1.0, s, 0, True))
                    else:
                        if is_slippery:
                            # BUGFIX: the loop variable was previously
                            # named `b`, shadowing six's `b` import.
                            for slip_a in [(a - 1) % 4, a, (a + 1) % 4]:
                                newrow, newcol = inc(row, col, slip_a)
                                newstate = to_s(newrow, newcol)
                                newletter = desc[newrow, newcol]
                                done = bytes(newletter) in b"GH"
                                rew = float(newletter == b"G")
                                li.append((1.0 / 3.0, newstate, rew, done))
                        else:
                            newrow, newcol = inc(row, col, a)
                            newstate = to_s(newrow, newcol)
                            newletter = desc[newrow, newcol]
                            done = bytes(newletter) in b"GH"
                            rew = float(newletter == b"G")
                            li.append((1.0, newstate, rew, done))

        super(FrozenLakeEnv, self).__init__(nS, nA, P, isd)

    def _render(self, mode="human", close=False):
        """Render the grid, highlighting the agent's current cell in red.

        In "ansi" mode the rendering is written to (and returned as) a
        StringIO; in "human" mode it is written to stdout.
        """
        if close:
            return
        outfile = StringIO() if mode == "ansi" else sys.stdout

        row, col = self.s // self.ncol, self.s % self.ncol
        desc = self.desc.tolist()
        desc = [[c.decode("utf-8") for c in line] for line in desc]
        desc[row][col] = utils.colorize(desc[row][col], "red", highlight=True)
        if self.lastaction is not None:
            outfile.write(
                "  ({})\n".format(["Left", "Down", "Right", "Up"][self.lastaction])
            )
        else:
            outfile.write("\n")
        outfile.write("\n".join("".join(line) for line in desc) + "\n")

        return outfile
py
b4116252469169d342bae114d7e797a595f78cfa
# Package facade: re-export the public names of the leftRightTextCompleter
# module so callers can import them directly from this package.
from .leftRightTextCompleter import *
py
b4116389c1cde78af77240f72c6c519fe36d6aeb
from django.conf.urls.defaults import *

from controller.models import Experiment

# Shared context for the generic list/detail views:
# all experiments, ordered by when they were queued.
info_dict = {
    'queryset' : Experiment.objects.all().order_by("queued")
}

# Legacy (pre-Django-1.8) string-based URL dispatch; views are given as
# dotted paths, so the patterns() prefix argument is left empty.
urlpatterns = patterns('',
    # Example:
    #(r'^/index', 'controller.views.index'),
    (r'^experiment/create$', 'controller.views.experiment_create'),
    (r'^experiment/(?P<object_id>\d+)/view$', 'django.views.generic.list_detail.object_detail',info_dict ),
    (r'^experiment/(?P<object_id>\d+)/plot$', 'controller.views.experiment_plot'),
    (r'^experiment/(?P<object_id>\d+)/csv$', 'controller.views.experiment_csv'),
    (r'^experiment$', 'django.views.generic.list_detail.object_list', info_dict),
    (r'^experiment/compare/', 'controller.views.compare'),
    (r'^register/', 'controller.views.user_create'),
    (r'^$', 'controller.views.index'),
)
py
b41163c1d6b3cebfddd606cb5818a6b946781329
""" Benchmark models. """ from sqlalchemy import Column, Integer, MetaData, String, create_engine from sqlalchemy.orm import declarative_base, sessionmaker from sqlalchemy.pool import QueuePool def get_engine(settings): return create_engine( settings["sqlalchemy.url"], poolclass=QueuePool, pool_size=100, max_overflow=25, enable_from_linting=False, future=True, ) def get_session_factory(engine): Session = sessionmaker(bind=engine, autoflush=False, future=True) return Session metadata = MetaData() Base = declarative_base() class World(Base): __tablename__ = "world" id = Column("id", Integer, primary_key=True) randomNumber = Column("randomnumber", Integer, nullable=False, server_default="0") def __json__(self, request=None): return {"id": self.id, "randomNumber": self.randomNumber} class Fortune(Base): __tablename__ = "fortune" id = Column("id", Integer, primary_key=True) message = Column("message", String, nullable=False) def __json__(self): return {"id": self.id, "message": self.message}
py
b4116425468dd4a655147cc0fd5c6dcf1b7b43fd
# Copyright 2010 OpenStack Foundation # Copyright 2012 University Of Minho # Copyright 2014-2015 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_config import cfg from oslo_utils import encodeutils from nova import context from nova import exception from nova import test from nova.tests.unit.virt.libvirt import fakelibvirt from nova import utils from nova.virt.libvirt import config as vconfig from nova.virt.libvirt import guest as libvirt_guest from nova.virt.libvirt import host host.libvirt = fakelibvirt libvirt_guest.libvirt = fakelibvirt CONF = cfg.CONF class GuestTestCase(test.NoDBTestCase): def setUp(self): super(GuestTestCase, self).setUp() self.useFixture(fakelibvirt.FakeLibvirtFixture()) self.host = host.Host("qemu:///system") self.context = context.get_admin_context() self.domain = mock.Mock(spec=fakelibvirt.virDomain) self.guest = libvirt_guest.Guest(self.domain) def test_repr(self): self.domain.ID.return_value = 99 self.domain.UUIDString.return_value = "UUID" self.domain.name.return_value = "foo" self.assertEqual("<Guest 99 foo UUID>", repr(self.guest)) @mock.patch.object(fakelibvirt.Connection, 'defineXML') def test_create(self, mock_define): libvirt_guest.Guest.create("xml", self.host) mock_define.assert_called_once_with("xml") @mock.patch.object(fakelibvirt.Connection, 'defineXML') def test_create_exception(self, mock_define): mock_define.side_effect = test.TestingException self.assertRaises(test.TestingException, 
libvirt_guest.Guest.create, "foo", self.host) def test_launch(self): self.guest.launch() self.domain.createWithFlags.assert_called_once_with(0) def test_launch_and_pause(self): self.guest.launch(pause=True) self.domain.createWithFlags.assert_called_once_with( fakelibvirt.VIR_DOMAIN_START_PAUSED) @mock.patch.object(encodeutils, 'safe_decode') def test_launch_exception(self, mock_safe_decode): self.domain.createWithFlags.side_effect = test.TestingException mock_safe_decode.return_value = "</xml>" self.assertRaises(test.TestingException, self.guest.launch) self.assertEqual(1, mock_safe_decode.called) @mock.patch.object(utils, 'execute') @mock.patch.object(libvirt_guest.Guest, 'get_interfaces') def test_enable_hairpin(self, mock_get_interfaces, mock_execute): mock_get_interfaces.return_value = ["vnet0", "vnet1"] self.guest.enable_hairpin() mock_execute.assert_has_calls([ mock.call( 'tee', '/sys/class/net/vnet0/brport/hairpin_mode', run_as_root=True, process_input='1', check_exit_code=[0, 1]), mock.call( 'tee', '/sys/class/net/vnet1/brport/hairpin_mode', run_as_root=True, process_input='1', check_exit_code=[0, 1])]) @mock.patch.object(encodeutils, 'safe_decode') @mock.patch.object(utils, 'execute') @mock.patch.object(libvirt_guest.Guest, 'get_interfaces') def test_enable_hairpin_exception(self, mock_get_interfaces, mock_execute, mock_safe_decode): mock_get_interfaces.return_value = ["foo"] mock_execute.side_effect = test.TestingException('oops') self.assertRaises(test.TestingException, self.guest.enable_hairpin) self.assertEqual(1, mock_safe_decode.called) def test_get_interfaces(self): self.domain.XMLDesc.return_value = """<domain> <devices> <interface type="network"> <target dev="vnet0"/> </interface> <interface type="network"> <target dev="vnet1"/> </interface> </devices> </domain>""" self.assertEqual(["vnet0", "vnet1"], self.guest.get_interfaces()) def test_get_interfaces_exception(self): self.domain.XMLDesc.return_value = "<bad xml>" self.assertEqual([], 
self.guest.get_interfaces()) def test_poweroff(self): self.guest.poweroff() self.domain.destroy.assert_called_once_with() def test_resume(self): self.guest.resume() self.domain.resume.assert_called_once_with() def test_get_vcpus_info(self): self.domain.vcpus.return_value = ([(0, 1, 10290000000L, 2)], [(True, True)]) vcpus = list(self.guest.get_vcpus_info()) self.assertEqual(0, vcpus[0].id) self.assertEqual(2, vcpus[0].cpu) self.assertEqual(1, vcpus[0].state) self.assertEqual(10290000000L, vcpus[0].time) def test_delete_configuration(self): self.guest.delete_configuration() self.domain.undefineFlags.assert_called_once_with( fakelibvirt.VIR_DOMAIN_UNDEFINE_MANAGED_SAVE) def test_delete_configuration_exception(self): self.domain.undefineFlags.side_effect = fakelibvirt.libvirtError( 'oops') self.domain.ID.return_value = 1 self.guest.delete_configuration() self.domain.undefine.assert_called_once_with() def test_attach_device(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.attach_device(conf) self.domain.attachDeviceFlags.assert_called_once_with( "</xml>", flags=0) def test_attach_device_persistent(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.attach_device(conf, persistent=True) self.domain.attachDeviceFlags.assert_called_once_with( "</xml>", flags=fakelibvirt.VIR_DOMAIN_AFFECT_CONFIG) def test_attach_device_live(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.attach_device(conf, live=True) self.domain.attachDeviceFlags.assert_called_once_with( "</xml>", flags=fakelibvirt.VIR_DOMAIN_AFFECT_LIVE) def test_attach_device_persistent_live(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.attach_device(conf, persistent=True, live=True) self.domain.attachDeviceFlags.assert_called_once_with( "</xml>", 
flags=(fakelibvirt.VIR_DOMAIN_AFFECT_CONFIG | fakelibvirt.VIR_DOMAIN_AFFECT_LIVE)) def test_detach_device(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.detach_device(conf) self.domain.detachDeviceFlags.assert_called_once_with( "</xml>", flags=0) def test_detach_device_persistent(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.detach_device(conf, persistent=True) self.domain.detachDeviceFlags.assert_called_once_with( "</xml>", flags=fakelibvirt.VIR_DOMAIN_AFFECT_CONFIG) def test_detach_device_live(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.detach_device(conf, live=True) self.domain.detachDeviceFlags.assert_called_once_with( "</xml>", flags=fakelibvirt.VIR_DOMAIN_AFFECT_LIVE) def test_detach_device_persistent_live(self): conf = mock.Mock(spec=vconfig.LibvirtConfigGuestDevice) conf.to_xml.return_value = "</xml>" self.guest.detach_device(conf, persistent=True, live=True) self.domain.detachDeviceFlags.assert_called_once_with( "</xml>", flags=(fakelibvirt.VIR_DOMAIN_AFFECT_CONFIG | fakelibvirt.VIR_DOMAIN_AFFECT_LIVE)) def test_get_xml_desc(self): self.guest.get_xml_desc() self.domain.XMLDesc.assert_called_once_with(flags=0) def test_get_xml_desc_dump_inactive(self): self.guest.get_xml_desc(dump_inactive=True) self.domain.XMLDesc.assert_called_once_with( flags=fakelibvirt.VIR_DOMAIN_XML_INACTIVE) def test_get_xml_desc_dump_sensitive(self): self.guest.get_xml_desc(dump_sensitive=True) self.domain.XMLDesc.assert_called_once_with( flags=fakelibvirt.VIR_DOMAIN_XML_SECURE) def test_get_xml_desc_dump_inactive_dump_sensitive(self): self.guest.get_xml_desc(dump_inactive=True, dump_sensitive=True) self.domain.XMLDesc.assert_called_once_with( flags=(fakelibvirt.VIR_DOMAIN_XML_INACTIVE | fakelibvirt.VIR_DOMAIN_XML_SECURE)) def test_get_xml_desc_dump_migratable(self): 
self.guest.get_xml_desc(dump_migratable=True) self.domain.XMLDesc.assert_called_once_with( flags=fakelibvirt.VIR_DOMAIN_XML_MIGRATABLE) def test_has_persistent_configuration(self): self.assertTrue( self.guest.has_persistent_configuration()) self.domain.isPersistent.assert_called_once_with() def test_save_memory_state(self): self.guest.save_memory_state() self.domain.managedSave.assert_called_once_with(0) def test_get_block_device(self): disk = 'vda' gblock = self.guest.get_block_device(disk) self.assertEqual(disk, gblock._disk) self.assertEqual(self.guest, gblock._guest) class GuestBlockTestCase(test.NoDBTestCase): def setUp(self): super(GuestBlockTestCase, self).setUp() self.useFixture(fakelibvirt.FakeLibvirtFixture()) self.host = host.Host("qemu:///system") self.context = context.get_admin_context() self.domain = mock.Mock(spec=fakelibvirt.virDomain) self.guest = libvirt_guest.Guest(self.domain) self.gblock = self.guest.get_block_device('vda') def test_abort_job(self): self.gblock.abort_job() self.domain.blockJobAbort.assert_called_once_with('vda', flags=0) def test_abort_job_async(self): self.gblock.abort_job(async=True) self.domain.blockJobAbort.assert_called_once_with( 'vda', flags=fakelibvirt.VIR_DOMAIN_BLOCK_JOB_ABORT_ASYNC) def test_abort_job_pivot(self): self.gblock.abort_job(pivot=True) self.domain.blockJobAbort.assert_called_once_with( 'vda', flags=fakelibvirt.VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT) def test_get_job_info(self): self.domain.blockJobInfo.return_value = { "type": 1, "bandwidth": 18, "cur": 66, "end": 100} info = self.gblock.get_job_info() self.assertEqual(1, info.job) self.assertEqual(18, info.bandwidth) self.assertEqual(66, info.cur) self.assertEqual(100, info.end) self.domain.blockJobInfo.assert_called_once_with('vda', flags=0) def test_resize(self): self.gblock.resize(10) self.domain.blockResize.assert_called_once_with('vda', 10) def test_rebase(self): self.gblock.rebase("foo") self.domain.blockRebase.assert_called_once_with( 'vda', "foo", 0, 
flags=0) def test_rebase_shallow(self): self.gblock.rebase("foo", shallow=True) self.domain.blockRebase.assert_called_once_with( 'vda', "foo", 0, flags=fakelibvirt.VIR_DOMAIN_BLOCK_REBASE_SHALLOW) def test_rebase_reuse_ext(self): self.gblock.rebase("foo", reuse_ext=True) self.domain.blockRebase.assert_called_once_with( 'vda', "foo", 0, flags=fakelibvirt.VIR_DOMAIN_BLOCK_REBASE_REUSE_EXT) def test_rebase_copy(self): self.gblock.rebase("foo", copy=True) self.domain.blockRebase.assert_called_once_with( 'vda', "foo", 0, flags=fakelibvirt.VIR_DOMAIN_BLOCK_REBASE_COPY) def test_rebase_relative(self): self.gblock.rebase("foo", relative=True) self.domain.blockRebase.assert_called_once_with( 'vda', "foo", 0, flags=fakelibvirt.VIR_DOMAIN_BLOCK_REBASE_RELATIVE) def test_commit(self): self.gblock.commit("foo", "top") self.domain.blockCommit.assert_called_once_with( 'vda', "foo", "top", 0, flags=0) def test_commit_relative(self): self.gblock.commit("foo", "top", relative=True) self.domain.blockCommit.assert_called_once_with( 'vda', "foo", "top", 0, flags=fakelibvirt.VIR_DOMAIN_BLOCK_COMMIT_RELATIVE) def test_wait_for_job(self): self.domain.blockJobInfo.return_value = { "type": 4, "bandwidth": 18, "cur": 95, "end": 100} in_progress = self.gblock.wait_for_job() self.assertTrue(in_progress) self.domain.blockJobInfo.return_value = { "type": 4, "bandwidth": 18, "cur": 100, "end": 100} in_progress = self.gblock.wait_for_job() self.assertFalse(in_progress) self.domain.blockJobInfo.return_value = {"type": 0} in_progress = self.gblock.wait_for_job(wait_for_job_clean=True) self.assertFalse(in_progress) def test_wait_for_job_arbort_on_error(self): self.domain.blockJobInfo.return_value = -1 self.assertRaises( exception.NovaException, self.gblock.wait_for_job, abort_on_error=True)
py
b411647e1b423968b55a2228be9a50fed118fb6a
""" CLI to show events of a scenario. """ import datetime import sys import click from psutil._common import bytes2human from jade.common import OUTPUT_DIR from jade.loggers import setup_logging from jade.events import EventsSummary from jade.resource_monitor import CpuStatsViewer, DiskStatsViewer, \ MemoryStatsViewer, NetworkStatsViewer STATS = ( "cpu", "disk", "mem", "net" ) @click.group() def stats(): """View stats from a run.""" setup_logging("stats", None) @click.argument("stats", nargs=-1) @click.option( "-o", "--output", default=OUTPUT_DIR, show_default=True, help="Output directory." ) @click.command() def show(stats, output): """Shows stats from a run. \b Examples: jade stats jade stats cpu jade stats disk jade stats mem jade stats net jade stats cpu disk mem """ events = EventsSummary(output) if not stats: stats = STATS for stat in stats: if stat == "cpu": viewer = CpuStatsViewer(events) elif stat == "disk": viewer = DiskStatsViewer(events) elif stat == "mem": viewer = MemoryStatsViewer(events) elif stat == "net": viewer = NetworkStatsViewer(events) else: print(f"Invalid stat={stat}") sys.exit(1) viewer.show_stats() @click.option( "--human-readable/--no-human-readable", is_flag=True, default=True, show_default=True, help="Output directory." ) @click.option( "-o", "--output", default=OUTPUT_DIR, show_default=True, help="Output directory." ) @click.command() def bytes_consumed(output, human_readable): events = EventsSummary(output) consumed = events.get_bytes_consumed() if human_readable: print(bytes2human(consumed)) else: print(consumed) @click.option( "--human-readable/--no-human-readable", is_flag=True, default=True, show_default=True, help="Output directory." ) @click.option( "-o", "--output", default=OUTPUT_DIR, show_default=True, help="Output directory." 
) @click.command() def exec_time(output, human_readable): events = EventsSummary(output) config_exec_time = events.get_config_exec_time() if human_readable: print(datetime.timedelta(seconds=config_exec_time)) else: print(config_exec_time) stats.add_command(bytes_consumed) stats.add_command(exec_time) stats.add_command(show)
py
b411648411ba6f3bcecac1bae3c6e7a409f6cb96
from storages.backends.s3boto3 import S3Boto3Storage


class MediaStorage(S3Boto3Storage):
    """S3 storage backend for user-uploaded media files."""

    # Store uploads under the "media/" key prefix in the bucket.
    location = 'media'
    # Do not overwrite an existing object that has the same name.
    file_overwrite = False
py
b41165f4f2094174e7027e938a5703be3503e520
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 22 09:14:58 2021

@author: Mahsa

Sensitivity analysis: perturb one or two model parameters and report the
percentage change in net present value (NPV) relative to the unperturbed
model.
"""

import network
import analyze_result


def _perturbed_inputs(parameter1, percentage_change1,
                      parameter2=None, percentage_change2=None):
    """Return fresh model inputs with the requested perturbation(s) applied.

    NOTE(review): modify_parameter is assumed to mutate the inputs in
    place (the original single-parameter code path ignored its return
    value, while the two-parameter path reassigned it -- the two styles
    cannot both be right). Confirm against network.model_inputs.
    """
    new_data = network.model_inputs()
    new_data.modify_parameter(parameter1, percentage_change1)
    if parameter2 is not None:
        new_data.modify_parameter(parameter2, percentage_change2)
    return new_data


def _percentage_change(original_result, new_result):
    """Percentage change of the final NPV entry between two result series."""
    return (new_result[-1] - original_result[-1]) / original_result[-1] * 100


def run_sensitivity_statusQuo(parameter1, percentage_change1,
                              parameter2=None, percentage_change2=None):
    """Run a status-quo sensitivity analysis for one or two parameters.

    Prints and returns the percentage change in NPV caused by the
    perturbation(s).
    """
    original_result = analyze_result.calculate_net_present_value_statusQuo(
        network.model_inputs())
    new_result = analyze_result.calculate_net_present_value_statusQuo(
        _perturbed_inputs(parameter1, percentage_change1,
                          parameter2, percentage_change2))
    Percentage_change_result = _percentage_change(original_result, new_result)
    print(Percentage_change_result)
    return (Percentage_change_result)


def run_sensitivity_under(parameter1, percentage_change1,
                          parameter2=None, percentage_change2=None):
    """Run an undergrounding-after-lifespan sensitivity analysis.

    Returns the percentage change in NPV caused by the perturbation(s).
    (Unlike run_sensitivity_statusQuo, this does not print the value,
    matching the original behavior.)
    """
    original_result = \
        analyze_result.calculate_net_present_value_under_after_lifespan(
            network.model_inputs())
    new_result = \
        analyze_result.calculate_net_present_value_under_after_lifespan(
            _perturbed_inputs(parameter1, percentage_change1,
                              parameter2, percentage_change2))
    Percentage_change_result = _percentage_change(original_result, new_result)
    return (Percentage_change_result)
py
b41166246355ad346f847c61ca011a52bac058a5
#!/usr/bin/env python
"""Compare pylint scores of changed files against origin/master and
report regressions."""

import os
import sys
import logging
import subprocess

logging.basicConfig()
l = logging.getLogger("lint")
#l.setLevel(logging.DEBUG)

# BUGFIX: previously defined only under the __main__ guard, so importing
# this module and calling lint_file() raised NameError.
pylint_rc = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../pylintrc')


def lint_file(filename):
    """Run pylint on one file; return (error_lines, score out of 10)."""
    l.debug("Linting file %s", filename)
    try:
        cmd = ["pylint", "--rcfile=%s" % pylint_rc, os.path.abspath(filename)]
        pylint_out = subprocess.check_output(cmd).decode()
    except subprocess.CalledProcessError as e:
        if e.returncode == 32:
            # Exit code 32 is a pylint usage error, not a lint finding.
            print("LINT FAILURE: pylint failed to run on %s" % filename)
            pylint_out = "-1337/10"
        else:
            pylint_out = e.output.decode()

    if "\n0 statements analysed." in pylint_out:
        # Empty file: nothing to complain about.
        return [], 10.00

    if "Report" not in pylint_out:
        return ["LINT FAILURE: syntax error in file?"], 0

    out_lines = pylint_out.split('\n')
    errors = out_lines[1:out_lines.index('Report') - 2]
    # The score line looks like "Your code has been rated at X.XX/10".
    score = float(out_lines[-3].split("/")[0].split(" ")[-1])
    l.info("File %s has score %.2f", filename, score)
    return errors, score


def lint_files(tolint):
    """Lint every existing file in *tolint*; map filename -> (errors, score)."""
    return {f: lint_file(f) for f in tolint if os.path.isfile(f)}


def compare_lint():
    """Lint changed .py files on this branch and on origin/master.

    Returns True when there are no score regressions (or linting was
    skipped), False otherwise.
    """
    repo_dir = subprocess.check_output(
        "git rev-parse --show-toplevel".split()).decode().strip()
    repo_name = os.path.basename(repo_dir)
    os.chdir(repo_dir)

    cur_branch = subprocess.check_output(
        "git rev-parse --abbrev-ref HEAD".split()).decode().strip()
    if cur_branch == "master":
        print("### Aborting linting for %s because it is on master." % repo_name)
        return True

    # Get the files to lint.
    changed_files = [
        o.split()[-1] for o in subprocess.check_output(
            "git diff --name-status origin/master".split()
        ).decode().split("\n")[:-1]
    ]
    tolint = [f for f in changed_files if f.endswith(".py")]
    print("Changed files: %s" % (tolint,))
    if len(tolint) > 50:
        print("")
        print("...You know what, I trust you")
        return True

    new_results = lint_files(tolint)
    subprocess.check_call("git checkout origin/master".split())
    try:
        old_results = lint_files(tolint)
    finally:
        # Always return to the previous branch, even if linting blew up.
        subprocess.check_call("git checkout @{-1}".split())

    print("")
    print("###")
    print("### LINT REPORT FOR %s" % repo_name)
    print("###")
    print("")

    regressions = []
    for v in new_results:
        new_errors, new_score = new_results[v]
        if v not in old_results:
            if new_score != 10.00:
                print("LINT FAILURE: new file %s lints at %.2f/10.00. Errors:"
                      % (v, new_score))
                print("... " + "\n... ".join(new_errors))
                regressions.append((v, None, new_score))
            else:
                print("LINT SUCCESS: new file %s is a perfect 10.00!" % v)
        else:
            _, old_score = old_results[v]
            if new_score < old_score:
                print("LINT FAILURE: %s regressed to %.2f/%.2f"
                      % (v, new_score, old_score))
                print("... " + "\n... ".join(new_errors))
                regressions.append((v, old_score, new_score))
            elif new_score > old_score:
                print("LINT SUCCESS: %s has improved to %.2f (from %.2f)!"
                      % (v, new_score, old_score))
            else:
                print("LINT SUCCESS: %s has remained at %.2f" % (v, new_score))

    print("")
    print("###")
    print("### END LINT REPORT FOR %s" % repo_name)
    print("###")
    print("")

    return len(regressions) == 0


def do_in(directory, function, *args, **kwargs):
    """Call *function* with the cwd temporarily set to *directory*."""
    cur_dir = os.path.abspath(os.getcwd())
    try:
        os.chdir(directory)
        return function(*args, **kwargs)
    finally:
        os.chdir(cur_dir)


if __name__ == '__main__':
    if not os.path.isfile("tests/lint.py"):
        # lint the cwd
        sys.exit(0 if compare_lint() else 1)
    elif len(sys.argv) == 1:
        # lint all repos under the cwd
        sys.exit(0 if all(
            do_in(r, compare_lint) for r in [
                i for i in os.listdir(".")
                if os.path.isdir(os.path.join(i, ".git"))
            ]
        ) else 1)
    else:
        # lint several named directories
        sys.exit(0 if all(do_in(r, compare_lint) for r in sys.argv[1:]) else 1)
py
b41166cc146f5ea2776ce00c6cff0a5e5e2dfe03
import string

import numpy

from cupy.core._fusion_variable import _TraceVariable
from cupy.core._fusion_variable import _TraceArray
from cupy.core._fusion_variable import _VariableSet
from cupy.core import _fusion_thread_local
from cupy.core import _fusion_emit_code
from cupy.core import _kernel
from cupy.core import _reduction


# Shared numpy.dtype -> CUDA C type name table.
_dtype_to_ctype = _fusion_emit_code._dtype_to_ctype


class _UfuncRoutine:
    """A device function for single elementwise operations.

    Wraps one ufunc application inside a fused kernel: it knows the
    participating trace variables, the per-operand compute dtypes and the
    C routine snippet, and can emit both the ``__device__`` function
    definition and the call expression for it.
    """

    def __init__(
            self, name, ufunc, routine_code, in_params, out_params,
            compute_dtypes):
        # name: unique device-function name (e.g. 'cupy_add_0').
        # ufunc: the cupy ufunc object; only its preamble is retained.
        # routine_code: C expression/statement implementing the op body.
        # in_params/out_params: trace variables feeding/receiving the op.
        # compute_dtypes: dtype each operand is cast to for computation,
        #   inputs first, outputs after.
        assert isinstance(name, str)
        assert isinstance(ufunc, _kernel.ufunc)
        assert isinstance(routine_code, str)
        assert isinstance(compute_dtypes, tuple)
        assert all(isinstance(t, numpy.dtype) for t in compute_dtypes)
        assert isinstance(in_params, list)
        assert all(isinstance(p, _TraceVariable) for p in in_params)
        assert isinstance(out_params, list)
        assert all(isinstance(p, _TraceArray) for p in out_params)

        self.name = name
        self.in_params = in_params
        self.out_params = out_params
        self.preamble = ufunc._preamble
        self.routine_code = routine_code
        self.compute_dtypes = compute_dtypes

    def emit_code(self):
        """Returns a CUDA device function code.

        Returns a string like:
        ```
        __device__ void cupy_add_0(int &in0_, float &in1_, double &out0_) {
            typedef double in0_type;
            typedef double in1_type;
            typedef double out0_type;
            double in0 = (double) in0_;
            double in1 = (double) in1_;
            double out0 = (double) out0_;
            out0 = in0 + in1;
            out0_ = out0;
        }
        ```
        """
        nin = len(self.in_params)
        dtypes = self.compute_dtypes
        assert len(self.in_params) == len(self.compute_dtypes[:nin])
        # Each entry: (storage ctype, compute ctype, operand name).
        in_params = [
            (_dtype_to_ctype[p.dtype], _dtype_to_ctype[t], 'in{}'.format(i))
            for i, (p, t) in enumerate(zip(self.in_params, dtypes[:nin]))
        ]
        out_params = [
            (_dtype_to_ctype[p.dtype], _dtype_to_ctype[t], 'out{}'.format(i))
            for i, (p, t) in enumerate(zip(self.out_params, dtypes[nin:]))
        ]
        params = in_params + out_params

        # Parameters are references to the raw (storage-typed) values; the
        # body reads them into compute-typed locals and writes results back.
        params_code = ', '.join(['{} &{}_'.format(t, s) for t, _, s in params])
        typedef = ['typedef {} {}_type;'.format(t, s) for _, t, s in params]
        read = ['{} {} = ({}) {}_;'.format(t, s, t, s) for _, t, s in params]
        # NOTE(review): format() receives three args for two '{}' fields
        # here; the extra argument is ignored and harmless.
        write = ['{}_ = {};'.format(s, s, s) for _, _, s in out_params]

        return _fusion_emit_code._CodeBlock(
            '__device__ void {}({})'.format(self.name, params_code),
            typedef + read + [self.routine_code + ';'] + write)

    def emit_call_code(self):
        # Emit the call statement passing each variable's local lvalue name.
        params = self.in_params + self.out_params
        return '{op_name}({params});'.format(
            op_name=self.name,
            params=', '.join([var.lvar_name for var in params]))


class _ElementwiseTraceOp:
    """Ufunc or elementwise kernel with types.

    Represents one fused elementwise loop: a sequence of _UfuncRoutine
    operations applied over a common abstract shape ``ashape``.
    """

    def __init__(self, ufunc_routines, in_params, out_params, ashape):
        # The `in_params` and `out_params` should be already broadcasted to
        # `ashape`, but they don't guarantee to be exactly same as
        # `param.ashape`.

        _fusion_thread_local.check_not_runtime()

        assert isinstance(ufunc_routines, list)
        assert all(isinstance(r, _UfuncRoutine) for r in ufunc_routines)
        assert isinstance(ashape, tuple)

        self.ops = ufunc_routines
        self.in_params = _VariableSet(*in_params)
        self.out_params = _VariableSet(*out_params)
        self.ashape = ashape

    @property
    def params(self):
        """Returns the set of all variables the loop uses.
        """
        res = _VariableSet()
        for op in self.ops:
            res += _VariableSet(*op.in_params)
            res += _VariableSet(*op.out_params)
        return res

    @staticmethod
    def _emit_declaration(params, in_params):
        """Returns a tuple of size 2.

        1. CUDA code: declaring local variables.
        2. The set of arrays which require indexer.
        """
        _fusion_thread_local.check_not_runtime()

        indexed_arrays = _VariableSet()
        code = []
        for var in params:
            if var in in_params:
                # Inputs are initialized from memory; arrays go through
                # their indexer, scalars are read directly.
                if isinstance(var, _TraceArray):
                    indexed_arrays.add(var)
                    f = '${type} ${lvar} = ${var}[${indexer}.get()];'
                else:
                    f = '${type} ${lvar} = ${var};'
            else:
                # Intermediates/outputs start uninitialized.
                f = '${type} ${lvar};'
            code.append(var.format(f))
        return code, indexed_arrays

    @staticmethod
    def _emit_after_operation(out_params):
        """Returns a tuple of size 2.

        1. CUDA code: writing the results of operations back to global memory.
        2. The set of arrays which require indexer.
        """

        _fusion_thread_local.check_not_runtime()

        indexed_arrays = _VariableSet()
        codes = []
        for var in out_params:
            if isinstance(var, _TraceArray):
                indexed_arrays.add(var)
                f = '${var}[${indexer}.get()] = ${lvar};'
            else:
                f = '${var} = ${lvar};'
            codes.append(var.format(f))
        return codes, indexed_arrays

    @staticmethod
    def _emit_set_index(indexed_params, tid):
        """Returns a CUDA code: setting a raw index to indexers.
        """
        _fusion_thread_local.check_not_runtime()
        assert isinstance(indexed_params, _VariableSet)

        return [
            p.format('${indexer}.set(${tid});', tid=tid)
            for p in indexed_params
        ]

    def emit_code(self):
        """Emit the CUPY_FOR loop body for this fused elementwise op."""
        _fusion_thread_local.check_not_runtime()

        declaration, s1 = self._emit_declaration(self.params, self.in_params)
        operation = [op.emit_call_code() for op in self.ops]
        after_operation, s2 = self._emit_after_operation(self.out_params)
        index_name = 'i'
        indexed_array = s1 + s2
        # NOTE(review): assumes at least one indexed array exists; the loop
        # bound is taken from the first one's indexer.
        indexer_name = next(iter(indexed_array)).indexer_name
        indexer_setup = self._emit_set_index(indexed_array, index_name)

        return _fusion_emit_code._CodeBlock(
            'CUPY_FOR({}, {}.size())'.format(index_name, indexer_name),
            indexer_setup + declaration + operation + after_operation)

    def emit_preamble_codes(self):
        # Non-empty ufunc preambles needed by the routines.
        return [subm.preamble for subm in self.ops if subm.preamble != '']

    def emit_submodule_codes(self):
        # Device-function definitions for each routine in this loop.
        return [str(subm.emit_code()) for subm in self.ops]


class _ReductionTraceOp:
    def __init__(self, name, reduce_func, expr, in_param, out_param, axis):
        """Reduction operation.
        """
        _fusion_thread_local.check_not_runtime()

        assert isinstance(name, str)
        assert isinstance(reduce_func, _reduction._SimpleReductionKernel)
        assert isinstance(in_param, _TraceArray)
        assert isinstance(out_param, _TraceArray)
        assert isinstance(axis, tuple)
        assert all([0 <= x < in_param.ndim for x in axis])

        self.name = name
        self.preamble = reduce_func.preamble
        self.in_params = _VariableSet(in_param)
        self.out_params = _VariableSet(out_param)
        self.block_stride_name = 'block_stride_' + name
        self.axis = axis

        # ${identity} in the template expands to '' when there is no
        # identity element, yielding `_type_reduce s = _type_reduce();`.
        if reduce_func.identity is None:
            self.identity = ''
        else:
            self.identity = str(reduce_func.identity)

        _, self.expr, self.postmap_cast_code, self.reduce_ctype = expr
        if self.reduce_ctype is None:
            # Default the accumulator type to the output dtype's ctype.
            out_param, = self.out_params
            self.reduce_ctype = _dtype_to_ctype[out_param.dtype]

        self.premap_op = None
        self.postmap_op = None

    @property
    def params(self):
        return self.in_params + self.out_params

    def emit_code(self):
        """Emit the call statement invoking the reduction device function."""
        _fusion_thread_local.check_not_runtime()
        assert len(self.in_params) == 1
        assert len(self.out_params) == 1
        in_param = list(self.in_params)[0]
        out_param = list(self.out_params)[0]
        params = ', '.join([
            in_param.var_name,
            out_param.var_name,
            in_param.indexer_name,
            out_param.indexer_name,
        ])
        return '{}({}, {});'.format(
            self.name, params, self.block_stride_name)

    def emit_preamble_codes(self):
        preamble = self.preamble
        return [preamble] if preamble != '' else []

    def emit_submodule_codes(self):
        """Returns a CUDA device function code.

        The emitted code assumes that ``block_stride`` and ``blockDim.x``
        are powers of 2.
        """
        in_param, = self.in_params
        out_param, = self.out_params
        op_name = '{}_op'.format(self.name)
        postmap_name = '{}_postmap'.format(self.name)

        code = string.Template('''
#define ${op_name}(a, b) (${reduce_expr})
#define ${postmap_name}(a, out0) (${postmap_cast})

template <typename InType, typename OutType,
        typename InIndexerType, typename OutIndexerType>
__device__ void ${name}(
        InType in_arr, OutType out_arr,
        InIndexerType in_ind, OutIndexerType out_ind, int block_stride) {
    typedef ${in_type} type_in0_raw;
    typedef ${out_type} type_out0_raw;
    typedef ${reduce_ctype} _type_reduce;
    extern __shared__ char _sdata_raw[];
    _type_reduce *sdata = reinterpret_cast<_type_reduce*>(_sdata_raw);
    unsigned int tid = threadIdx.x;
    int _J = tid >> __popc(block_stride - 1);
    ptrdiff_t _j = (ptrdiff_t)_J * out_ind.size();
    int J_stride = blockDim.x >> __popc(block_stride - 1);
    ptrdiff_t j_stride = (ptrdiff_t)J_stride * out_ind.size();

    for (ptrdiff_t _i = (ptrdiff_t)blockIdx.x * block_stride;
            _i < out_ind.size();
            _i += (ptrdiff_t)gridDim.x * block_stride) {
        _type_reduce s = _type_reduce(${identity});
        ptrdiff_t i = _i + (tid & (block_stride - 1));
        for (ptrdiff_t j = i + _j; j < in_ind.size(); j += j_stride) {
            in_ind.set(j);
            s = ${op_name}(s, static_cast<_type_reduce>(in_arr[in_ind.get()]));
        }
        sdata[tid] = s;
        __syncthreads();
        for (unsigned int block = blockDim.x / 2;
                block >= block_stride;
                block >>= 1) {
            if (tid < block) {
                sdata[tid] = ${op_name}(sdata[tid], sdata[tid + block]);
            }
            __syncthreads();
        }
        if (tid < block_stride) {
            s = sdata[tid];
        }
        if (tid < block_stride && i < out_ind.size()) {
            out_ind.set(i);
            ${postmap_name}(s, out_arr[out_ind.get()]);
        }
        __syncthreads();
    }
}'''  # NOQA
                               ).substitute(
            name=self.name,
            op_name=op_name,
            postmap_name=postmap_name,
            in_type=_dtype_to_ctype[in_param.dtype],
            out_type=_dtype_to_ctype[out_param.dtype],
            reduce_ctype=self.reduce_ctype,
            reduce_expr=self.expr,
            identity=self.identity,
            postmap_cast=self.postmap_cast_code)

        return [code]
py
b41167146af93abaf9cbf355e09a41b1d160a3b6
import os
import textwrap

import pytest

from conans.test.assets.sources import gen_function_cpp, gen_function_h
from conans.test.functional.toolchains.meson._base import TestMesonBase


class MesonInstall(TestMesonBase):
    """Functional test: build and install a small Meson library package,
    then consume it from a CMake-based test_package.
    """

    # Recipe under test: builds `libhello` with Meson and installs it.
    _conanfile_py = textwrap.dedent("""
    import os
    import shutil

    from conan import ConanFile
    from conan.tools.meson import Meson, MesonToolchain


    class App(ConanFile):
        settings = "os", "arch", "compiler", "build_type"
        options = {"shared": [True, False], "fPIC": [True, False]}
        default_options = {"shared": False, "fPIC": True}
        exports_sources = "meson.build", "hello.cpp", "hello.h"

        def config_options(self):
            if self.settings.os == "Windows":
                del self.options.fPIC

        def layout(self):
            self.folders.build = "build"

        def generate(self):
            tc = MesonToolchain(self)
            # https://mesonbuild.com/Release-notes-for-0-50-0.html#libdir-defaults-to-lib-when-cross-compiling
            tc.project_options["libdir"] = "lib"
            tc.generate()

        def build(self):
            meson = Meson(self)
            meson.configure()
            meson.build()

        def package(self):
            meson = Meson(self)
            meson.install()

            # https://mesonbuild.com/FAQ.html#why-does-building-my-project-with-msvc-output-static-libraries-called-libfooa
            if self.settings.compiler == 'Visual Studio' and not self.options.shared:
                shutil.move(os.path.join(self.package_folder, "lib", "libhello.a"),
                            os.path.join(self.package_folder, "lib", "hello.lib"))

        def package_info(self):
            self.cpp_info.libs = ['hello']
    """)

    # Minimal Meson project: one library plus an installed header.
    _meson_build = textwrap.dedent("""
    project('tutorial', 'cpp')
    library('hello', 'hello.cpp', install: true)
    install_headers('hello.h')
    """)

    # Consumer recipe: builds the test executable with CMake and runs it
    # (skipped when cross-building).
    _test_package_conanfile_py = textwrap.dedent("""
    import os

    from conan import ConanFile
    from conan.tools.cmake import CMake
    from conan.tools.layout import cmake_layout
    from conans import tools


    class TestConan(ConanFile):
        settings = "os", "compiler", "build_type", "arch"
        generators = "CMakeToolchain", "CMakeDeps"

        def layout(self):
            cmake_layout(self)

        def build(self):
            cmake = CMake(self)
            cmake.configure()
            cmake.build()

        def test(self):
            if not tools.cross_building(self):
                cmd = os.path.join(self.cpp.build.bindirs[0], "test_package")
                self.run(cmd)
    """)

    # CMake project linking against the packaged hello library.
    _test_package_cmake_lists = textwrap.dedent("""
    cmake_minimum_required(VERSION 3.1)
    project(test_package CXX)

    find_package(hello CONFIG REQUIRED)

    add_executable(${PROJECT_NAME} test_package.cpp)
    target_link_libraries(${PROJECT_NAME} hello::hello)
    """)

    @pytest.mark.tool_meson
    def test_install(self):
        # Generate the library sources and a main() that calls hello().
        hello_cpp = gen_function_cpp(name="hello")
        hello_h = gen_function_h(name="hello")
        test_package_cpp = gen_function_cpp(name="main", includes=["hello"],
                                            calls=["hello"])

        self.t.save({"conanfile.py": self._conanfile_py,
                     "meson.build": self._meson_build,
                     "hello.cpp": hello_cpp,
                     "hello.h": hello_h,
                     os.path.join("test_package", "conanfile.py"): self._test_package_conanfile_py,
                     os.path.join("test_package", "CMakeLists.txt"): self._test_package_cmake_lists,
                     os.path.join("test_package", "test_package.cpp"): test_package_cpp})

        # Create the package (runs test_package implicitly), then verify
        # the produced binary matches the expected settings.
        self.t.run("create . hello/0.1@ %s" % self._settings_str)

        self._check_binary()
py
b411673955c3db6ce5208eb34fd86c21bb126ab6
from sympy import *
from math import *  # NOTE: must come after sympy so math.sin/cos/sqrt win

# Per-frame data, populated by read_xyd(). Each entry is one motion frame.
lines = []      # bone lengths per frame (19 values, see read_xyd chunking)
angles = []     # joint angles per frame (18 values)
dep_tru = []    # ground-truth depth angles per frame (20 values)
dep_res = []    # predicted depth angles per frame (20 values)
index = []
x = []          # ground-truth x coordinate of 20 joints per frame
y = []          # ground-truth y coordinate of 20 joints per frame
z = []          # ground-truth z coordinate of 20 joints per frame
x_res = []
y_res = []
z_res = []

# Accumulated per-joint statistics: mean relative error and mean distance.
x_error = []
x_dis = []
y_error = []
y_dis = []
z_error = []
z_dis = []

# Number of evaluation frames in the test split (fixed by the data files).
_NUM_FRAMES = 525


def onepoint(d_ab, d_ac, dep_b, dep_a, angle, a_x, a_y, c_x, c_y):
    """Solve for the two candidate 2-D positions of joint B.

    Given bone lengths ``d_ab`` (A-B) and ``d_ac`` (A-C), depth angles
    ``dep_b``/``dep_a``, the angle BAC and the known planar positions of
    joints A and C, intersect the two circles centred at A and C.

    :return: ``[[x1, y1], [x2, y2]]`` -- the two intersection points.
    :raises ValueError: if a squared radius is negative (math.sqrt below).
    """
    # Depth offsets of B and C relative to A.
    b_a = d_ab * sin(dep_b)
    d_bc = sqrt(d_ab ** 2 + d_ac ** 2 - 2 * d_ab * d_ac * cos(angle))
    c_a = d_ac * sin(dep_a)
    # Squared circle radii projected onto the x/y plane.
    ab = d_ab ** 2 - b_a ** 2
    bc = d_bc ** 2 - (c_a + b_a) ** 2
    # Results unused, but kept: math.sqrt raises on a negative radicand,
    # preserving the original failure mode for infeasible geometry.
    ra = sqrt(ab)
    rc = sqrt(bc)
    xs = Symbol('x')
    ys = Symbol('y')
    res = solve([(xs - a_x) ** 2 + (ys - a_y) ** 2 - ab,
                 (xs - c_x) ** 2 + (ys - c_y) ** 2 - bc], [xs, ys])
    return [list(res[0]), list(res[1])]


def onepoint2(d_ab, d_ac, dep_b, angle, a_x, a_y, a_z, c_x, c_y, c_z):
    """Like :func:`onepoint`, but derives C's depth offset from its known z.

    Uses ``abs()`` on the squared radii so infeasible (negative) values are
    clamped rather than raising.

    :return: ``[[x1, y1], [x2, y2]]`` -- the two intersection points.
    """
    b_a = d_ab * sin(dep_b)
    d_bc = sqrt(d_ab ** 2 + d_ac ** 2 - 2 * d_ab * d_ac * cos(angle))
    # Depth offset of C relative to B, from the known z coordinates.
    c_b = c_z - a_z + b_a
    ab = abs(d_ab ** 2 - b_a ** 2)
    bc = abs(d_bc ** 2 - c_b ** 2)
    xs = Symbol('x')
    ys = Symbol('y')
    res = solve([(xs - a_x) ** 2 + (ys - a_y) ** 2 - ab,
                 (xs - c_x) ** 2 + (ys - c_y) ** 2 - bc], [xs, ys])
    return [list(res[0]), list(res[1])]


def _read_floats(path):
    """Read a space-separated text file into one flat list of floats."""
    values = []
    with open(path, 'r') as f:
        for line in f.readlines():
            for token in line.strip().split(" "):
                values.append(float(token))
    return values


def _chunk(values, size):
    """Split a flat list into consecutive chunks of ``size`` items.

    A trailing partial chunk is dropped, matching the original loops.
    """
    return [values[i * size:(i + 1) * size]
            for i in range(int(len(values) / size))]


def read_test():
    """Locate the selected test frames inside the full depth file.

    Side effect: appends every selected 20-value frame to ``dep_tru``.

    :return: list of indices of the selected frames within the full file
             (frames with no exact match are silently skipped).
    """
    all_frames = _chunk(_read_floats("data/allMotion05dep.txt"), 20)
    selected = _chunk(_read_floats("data/test_selected05.txt"), 20)
    for frame in selected:
        dep_tru.append(frame)

    idx = []
    for frame in selected:
        for i in range(len(all_frames)):
            if all_frames[i] == frame:
                idx.append(i)
                break
    return idx


def read_xyd():
    """Load bone lengths, angles, predicted depths and x/y/z coordinates.

    Populates the module-level ``lines``, ``angles``, ``dep_res``, ``x``,
    ``y`` and ``z`` lists for the frames selected by :func:`read_test`.
    """
    idx = read_test()

    all_lines = _chunk(_read_floats("data/linesallMotion10.txt"), 19)
    all_angles = _chunk(_read_floats("data/allMotion05angles.txt"), 18)
    # Predicted depths are already restricted to the test frames.
    for frame in _chunk(_read_floats("data/test_results05.txt"), 20):
        dep_res.append(frame)

    for i in idx:
        lines.append(all_lines[i])
        angles.append(all_angles[i])

    all_x = _chunk(_read_floats("data/axies_allMotion05x.txt"), 20)
    all_y = _chunk(_read_floats("data/axies_allMotion05y.txt"), 20)
    # Fix of a copy-paste bug: the original sized the z chunks from the
    # y file's length; identical behavior when the files match in length.
    all_z = _chunk(_read_floats("data/axies_allMotion05z.txt"), 20)

    for i in idx:
        x.append(all_x[i])
        y.append(all_y[i])
        z.append(all_z[i])


# One row per reconstructed joint:
# (label, len_ab idx, len_ac idx, dep idx, angle idx, joint A, joint C, target)
_JOINT_SPECS = [
    (1, 3, 1, 7, 2, 0, 2, 7),        # right elbow
    (2, 4, 3, 9, 4, 7, 0, 9),        # right wrist
    (3, 5, 4, 11, 6, 9, 7, 11),      # right hand
    (4, 6, 2, 8, 3, 1, 2, 8),        # left elbow
    (5, 7, 6, 10, 5, 8, 1, 10),      # left wrist
    (6, 8, 7, 12, 7, 10, 8, 12),     # left hand
    (7, 12, 11, 13, 12, 4, 6, 13),   # left knee
    (8, 13, 12, 15, 14, 13, 4, 15),  # left ankle
    (9, 14, 13, 17, 16, 15, 13, 17), # left foot
    (10, 16, 15, 14, 13, 5, 6, 14),  # right knee
    (11, 17, 16, 16, 15, 14, 5, 16), # right ankle
    (12, 18, 17, 18, 17, 16, 14, 18) # right foot
]


def _accumulate_joint(label, ab_i, ac_i, dep_i, ang_i, a_i, c_i, num):
    """Reconstruct one joint on every frame and record its error stats.

    Picks, per frame, whichever of the two circle intersections is closer
    (L1 distance) to the ground truth, then appends the mean relative
    error and mean absolute distance to the module accumulators and
    prints them in the original report format.
    """
    x_t = []
    y_t = []
    for i in range(_NUM_FRAMES):
        cand = onepoint2(lines[i][ab_i], lines[i][ac_i],
                         dep_res[i][dep_i], angles[i][ang_i],
                         x[i][a_i], y[i][a_i], z[i][a_i],
                         x[i][c_i], y[i][c_i], z[i][c_i])
        (x1, y1), (x2, y2) = cand
        dis1 = abs(x[i][num] - x1) + abs(y[i][num] - y1)
        dis2 = abs(x[i][num] - x2) + abs(y[i][num] - y2)
        if dis1 < dis2:
            x_t.append(x1)
            y_t.append(y1)
        else:
            x_t.append(x2)
            y_t.append(y2)

    sum1 = sum2 = sum3 = sum4 = 0
    for i in range(_NUM_FRAMES):
        sum1 = sum1 + abs(x_t[i] - x[i][num]) / abs(x[i][num])
        sum2 = sum2 + abs(y_t[i] - y[i][num]) / abs(y[i][num])
        sum3 = sum3 + abs(x_t[i] - x[i][num])
        sum4 = sum4 + abs(y_t[i] - y[i][num])
    x_error.append(sum1 / _NUM_FRAMES)
    y_error.append(sum2 / _NUM_FRAMES)
    x_dis.append(sum3 / _NUM_FRAMES)
    y_dis.append(sum4 / _NUM_FRAMES)
    print(label)
    print(sum1 / _NUM_FRAMES)
    print(sum2 / _NUM_FRAMES)
    print(sum3 / _NUM_FRAMES)
    print(sum4 / _NUM_FRAMES)


def produce_pos_1():
    """Reconstruct all 12 limb joints from predicted depths, report errors.

    Replaces twelve hand-copied stanzas of the original with one
    table-driven loop over ``_JOINT_SPECS``; outputs are identical.
    """
    read_xyd()
    for spec in _JOINT_SPECS:
        _accumulate_joint(*spec)


# (depth index, bone-length index) per joint, in the original report order.
_Z_SPECS = [
    (7, 3), (9, 4), (11, 5), (8, 6), (10, 7), (12, 8),
    (13, 12), (15, 13), (17, 14), (14, 16), (16, 17), (18, 18),
]


def produce_z():
    """Report depth (z) errors of the predicted depth angles per joint.

    For each joint: mean absolute z distance (|sin(true)-sin(pred)|*bone)
    and mean relative error, appended to ``z_dis``/``z_error`` and printed.
    """
    read_xyd()
    for dep_i, line_i in _Z_SPECS:
        sum1 = 0
        sum2 = 0
        for i in range(_NUM_FRAMES):
            diff = abs(sin(dep_tru[i][dep_i]) - sin(dep_res[i][dep_i]))
            sum1 = sum1 + diff * lines[i][line_i]
            sum2 = sum2 + diff / abs(sin(dep_tru[i][dep_i]) * lines[i][line_i])
        z_dis.append(sum1 / _NUM_FRAMES)
        z_error.append(sum2 / _NUM_FRAMES)
        print(sum1 / _NUM_FRAMES)
        print(sum2 / _NUM_FRAMES)


# produce_pos_1() is available but, as in the original, only the depth
# report runs at import time.
produce_z()
py
b411691aaaff72789116048577d8f06047a3d581
# coding: utf-8

"""
    ORCID Member

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)  # noqa: E501

    OpenAPI spec version: Latest

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six
from orcid_api_v3.models.bulk_element import BulkElement  # noqa: F401,E501


class WorkBulkV30Rc2(object):
    """Swagger model wrapping a bulk list of work elements.

    NOTE: This class is auto generated by the swagger code generator
    program; edit with care.
    """

    # attribute name -> declared swagger type
    swagger_types = {
        'bulk': 'list[BulkElement]'
    }

    # attribute name -> JSON key in the API definition
    attribute_map = {
        'bulk': 'bulk'
    }

    def __init__(self, bulk=None):  # noqa: E501
        """WorkBulkV30Rc2 - a model defined in Swagger"""  # noqa: E501
        self._bulk = None
        self.discriminator = None
        if bulk is not None:
            self.bulk = bulk

    @property
    def bulk(self):
        """The bulk payload of this WorkBulkV30Rc2.  # noqa: E501

        :return: The bulk of this WorkBulkV30Rc2.  # noqa: E501
        :rtype: list[BulkElement]
        """
        return self._bulk

    @bulk.setter
    def bulk(self, bulk):
        """Set the bulk payload of this WorkBulkV30Rc2.

        :param bulk: The bulk of this WorkBulkV30Rc2.  # noqa: E501
        :type: list[BulkElement]
        """
        self._bulk = bulk

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in six.iterkeys(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recurse into nested models held in lists.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recurse into nested models held in dict values.
                result[attr] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(WorkBulkV30Rc2, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, WorkBulkV30Rc2):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
py
b4116a08394bee273200f3e7dc2ddc2b4011932f
# coding: utf-8 from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 from openapi_server.models.base_model_ import Model from openapi_server.models.rpc_code import RpcCode from openapi_server import util from openapi_server.models.rpc_code import RpcCode # noqa: E501 class Protov1Response(Model): """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually. """ def __init__(self, code=None, message=None): # noqa: E501 """Protov1Response - a model defined in OpenAPI :param code: The code of this Protov1Response. # noqa: E501 :type code: RpcCode :param message: The message of this Protov1Response. # noqa: E501 :type message: str """ self.openapi_types = { 'code': RpcCode, 'message': str } self.attribute_map = { 'code': 'code', 'message': 'message' } self._code = code self._message = message @classmethod def from_dict(cls, dikt) -> 'Protov1Response': """Returns the dict as a model :param dikt: A dict. :type: dict :return: The protov1Response of this Protov1Response. # noqa: E501 :rtype: Protov1Response """ return util.deserialize_model(dikt, cls) @property def code(self): """Gets the code of this Protov1Response. :return: The code of this Protov1Response. :rtype: RpcCode """ return self._code @code.setter def code(self, code): """Sets the code of this Protov1Response. :param code: The code of this Protov1Response. :type code: RpcCode """ self._code = code @property def message(self): """Gets the message of this Protov1Response. :return: The message of this Protov1Response. :rtype: str """ return self._message @message.setter def message(self, message): """Sets the message of this Protov1Response. :param message: The message of this Protov1Response. :type message: str """ self._message = message
py
b4116a09229446cd60838b3efacbb3b255045d88
"""Dask-backed slide classifier.

Predictions are assembled lazily as dask arrays via ``da.map_blocks`` and
are only materialised when the resulting :class:`Mask` is computed (or when
``compute_mask`` is set on the classifier).
"""
import logging
from datetime import datetime as dt

import dask.array as da
import numpy as np
from dask import delayed

from slaid.classifiers.base import BasicClassifier
from slaid.commons import BasicSlide, Filter, NapariSlide
from slaid.commons.dask import Mask
from slaid.models.base import Model

logger = logging.getLogger('dask')
logger.setLevel(logging.DEBUG)


class Classifier(BasicClassifier):
    """Classifier that schedules model prediction over dask array blocks."""

    # Mask implementation returned by the base class' _get_mask.
    MASK_CLASS = Mask

    def __init__(self, model: Model, feature: str, compute_mask: bool = False):
        """
        :param model: prediction model (see slaid.models.base.Model).
        :param feature: feature name, forwarded to BasicClassifier.
        :param compute_mask: when True, eagerly run the dask graph
            (mask.compute()) before classify() returns.
        """
        super().__init__(model, feature)
        self.compute_mask = compute_mask

    def classify(self,
                 slide: NapariSlide,
                 filter_=None,
                 threshold: float = None,
                 level: int = 2,
                 round_to_0_100: bool = True,
                 chunk=None) -> Mask:
        """Classify ``slide`` at pyramid ``level`` and return a Mask.

        Dispatches to patch-wise prediction when the classifier has a
        patch size, otherwise to per-pixel (batch) prediction.  The
        optional ``filter_`` restricts prediction to selected regions;
        ``threshold`` and ``round_to_0_100`` post-process the raw
        predictions via the BasicClassifier helpers.
        """
        logger.info('patch size %s', self._patch_size)
        if self._patch_size:
            predictions = self._classify_patches(slide, self._patch_size,
                                                 level, filter_, threshold,
                                                 round_to_0_100, chunk)
        else:
            predictions = self._classify_batches(slide, level, filter_, chunk)
        predictions = self._threshold(predictions, threshold)
        predictions = self._round_to_0_100(predictions, round_to_0_100)
        mask = self._get_mask(slide, predictions, level,
                              slide.level_downsamples[level], round_to_0_100)
        if self.compute_mask:
            # Force evaluation of the lazy dask graph now.
            mask.compute()
        return mask

    def _classify_batches(self, slide, level, filter_, chunk):
        # Per-pixel classification; choose the filtered/unfiltered path.
        slide_array = self._get_slide_array(slide, level)
        if filter_ is None:
            return self._classify_batches_no_filter(slide_array, chunk)
        else:
            return self._classify_batches_with_filter(slide, slide_array,
                                                      level, filter_, chunk)

    def _predict_batch(self, array, model):
        # Flatten one (h, w, 3) block to (h*w, 3) pixels, predict, and
        # reshape the scores back to the block's spatial shape.
        n_px = array.shape[0] * array.shape[1]
        p = model.predict(array.reshape((n_px, 3))).reshape(array.shape[:2])
        return p

    def _classify_batches_no_filter(self, slide_array, _chunk):
        # Map prediction over every block; delayed(self._model) keeps the
        # model out of the task graph until workers need it.
        # drop_axis=2 removes the RGB channel axis from the output.
        prediction = slide_array.array.map_blocks(self._predict_batch,
                                                  delayed(self._model),
                                                  meta=np.array(
                                                      (), dtype='float32'),
                                                  drop_axis=2)
        return prediction

    def _classify_batches_with_filter(self, slide, slide_array, level,
                                      filter_, chunk):
        # Scale factor between slide pixels and filter cells
        # (assumes the slide size is an exact multiple of the filter
        # shape -- TODO confirm with Filter producers).
        size = slide_array.size
        scale = (size[0] // filter_.array.shape[0],
                 size[1] // filter_.array.shape[1])
        filter_array = da.from_array(filter_.array, chunks=20)
        # Upscale each filter chunk's extent so the expanded filter's
        # chunking matches the slide-level resolution.
        chunks = []
        for i, _chunk in enumerate(filter_array.chunks):
            chunks.append([c * scale[i] for c in _chunk])
        # Nearest-neighbour upsample of the filter to slide resolution.
        filter_array = da.map_blocks(
            lambda x, scale: x.repeat(scale[0], 0).repeat(scale[1], 1),
            filter_array,
            scale,
            meta=np.array([], dtype='float32'),
            chunks=chunks)
        # Workers re-open the slide by filename (see _predict_with_filter),
        # so only picklable arguments go into the graph.
        predictions = da.map_blocks(self._predict_with_filter,
                                    filter_array,
                                    delayed(self.model),
                                    slide.filename,
                                    type(slide._slide),
                                    self.model.image_info,
                                    level,
                                    meta=np.array([], dtype='float32'))
        return predictions

    def _predict_with_filter(self,
                             filter_array,
                             model,
                             slide_filename,
                             slide_cls,
                             image_info,
                             level,
                             block_info=None):
        # Runs on a worker for one block of the upscaled filter.
        res = np.zeros(filter_array.shape, dtype='float32')
        if (filter_array == 0).all():
            # Nothing selected in this block: skip slide I/O entirely.
            return res
        # block_info carries this block's location in the full array;
        # reversed because dask is (row, col) while read_region wants
        # (x, y) -- NOTE(review): confirm against slide_cls.read_region.
        loc = block_info[0]['array-location'][::-1]
        slide = slide_cls(slide_filename)
        data = slide.read_region((loc[0][0], loc[1][0]), level,
                                 (loc[0][1] - loc[0][0],
                                  loc[1][1] - loc[1][0])).to_array(image_info)
        # Predict only the filtered pixels, then scatter results back.
        predictions = model.predict(data[filter_array])
        res[filter_array] = predictions
        return res

    def _classify_patches(self,
                          slide: BasicSlide,
                          patch_size,
                          level,
                          filter_: Filter,
                          threshold,
                          round_to_0_100: bool = True,
                          chunk=None) -> Mask:
        # Patch-wise classification: one prediction per patch.
        slide_array = self._get_slide_array(slide, level).array
        # Trim the slide so both spatial dims are exact multiples of the
        # patch size (partial edge patches are dropped).
        shape_1 = slide_array.shape[1] - (slide_array.shape[1] %
                                          self._patch_size[0])
        shape_2 = slide_array.shape[2] - (slide_array.shape[2] %
                                          self._patch_size[1])
        slide_array = slide_array[:, :shape_1, :shape_2]

        # Output chunking: one output element per patch, mirroring the
        # slide array's chunk layout divided by the patch size.
        chunks = []
        for axis in range(2):
            slide_chunks = np.array(
                slide_array.chunks[axis + 1]) // self._patch_size[axis]
            chunks.append(slide_chunks.tolist())
        if filter_ is None:
            predictions = da.map_blocks(self._predict_patches,
                                        slide_array,
                                        delayed(self._model),
                                        drop_axis=0,
                                        meta=np.array([], dtype='float32'),
                                        chunks=chunks)
        else:
            chunks = 20
            #  chunks = chunks if min(filter_.array.shape +
            #                         chunks) > chunks[0] else 'auto'
            filter_array = da.from_array(filter_.array, chunks=chunks)
            # As in the batch path, workers reopen the slide by filename.
            predictions = da.map_blocks(self._predict_patch_with_filter,
                                        filter_array,
                                        delayed(self.model),
                                        slide.filename,
                                        type(slide._slide),
                                        level,
                                        meta=np.array([], dtype='float32'))
        return predictions

    def _predict_patch_with_filter(self,
                                   filter_array,
                                   model,
                                   slide_filename,
                                   slide_cls,
                                   level,
                                   block_info=None):
        # Worker-side: one filter block, one prediction per selected patch.
        predictions = np.zeros(filter_array.shape, dtype='float32')
        if np.count_nonzero(filter_array) == 0:
            return predictions

        # Block location in filter (=patch) coordinates, reversed to
        # (x, y); converted to pixel coordinates via the patch size.
        loc = block_info[0]['array-location'][::-1]
        slide = slide_cls(slide_filename)
        pos = (loc[0][0] * self._patch_size[0],
               loc[1][0] * self._patch_size[1])
        size = ((loc[0][1] - loc[0][0]) * self._patch_size[0],
                (loc[1][1] - loc[1][0]) * self._patch_size[1])
        data = slide.read_region(pos, level,
                                 size).to_array(self.model.image_info)
        # Cut the region into a stack of patches (row-major order, which
        # matches filter_array.flatten() below).
        data = np.concatenate([
            np.split(row, data.shape[2] // self._patch_size[1], 2)
            for row in np.split(data, data.shape[1] // self._patch_size[0], 1)
        ])
        filtered_predictions = model.predict(data[filter_array.flatten()])
        predictions[filter_array] = filtered_predictions
        return predictions

    def _predict_patches(self, chunk, model):
        # Split one slide chunk into patches, predict each, and return a
        # (rows, cols) grid of patch scores.
        final_shape = (chunk.shape[1] // self._patch_size[0],
                       chunk.shape[2] // self._patch_size[1])
        data = np.concatenate([
            np.split(row, final_shape[1], 2)
            for row in np.split(chunk, final_shape[0], 1)
        ])
        predictions = model.predict(data).reshape(final_shape)
        return predictions

    @staticmethod
    def _get_zeros(size, dtype):
        # dask-backed zeros, overriding the base class' allocator.
        return da.zeros(size, dtype=dtype)

    @staticmethod
    def _concatenate(seq, axis):
        #  seq = [el for el in seq if el.size]
        return da.concatenate(seq, axis)

    @staticmethod
    def _reshape(array, shape):
        return da.reshape(array, shape)


def _rescale(array, scale, block_info=None):
    """Nearest-neighbour upscale of a 2-D boolean block by ``scale``.

    Each input element is repeated over a (scale[0], scale[1]) patch; the
    output gains a trailing singleton channel axis.
    """
    #  logger.debug('scale %s', scale)
    #  logger.debug('array.shape %s', array.shape)
    res = np.zeros((array.shape[0] * scale[0], array.shape[1] * scale[1], 1),
                   dtype='bool')
    for x in range(array.shape[0]):
        for y in range(array.shape[1]):
            res[x * scale[0]:x * scale[0] + scale[0],
                y * scale[1]:y * scale[1] + scale[1]] = array[x, y]

    #  res = np.expand_dims(res, 2)
    #  logger.debug('res.shape %s', res.shape)
    return res
py
b4116a79ff90eb13182f05448604fb0a3b6439ef
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math

import numpy as np


class Grids:
    """Tile a large image into overlapping fixed-size grid cells and
    stitch per-cell masks back into a full-size mask."""

    def __init__(self, gridSize=(512, 512), overlap=(24, 24)):
        # TODO: would supporting different height/width for this size be
        # useful?  Perhaps to fill the user's screen?
        # self.sizePair = namedtuple("sizePair", "h w")
        self.gridSize = np.array(gridSize)    # (h, w) of one grid cell
        self.overlap = np.array(overlap)      # (h, w) overlap between cells
        # print("_------------", self.gridSize, self.overlap)
        self.clear()

    def clear(self):
        """Reset all grid state."""
        # Images are HWC format.
        self.rawimg = None    # original remote-sensing/medical image (multi-channel)
        self.detimg = None    # source image the grid is built over
        self.gridInit = False # whether the grid has been initialized
        # self.imagesGrid = []  # image grid cells
        self.masksGrid = []   # mask (label) grid cells
        self.gridCount = None # (row count, col count)
        self.currIdx = None   # (current row, current col)

    def createGrids(self, img):
        """Build the grid layout for ``img`` and allocate empty masks.

        Returns [rows, cols] of the grid.
        """
        if self.detimg is None:
            self.detimg = img.copy()
        # Number of cells per axis; overlap is added so the overlapping
        # cells still cover the whole image.
        imgSize = np.array(img.shape[:2])
        gridCount = np.ceil((imgSize + self.overlap) / self.gridSize)
        self.gridCount = gridCount = gridCount.astype("uint16")
        # ul = self.overlap - self.gridSize
        # for row in range(gridCount[0]):
        #     ul[0] = ul[0] + self.gridSize[0] - self.overlap[0]
        #     for col in range(gridCount[1]):
        #         ul[1] = ul[1] + self.gridSize[1] - self.overlap[1]
        #         lr = ul + self.gridSize
        #         # print("ul, lr", ul, lr)
        #         # pad
        #         det_tmp = self.detimg[ul[0]: lr[0], ul[1]: lr[1]]
        #         tmp = np.zeros((self.gridSize[0], self.gridSize[1], self.detimg.shape[-1]))
        #         tmp[:det_tmp.shape[0], :det_tmp.shape[1], :] = det_tmp
        #         self.imagesGrid.append(tmp)
        # self.masksGrid = [[np.zeros(self.gridSize)] * gridCount[1]] * gridCount[0]
        # A shallow copy must NOT be used here: every cell needs its own array.
        self.masksGrid = [[np.zeros(self.gridSize) for _ in range(gridCount[1])]
                          for _ in range(gridCount[0])]
        # print(len(self.masksGrid), len(self.masksGrid[0]))
        self.gridInit = True
        return list(gridCount)

    def getGrid(self, row, col):
        """Return (image, mask) for cell (row, col) and mark it current.

        Adjacent cells share ``overlap`` pixels; edge cells may be
        smaller than gridSize.
        """
        gridIdx = np.array([row, col])
        # Upper-left corner: cells are stepped by (gridSize - overlap).
        ul = gridIdx * (self.gridSize - self.overlap)
        lr = ul + self.gridSize
        # print("ul, lr", ul, lr)
        img = self.detimg[ul[0]: lr[0], ul[1]: lr[1]]
        mask = self.masksGrid[row][col]
        self.currIdx = (row, col)
        return img, mask

    def splicingList(self):
        """Stitch the per-cell masks back together into one mask.

        The result is cropped to raw_size so the original image extent is
        restored.  In overlapping regions, non-zero values win (the two
        checkerboard canvases are merged with np.where below).
        """
        imgs = self.masksGrid
        # print(len(imgs), len(imgs[0]))
        raw_size = self.detimg.shape[:2]
        # h, w = None, None
        # for i in range(len(imgs)):
        #     for j in range(len(imgs[i])):
        #         im = imgs[i][j]
        #         if im is not None:
        #             h, w = im.shape[:2]
        #             break
        # if h is None and w is None:
        #     return False
        h, w = self.gridSize
        row = math.ceil(raw_size[0] / h)
        col = math.ceil(raw_size[1] / w)
        # print('row, col:', row, col)
        # Two canvases in checkerboard order so neighbouring (overlapping)
        # cells never write to the same canvas.
        result_1 = np.zeros((h * row, w * col), dtype=np.uint8)
        result_2 = result_1.copy()
        # k = 0
        for i in range(row):
            for j in range(col):
                # print('h, w:', h, w)
                # Pad edge cells (which may be smaller) up to gridSize.
                ih, iw = imgs[i][j].shape[:2]
                im = np.zeros(self.gridSize)
                im[:ih, :iw] = imgs[i][j]
                start_h = (i * h) if i == 0 else (i * (h - self.overlap[0]))
                end_h = start_h + h
                start_w = (j * w) if j == 0 else (j * (w - self.overlap[1]))
                end_w = start_w + w
                # print("se: ", start_h, end_h, start_w, end_w)
                # Each cell writes its own region; overlaps resolved by OR
                # (non-zero wins) when the canvases are merged below.
                if (i + j) % 2 == 0:
                    result_1[start_h: end_h, start_w: end_w] = im
                else:
                    result_2[start_h: end_h, start_w: end_w] = im
                # k += 1
                # print('r, c, k:', i_r, i_c, k)
        result = np.where(result_2 != 0, result_2, result_1)
        return result[:raw_size[0], :raw_size[1]]


# g = Grids()
# g.getGrid(0, 1)

# def sliceImage(self, row, col):
#     """
#     Given the input image [h, w, C], the row/col counts and an index,
#     output the corresponding image tiles.
#     index (list)
#     """
#     bimg = self.detimg
#     h, w = bimg.shape[:2]
#     c_size = [math.ceil(h / row), math.ceil(w / col)]
#     # pad for the remainder plus the overlap region
#     h_new = row * c_size[0] + self.overlap
#     w_new = col * c_size[1] + self.overlap
#     # new image
#     tmp = np.zeros((h_new, w_new, bimg.shape[-1]))
#     tmp[: bimg.shape[0], : bimg.shape[1], :] = bimg
#     h, w = tmp.shape[:2]
#     cell_h = c_size[0]
#     cell_w = c_size[1]
#     # start tiling
#     result = []
#     for i in range(row):
#         for j in range(col):
#             start_h = i * cell_h
#             end_h = start_h + cell_h + self.overlap
#             start_w = j * cell_w
#             end_w = start_w + cell_w + self.overlap
#             result.append(tmp[start_h:end_h, start_w:end_w, :])
#     # for r in result:
#     #     print(r.shape)
#     return result