""" flask_konch ~~~~~~~~~~~ An improved shell commmand for the Flask CLI. """ __version__ = "2.0.0" __all__ = ["EXTENSION_NAME"] EXTENSION_NAME = "flask-konch"
# https://baike.baidu.com/item/%E5%BF%AB%E9%80%9F%E5%B9%82  (fast exponentiation)
# 11 = 2^0 + 2^1 + 2^3
# a^11 = a^(2^0) * a^(2^1) * a^(2^3)
class Solution:
    def myPow(self, x: float, n: int) -> float:
        N = n
        if N < 0:
            x = 1 / x
            N = -N
        ans = 1
        current_product = x
        while N > 0:
            if N % 2 == 1:
                ans = ans * current_product
            current_product = current_product * current_product
            N //= 2
        return ans
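# Usage sketch (added for illustration, not part of the original snippet);
# the inputs are assumptions chosen to show the binary-exponentiation trick.
if __name__ == "__main__":
    solver = Solution()
    print(solver.myPow(2.0, 10))   # 1024.0
    print(solver.myPow(2.0, -2))   # 0.25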
# Time complexity: O(n^2)
# Space complexity: O(1)
def bubble_sort(arr):
    current = 0
    next = 1
    last_index = len(arr)
    while last_index >= next:
        if arr[current] > arr[next]:
            arr[current], arr[next] = arr[next], arr[current]
        current += 1
        next += 1
        if next == last_index:
            current = 0
            next = 1
            last_index -= 1


if __name__ == '__main__':
    arr = [2, 3, 5, 6, 1]
    bubble_sort(arr)
    print(arr)
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        # Copy / paste from the "fastest" solution.
        # It's sort of beautiful in its simplicity, if wildly esoteric.
        # Basically the same thing as the hacky failsafe in my first solution;
        # compare the number of unique characters in each string with the
        # number of unique characters in a zip of both strings. If there's a
        # mismatch in any of them, they can't be isomorphic.
        return len(set(zip(s, t))) == len(set(s)) == len(set(t))
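# Usage sketch (added, not part of the original solution); the test strings
# are assumptions taken from the classic problem statement.
if __name__ == "__main__":
    solver = Solution()
    print(solver.isIsomorphic("egg", "add"))      # True
    print(solver.isIsomorphic("foo", "bar"))      # False
    print(solver.isIsomorphic("paper", "title"))  # True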
# Sieve of Eratosthenes
# Original: https://github.com/nquidox/learn2python. Streamed on 8-Bit Tea Party.

# upper limit for the script
limit = 16
it = 0

# assume every list index is a prime number to start with
numbers_list = [True] * limit
# and immediately discard zero and one
numbers_list[0] = False
numbers_list[1] = False

# sieve all numbers from 2 up to the limit
for number in range(2, limit):
    if numbers_list[number]:
        for i in range(2 * number, limit, number):
            it += 1
            numbers_list[i] = False

# build a list containing only the primes
prime_numbers = []
for number in range(limit):
    if numbers_list[number]:
        prime_numbers.append(number)

print("Prime numbers to", limit, ":", prime_numbers)
print("Iterations:", it)

# Alternative version: mark composites with 0 in a plain list of integers.
limit = 16
it = 0
nums = list(range(0, limit + 1))
print("\nSource list all numbers:", nums)

i = 2
while i * i <= limit:
    if nums[i] != 0:
        j = i * i
        while j <= limit:
            it += 1
            nums[j] = 0
            j += i
    i += 1

nums.remove(1)
while 0 in nums:
    nums.remove(0)

print("Prime numbers to", limit, ":", nums)
print("Iterations:", it)
# # PySNMP MIB module APDNSALG-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APDNSALG-MIB # Produced by pysmi-0.3.4 at Wed May 1 11:23:12 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # acmepacketMgmt, = mibBuilder.importSymbols("ACMEPACKET-SMI", "acmepacketMgmt") ApTransportType, ApHardwareModuleFamily, ApRedundancyState = mibBuilder.importSymbols("ACMEPACKET-TC", "ApTransportType", "ApHardwareModuleFamily", "ApRedundancyState") SysMgmtPercentage, = mibBuilder.importSymbols("APSYSMGMT-MIB", "SysMgmtPercentage") ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion") ifIndex, InterfaceIndexOrZero, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "ifIndex", "InterfaceIndexOrZero", "InterfaceIndex") InetZoneIndex, InetAddressPrefixLength, InetVersion, InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetZoneIndex", "InetAddressPrefixLength", "InetVersion", "InetAddressType", "InetAddress") ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup") MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, IpAddress, Bits, Counter64, Integer32, Counter32, Unsigned32, Gauge32, TimeTicks, NotificationType, ObjectIdentity, ModuleIdentity, iso = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "IpAddress", "Bits", "Counter64", "Integer32", "Counter32", "Unsigned32", "Gauge32", "TimeTicks", "NotificationType", "ObjectIdentity", "ModuleIdentity", "iso") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") apDNSALGModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 9148, 3, 14)) if mibBuilder.loadTexts: apDNSALGModule.setLastUpdated('201106080000Z') if mibBuilder.loadTexts: apDNSALGModule.setOrganization('Acme Packet, Inc') if mibBuilder.loadTexts: apDNSALGModule.setContactInfo(' Customer Service Postal: Acme Packet, Inc 100 Crosby Drive Bedford, MA 01730 US Tel: 1-781-328-4400 E-mail: [email protected]') if mibBuilder.loadTexts: apDNSALGModule.setDescription('The Dns Alg MIB for Acme Packet.') apDNSALGMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1)) apDNSALGMIBGeneralObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 1)) apDNSALGMIBTabularObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2)) apDNSALGNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2)) apDNSALGNotifObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1)) apDNSALGNotifPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2)) apDNSALGNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0)) apDNSALGConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3)) apDNSALGObjectGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1)) apDNSALGNotificationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2)) apDNSALGServerStatusTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 
14, 1, 2, 1), ) if mibBuilder.loadTexts: apDNSALGServerStatusTable.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerStatusTable.setDescription('A read-only table to hold the status of configured DNSALG servers, indexed by the name of the Dns alg config name, server realm and server IP.') apDNSALGServerStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDNSALGConfigIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIndex"), (0, "APDNSALG-MIB", "apDNSALGServerIpAddress")) if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerStatusEntry.setDescription('An entry designed to hold the status of a single DNSALG server') apDNSALGConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: apDNSALGConfigIndex.setStatus('current') if mibBuilder.loadTexts: apDNSALGConfigIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.') apDNSALGServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: apDNSALGServerIndex.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerIndex.setDescription('An integer for the sole purpose of indexing the Dns Server Attributes in a DNS-ALG config. Each DNS-ALG config can have multiple Dns Server Attributes.') apDNSALGConfigName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: apDNSALGConfigName.setStatus('current') if mibBuilder.loadTexts: apDNSALGConfigName.setDescription('The name of the dns-alg-config element that contains this DNS-ALG server.') apDNSALGServerRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: apDNSALGServerRealm.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerRealm.setDescription('The name of the server realm element that contains this DNSALG server.') apDNSALGDomainSuffix = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: apDNSALGDomainSuffix.setStatus('current') if mibBuilder.loadTexts: apDNSALGDomainSuffix.setDescription('The name of the domain suffix element that contains this DNSALG server.') apDNSALGServerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 7), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDNSALGServerIpAddress.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerIpAddress.setDescription('The IP address of this DNSALG server.') apDNSALGServerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("inservice", 0), ("lowerpriority", 1), ("oosunreachable", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: apDNSALGServerStatus.setStatus('current') if mibBuilder.loadTexts: apDNSALGServerStatus.setDescription('The status of this DNSALG server.') 
apDNSALGStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2), ) if mibBuilder.loadTexts: apDNSALGStatsTable.setStatus('current') if mibBuilder.loadTexts: apDNSALGStatsTable.setDescription('per DNS-ALG config(i.e.client realm)stats.') apDnsALGStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1), ).setIndexNames((0, "APDNSALG-MIB", "apDnsAlgClientRealmIndex")) if mibBuilder.loadTexts: apDnsALGStatsEntry.setStatus('current') if mibBuilder.loadTexts: apDnsALGStatsEntry.setDescription('A table entry designed to hold DNS-ALG stats data') apDnsAlgClientRealmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setStatus('current') if mibBuilder.loadTexts: apDnsAlgClientRealmIndex.setDescription('An integer for the sole purpose of indexing the DNS-ALG configuration.Only one DNS-ALG configuration is allowed per a realm.') apDnsAlgClientRealmName = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgClientRealmName.setStatus('current') if mibBuilder.loadTexts: apDnsAlgClientRealmName.setDescription('DNS-ALG Config realm name') apDnsAlgCurrentQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 3), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentQueries.setDescription('Number of queries sent in recent period received on DNS-ALG config realm.') apDnsAlgTotalQueries = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalQueries.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalQueries.setDescription('Total number of queries sent in life time received on DNS-ALG config realm.') apDnsAlgCurrentSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 5), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentSucess.setDescription('Number of success responses in recent period received on DNS-ALG config realm.') apDnsAlgTotalSucess = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalSucess.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalSucess.setDescription('Total number of success responses in life time received on DNS-ALG config realm.') apDnsAlgCurrentNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 7), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentNotFound.setDescription('Number of not-found responses in recent period received on DNS-ALG config realm.') apDnsAlgTotalNotFound = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalNotFound.setDescription('Total number of not-found responses in life time received on DNS-ALG config realm.') apDnsAlgCurrentTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 9), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
apDnsAlgCurrentTimeOut.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentTimeOut.setDescription('Number of time out responses in recent period received on DNS-ALG config realm.') apDnsAlgTotalTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalTimeOut.setDescription('Total number of time out responses in life time received on DNS-ALG config realm') apDnsAlgCurrentBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 11), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentBadStatus.setDescription('Number of bad status responses in recent period received on DNS-ALG config realm.') apDnsAlgTotalBadStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalBadStatus.setDescription('Total number of bad status responses in life time received on DNS-ALG config realm.') apDnsAlgCurrentOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 13), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setStatus('current') if mibBuilder.loadTexts: apDnsAlgCurrentOtherFailures.setDescription('Number of other failure responses in recent period received on DNS-ALG config realm.') apDnsAlgTotalOtherFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setStatus('current') if mibBuilder.loadTexts: apDnsAlgTotalOtherFailures.setDescription('Total number of other failure responses in life time received on DNS-ALG config realm.') apDnsAlgAvgLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 15), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgAvgLatency.setStatus('current') if mibBuilder.loadTexts: apDnsAlgAvgLatency.setDescription('Average observed one-way signalling latency during the period in milliseconds') apDnsAlgMaxLatency = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 16), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgMaxLatency.setStatus('current') if mibBuilder.loadTexts: apDnsAlgMaxLatency.setDescription('Maximum observed one-way signalling latency during the period in milliseconds') apDnsAlgMaxBurstRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9148, 3, 14, 1, 2, 2, 1, 17), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setStatus('current') if mibBuilder.loadTexts: apDnsAlgMaxBurstRate.setDescription('Maximum burst rate of traffic measured during the period (combined inbound and outbound)') apDNSALGConstraintsStatus = MibScalar((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("inservice", 0), ("constraintsExceeded", 1)))).setMaxAccess("accessiblefornotify") if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setStatus('current') if mibBuilder.loadTexts: apDNSALGConstraintsStatus.setDescription('The status of this DNS-ALG config realm for constraints.') apDnsAlgStatusChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 1)).setObjects(("APDNSALG-MIB", 
"apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus")) if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatusChangeTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from In-Service to either Timed out or Out of Service.') apDnsAlgStatusChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 2)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus")) if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatusChangeClearTrap.setDescription(' The trap will be generated if the reachability status of an DNS-ALG server changes from either Timed out or Out of Service to In-Service') apDnsAlgConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 3)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus")) if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from inservice to constraintsExceeded.") apDnsAlgConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 4)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus")) if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgConstraintStateChangeClearTrap.setDescription(" The trap will be generated if an DNS-ALG config's constriants state changed from constraintsExceeded to inservice.") apDnsAlgSvrConstraintStateChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 5)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus")) if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from inservice to constraintsExceeded.') apDnsAlgSvrConstraintStateChangeClearTrap = NotificationType((1, 3, 6, 1, 4, 1, 9148, 3, 14, 2, 2, 0, 6)).setObjects(("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGConstraintsStatus")) if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setStatus('current') if mibBuilder.loadTexts: apDnsAlgSvrConstraintStateChangeClearTrap.setDescription(' The trap will be generated if an Dns Server(i.e.IP-Address) constriants state changed from constraintsExceeded to inservice.') apDnsAlgServerStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 1)).setObjects(("APDNSALG-MIB", "apDNSALGConfigIndex"), ("APDNSALG-MIB", "apDNSALGServerIndex"), ("APDNSALG-MIB", "apDNSALGConfigName"), ("APDNSALG-MIB", "apDNSALGServerRealm"), ("APDNSALG-MIB", "apDNSALGDomainSuffix"), ("APDNSALG-MIB", "apDNSALGServerIpAddress"), ("APDNSALG-MIB", "apDNSALGServerStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): 
apDnsAlgServerStatusGroup = apDnsAlgServerStatusGroup.setStatus('current') if mibBuilder.loadTexts: apDnsAlgServerStatusGroup.setDescription('A collection of statistics for DNS-ALG server status.') apDnsAlgStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 1, 2)).setObjects(("APDNSALG-MIB", "apDnsAlgClientRealmIndex"), ("APDNSALG-MIB", "apDnsAlgClientRealmName"), ("APDNSALG-MIB", "apDnsAlgCurrentQueries"), ("APDNSALG-MIB", "apDnsAlgTotalQueries"), ("APDNSALG-MIB", "apDnsAlgCurrentSucess"), ("APDNSALG-MIB", "apDnsAlgTotalSucess"), ("APDNSALG-MIB", "apDnsAlgCurrentNotFound"), ("APDNSALG-MIB", "apDnsAlgTotalNotFound"), ("APDNSALG-MIB", "apDnsAlgCurrentTimeOut"), ("APDNSALG-MIB", "apDnsAlgTotalTimeOut"), ("APDNSALG-MIB", "apDnsAlgCurrentBadStatus"), ("APDNSALG-MIB", "apDnsAlgTotalBadStatus"), ("APDNSALG-MIB", "apDnsAlgCurrentOtherFailures"), ("APDNSALG-MIB", "apDnsAlgTotalOtherFailures"), ("APDNSALG-MIB", "apDnsAlgAvgLatency"), ("APDNSALG-MIB", "apDnsAlgMaxLatency"), ("APDNSALG-MIB", "apDnsAlgMaxBurstRate")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): apDnsAlgStatsGroup = apDnsAlgStatsGroup.setStatus('current') if mibBuilder.loadTexts: apDnsAlgStatsGroup.setDescription('Report the stats of configured DNSALG config objects.') apDNSALGNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9148, 3, 14, 3, 2, 1)).setObjects(("APDNSALG-MIB", "apDnsAlgStatusChangeTrap"), ("APDNSALG-MIB", "apDnsAlgStatusChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgConstraintStateChangeClearTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeTrap"), ("APDNSALG-MIB", "apDnsAlgSvrConstraintStateChangeClearTrap")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): apDNSALGNotificationsGroup = apDNSALGNotificationsGroup.setStatus('current') if mibBuilder.loadTexts: apDNSALGNotificationsGroup.setDescription('A collection of mib objects accessible only to traps.') mibBuilder.exportSymbols("APDNSALG-MIB", apDnsAlgTotalNotFound=apDnsAlgTotalNotFound, apDnsAlgConstraintStateChangeClearTrap=apDnsAlgConstraintStateChangeClearTrap, apDnsAlgStatusChangeTrap=apDnsAlgStatusChangeTrap, apDnsAlgTotalTimeOut=apDnsAlgTotalTimeOut, apDnsAlgStatsGroup=apDnsAlgStatsGroup, apDnsALGStatsEntry=apDnsALGStatsEntry, apDNSALGMIBGeneralObjects=apDNSALGMIBGeneralObjects, apDnsAlgTotalSucess=apDnsAlgTotalSucess, apDNSALGServerStatusEntry=apDNSALGServerStatusEntry, apDNSALGNotificationsGroup=apDNSALGNotificationsGroup, apDNSALGConstraintsStatus=apDNSALGConstraintsStatus, apDnsAlgConstraintStateChangeTrap=apDnsAlgConstraintStateChangeTrap, apDNSALGServerRealm=apDNSALGServerRealm, apDnsAlgTotalBadStatus=apDnsAlgTotalBadStatus, apDNSALGObjectGroups=apDNSALGObjectGroups, apDNSALGConfigName=apDNSALGConfigName, apDnsAlgMaxLatency=apDnsAlgMaxLatency, PYSNMP_MODULE_ID=apDNSALGModule, apDNSALGMIBTabularObjects=apDNSALGMIBTabularObjects, apDnsAlgTotalOtherFailures=apDnsAlgTotalOtherFailures, apDNSALGConfigIndex=apDNSALGConfigIndex, apDnsAlgStatusChangeClearTrap=apDnsAlgStatusChangeClearTrap, apDNSALGStatsTable=apDNSALGStatsTable, apDnsAlgClientRealmIndex=apDnsAlgClientRealmIndex, apDnsAlgSvrConstraintStateChangeTrap=apDnsAlgSvrConstraintStateChangeTrap, apDNSALGNotifications=apDNSALGNotifications, apDNSALGConformance=apDNSALGConformance, apDnsAlgCurrentNotFound=apDnsAlgCurrentNotFound, apDNSALGNotifPrefix=apDNSALGNotifPrefix, apDnsAlgMaxBurstRate=apDnsAlgMaxBurstRate, apDNSALGMIBObjects=apDNSALGMIBObjects, apDnsAlgAvgLatency=apDnsAlgAvgLatency, 
apDnsAlgServerStatusGroup=apDnsAlgServerStatusGroup, apDNSALGNotificationObjects=apDNSALGNotificationObjects, apDNSALGNotificationGroups=apDNSALGNotificationGroups, apDnsAlgCurrentOtherFailures=apDnsAlgCurrentOtherFailures, apDnsAlgClientRealmName=apDnsAlgClientRealmName, apDNSALGNotifObjects=apDNSALGNotifObjects, apDNSALGServerStatus=apDNSALGServerStatus, apDnsAlgCurrentSucess=apDnsAlgCurrentSucess, apDNSALGServerStatusTable=apDNSALGServerStatusTable, apDnsAlgSvrConstraintStateChangeClearTrap=apDnsAlgSvrConstraintStateChangeClearTrap, apDnsAlgCurrentQueries=apDnsAlgCurrentQueries, apDnsAlgCurrentBadStatus=apDnsAlgCurrentBadStatus, apDnsAlgCurrentTimeOut=apDnsAlgCurrentTimeOut, apDNSALGServerIpAddress=apDNSALGServerIpAddress, apDNSALGModule=apDNSALGModule, apDNSALGDomainSuffix=apDNSALGDomainSuffix, apDnsAlgTotalQueries=apDnsAlgTotalQueries, apDNSALGServerIndex=apDNSALGServerIndex)
''' /****************************************************************** * * Copyright 2018 Samsung Electronics All Rights Reserved. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************/ ''' class TestRunOption: max_total_count = 3 min_pass_count = 1 max_timeout_count = 2 XML_PASS_CRITERIA = 'xml' LOG_PASS_CRITERIA = 'log' def __init__(self, binary_name, suite_name, tc_name, package_name): self.binary_name = binary_name self.suite_name = suite_name self.tc_name = tc_name self.package_name = package_name self.total_count = 0 self.pass_count = 0 self.fail_count = 0 self.timeout_count = 0 def increase_total_count(self): self.total_count += 1 def increase_pass_count(self): self.pass_count += 1 def increase_fail_count(self): self.fail_count += 1 def increase_timeout_count(self): self.timeout_count += 1 def is_execution_complete(self): if self.pass_count >= TestRunOption.min_pass_count or self.timeout_count >= TestRunOption.max_timeout_count or self.total_count >= TestRunOption.max_total_count: return True return False
# -*- coding: utf-8 -*-

# Email addresses of the subscribers
RECEIVER = ['**@**']

# Username and password for the information platform
USER_ID = "SA****"
USER_PWD = "***"

# Whether currently on an internship; used for the monthly internship report
# reminder. A per-receiver dict could be added later, but it is not needed yet.
IS_INTERNSHIP = True

# SMTP server settings (sender mailbox)
# I use a 163.com mailbox
# See: https://blog.csdn.net/qlzy_5418/article/details/86661883
HOST = "smtp.163.com"   # SMTP server host
SENDER = "***@163.com"  # SENDER is your own email address
SMTP_PWD = "***"        # SMTP authorization code, enabled in the mailbox settings; it is NOT the login password!
# -*- coding: utf-8 -*-
{
    'name': "onesphere_mdm",

    'summary': """
        MOM master data management module
    """,

    'description': """
        Long description of module's purpose
    """,

    'author': "上海文享信息科技有限公司",
    'website': "http://www.oneshare.com.cn",

    # Categories can be used to filter modules in modules listing
    # Check https://github.com/odoo/odoo/blob/14.0/odoo/addons/base/data/ir_module_category_data.xml
    # for the full list
    'category': 'Manufacturing/Manufacturing',
    'version': '14.0.10.1',

    # any module necessary for this one to work correctly
    'depends': ['mrp', 'maintenance', 'onesphere_core'],

    # always loaded
    'data': [
        'security/ir.model.access.csv',
        'data/work_area_category_data.xml',
        'data/maintenance_category_data.xml',
        'views/assets.xml',
        'views/equipment_connection_views.xml',
        'views/maintenance_views.xml',
        'views/product_views.xml',
        'views/mrp_workcenter_views.xml',
        'views/mrp_routing_workcenter_views.xml',
        'views/work_area_views.xml',
        'views/mrp_workcenter_group_views.xml',
        'views/mdm_menu_views.xml',
        'views/res_config_settings_views.xml',
    ],
    'demo': [
        'data/mrp_demo.xml',
    ],
    'post_init_hook': 'create_related_work_station_area_hook',
}
hpp = 'AL-Import' # Specify the name of the hpp to print the graph graph_title='EM- Total Impact of the energy maximization scenario on '+ hpp df_em2 = df_em1.groupby(['scenario'])['value'].sum().round(2).reset_index() fig5c = px.bar(df_em2, x='scenario', y='value', text= 'value', color='scenario',barmode='group', labels={"value": "GWh", "tech":"HPP"}, title=graph_title, category_orders={"scenario": ["Reference", "Energy Max"]}, facet_col_spacing=0.05, facet_row_spacing=0.05) #fig.for_each_annotation(lambda a: a.update(text=a.text.split("=")[-1])) fig5c.update_traces(texttemplate='%{text:.5s}', textposition='outside') #to format the text on each bar #fig.update_layout(uniformtext_minsize=7, uniformtext_mode='hide') #to format the text on each bar #fig.update_yaxes(range=[0, 2300]) #setting the y-axis scale to ensure enough space for the text on each bar #fig.update_xaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on x-axis #fig.update_yaxes(showline=True, linewidth=2, linecolor='black', mirror=True) #drawing the border on y-axis #You can change the image extension to *.png if you want or keep it as pdf (for high resolution) #output_folder = os.path.join('Results_graphics') #os.makedirs(output_folder, exist_ok = True) #pio.write_image(fig, 'Results_graphics/{}.pdf'.format(graph_title)) #fig.show()
class MyModule():
    def my_function():
        pass


def main():
    """The main entrypoint for this script

    Used in the setup.py file
    """
    MyModule.my_function()


if __name__ == '__main__':
    main()
class MessageTypeNotSupported(Exception):
    pass


class MessageDoesNotExist(Exception):
    pass
# https://www.codechef.com/problems/RAINBOWA
for T in range(int(input())):
    n, l = int(input()), list(map(int, input().split()))
    print("no") if (set(l) != set(list(range(1, 8))) or l[0] != 1 or l[-1] != 1 or l != l[::-1]) else print("yes")
# -*- encoding:utf-8 -*-

__version__ = (1, 2, 11)
__version_str__ = ".".join(map(str, __version__))
__version_core__ = (3, 0, 4)
def to_camel_case(s):
    return ('' if not s else
            s[0] + ''.join(c.upper() if s[::-1][i + 1] in '-_' else '' if c in '-_' else c
                           for i, c in enumerate(s[::-1][:-1]))[::-1])
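# Usage sketch (added); the sample strings are assumptions based on the usual
# to_camel_case kata examples.
if __name__ == "__main__":
    print(to_camel_case("the-stealth-warrior"))  # theStealthWarrior
    print(to_camel_case("The_Stealth_Warrior"))  # TheStealthWarrior
    print(to_camel_case(""))                     # (empty string)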
##NIM, Umur, Tinggi = (211080200045, 18, 170)
##print(NIM, Umur, Tinggi)

angka_positif = 1, 2, 3, 4, 5, 6, 7, 8, 9
print(angka_positif)
GOLD = ["7374", "7857", "7990", "8065", "8250"] ANNOTATORS = ["01", "02", "03", "04", "05", "06"] DOC_HEADER = ["order", "doc_id", "assigned", "nr_sens_calculated", "nr_sens", "annotator_1", "annotator_2", "assigned_2"] CYCLE_FILE = "../input/batch_cycles.csv" CYCLE_COL = "cycle" ASSIGNMENT_TXT = "assignment.txt" ASSIGNMENT_XLSX = "assignment.xlsx" ASSIGNMENT_FILE_HEADER = ["doc_id"] ASSIGNMENT_DF_HEADER_BASE = ["annotator", "assigned_sentences"] ASSIGNMENT_ADDITIONAL_HEADER = ["docs_in_batch", "sentences_in_batch", "sum_sentences"] ANNOTATOR_DOWNLOAD_FOLDER = "download" ANNOTATOR_UPLOAD_FOLDER = "upload" PHASE_STR = "phase" ATTRIBUTES_TO_IGNORE = { "AusnahmePruefungErforderlich", "WeitereBestimmungPruefungErforderlich", "ZuVorherigemSatzGehoerig", "Segmentierungsfehler", "NoAttribute", "N/A", "StrittigeBedeutung", } ############ # Labels review ############ class LabelReviewExcelConstants: MAIN_SHEET_NAME = "Review" ATTRIBUTE_NAMED_RANGE = "Attribute" ATTRIBUTE_REVIEW_NAMED_RANGE = "Attribute_Review" SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review" ERROR_LABEL = "Error" FIRST_DATA_ROW = 2 SEN_ID_COL = 1 SEN_REVIEW_COL = 2 SEN_TEXT_COL = 3 ATTRIBUTE_OFFSET = 4 ATTRIBUTE_STEP = 5 CATEGORY_OFFSET = 0 LABEL_OFFSET = 1 COUNT_OFFSET = 2 ANNOTATORS_OFFSET = 3 ATTRIBUTE_REVIEW_OFFSET = 4 ANNOTATOR_SEPARATOR = "\n" ############ # Full xlsx ############ class FullAnnotationExcelConstants: MAIN_SHEET_NAME = "Data" ATTRIBUTE_NAMED_RANGE = "Attribute" TYPE_NAMED_RANGE = "Type" MODALITY_NAMED_RANGE = "Modality" FIRST_DATA_ROW = 2 SEN_ID_COL = 1 SEN_TEXT_COL = 2 MODALITY_COL = 3 ATTRIBUTE_OFFSET = 4 ATTRIBUTE_STEP = 4 CATEGORY_OFFSET = 0 LABEL_OFFSET = 1 VALUE_OFFSET = 2 TYPE_OFFSET = 3 LAST_COLUMN = "BO1" ############ # Full review ############ class FullReviewExcelConstants: MAIN_SHEET_NAME = "Data" ATTRIBUTE_NAMED_RANGE = "Attribute" TYPE_NAMED_RANGE = "Type" MODALITY_NAMED_RANGE = "Modality" SENTENCE_REVIEW_NAMED_RANGE = "Sentence_Review" ERROR_LABEL = "Error" FIRST_DATA_ROW = 2 SEN_ID_COL = 1 SEN_REVIEW_COL = 2 SEN_TEXT_COL = 3 MODALITY_ANN_1_COL = 4 MODALITY_ANN_2_COL = 5 MODALITY_ANN_REV_COL = 6 ATTRIBUTE_OFFSET = 7 ATTRIBUTE_STEP = 6 CATEGORY_OFFSET = 0 LABEL_OFFSET = 1 VALUE_OFFSET = 2 TYPE_ANN_1_OFFSET = 3 TYPE_ANN_2_OFFSET = 4 TYPE_ANN_REV_OFFSET = 5 LAST_COLUMN = "CX1"
""" Item 29: Avoid Repeated Work in Comprehensions by Using Assignment Expressions """ stock = { 'nails': 125, 'screws': 35, 'wingnuts': 8, 'washers': 24, } order = ['screws', 'wingnuts', 'clips'] def get_batches(count, size): return count // size result = {} for name in order: count = stock.get(name, 0) batches = get_batches(count, 8) if batches: result[name] = batches print(f'result: {result}') # Use a dictionary comprehension to shorten this code. found = {name: get_batches(stock.get(name, 0), 8) for name in order if get_batches(stock.get(name, 0), 8)} print(f'found: {found}') # To avoid the repeated code above we can use the walrus operator. # Note that the assignment is made in the condition since this is evaluated first. # If the assignment is made in the value expression it will cause an NameError. found_better = {name: batches for name in order if (batches := get_batches(stock.get(name, 0), 8))} print(f'found_better: {found}') # One other advantage of the comprehensions is that they avoid the leakage caused by looping. # This example leaks because of the assignment operator. half = [(last := count // 2) for count in stock.values()] print(f'Last item of {half} is {last}') # This example leaks. for count in stock.values(): pass print(f'Last item of {list(stock.values())} is {count}') # This example has a loop variable in a comprehension and does not leak. half = [count_comp // 2 for count_comp in stock.values()] print(f'half = {half}') try: count_comp except NameError: print('Oops! name \'count_comp\' is not defined') # An assignment expression also works with generator expressions found = ((name, batches) for name in order if (batches := get_batches(stock.get(name, 0), 8))) print(f'next(found): {next(found)}') print(f'next(found): {next(found)}')
""" >>> 'dir/bar.py:2' """
class IntegerField:
    def __str__(self):
        return "integer"
class AdministrativeDivision:
    def __init__(self, level):
        self.level = level


class Province(AdministrativeDivision):
    type = 'Province'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 1

    def __str__(self):
        return f"{self.name} {self.type}"


class Regency(AdministrativeDivision):
    type = 'Regency'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return f"{self.name} {self.type}"


class City(AdministrativeDivision):
    type = 'City'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 2

    def __str__(self):
        return f"{self.name} {self.type}"


class District(AdministrativeDivision):
    type = 'District'
    area = 0
    center = ''

    def __init__(self, name):
        self.name = name
        self.level = 3

    def __str__(self):
        return f"{self.name} {self.type}"
# Binary Tree implemented using a Python list
class BinaryTree:
    def __init__(self, size) -> None:
        self.cl = size * [None]
        self.lastUsedIndex = 0
        self.maxSize = size

    def insertNode(self, value):
        if self.lastUsedIndex + 1 == self.maxSize:
            return "BT is full"
        self.cl[self.lastUsedIndex + 1] = value
        self.lastUsedIndex += 1
        return "value successfully inserted"

    def searchNode(self, value):
        if value in self.cl:
            return "Success"
        return "Failed"

    def preOrderTraversal(self, index=1):
        if index > self.lastUsedIndex:
            return
        print(self.cl[index])
        # call left subtree, then right subtree
        self.preOrderTraversal(index * 2)
        self.preOrderTraversal(index * 2 + 1)

    def inOrderTraversal(self, index=1):
        if index > self.lastUsedIndex:
            return
        self.inOrderTraversal(index * 2)
        print(self.cl[index])
        self.inOrderTraversal(index * 2 + 1)

    def postOrderTraversal(self, index=1):
        if index > self.lastUsedIndex:
            return
        self.postOrderTraversal(index * 2)
        self.postOrderTraversal(index * 2 + 1)
        print(self.cl[index])

    def levelOrderTraversal(self, index=1):
        for i in range(index, self.lastUsedIndex + 1):
            print(self.cl[i])

    def deleteNode(self, value):
        if self.lastUsedIndex == 0:
            return "List is empty"
        for i in range(1, self.lastUsedIndex + 1):
            if self.cl[i] == value:
                self.cl[i] = self.cl[self.lastUsedIndex]
                self.cl[self.lastUsedIndex] = None
                self.lastUsedIndex -= 1
                return "Node successfully deleted"

    def deleteBT(self):
        self.cl = None
        return "BT deleted successfully"


bt = BinaryTree(8)
bt.insertNode("drinks")
bt.insertNode("hot")
bt.insertNode("cold")
bt.insertNode("tea")
bt.insertNode("coffee")
print(bt.searchNode('hot'))
print(bt.deleteNode('tea'))
# bt.preOrderTraversal()
# bt.inOrderTraversal()
# bt.postOrderTraversal()
bt.levelOrderTraversal()
num1 = int(input())
count1 = 0
while 1 <= num1 <= 5:
    if num1 == 5:
        count1 += 1
    num1 = int(input())
print(count1)
# MSCT
TAU = 1
TAU_INFINITE = 1e-3
C_PUCT = 5  # A higher value means relying on the prior more.
ALPHA = 0.55  # Dirichlet noise parameter
NOISE_EPS = 0.25
SELF_PLAY_SIMULATION_NUM = 400
SELF_PLAY_GAME_NUM = 2  # number of self-play games
FREE_PLAY_SIMULATION_NUM = 400
REPLAY_BUFF_CAPACITY = 50000  # replay buffer capacity; the paper stores the most recent 500000 games

# Training parameters
LR = 2e-3  # learning rate
BATCH_SIZE = 256
PIPELINE_ITER_NUM = 2000  # number of AlphaZero pipeline iterations
L2_WEIGHT_DECAY = 1e-4  # L2 weight decay parameter
TRAIN_BATCH_NUM = 5  # number of network training iterations
FREQUENCY_TO_SAVE = 10
EVAL_GAME_NUM = FREQUENCY_TO_SAVE * SELF_PLAY_GAME_NUM  # number of games used to evaluate models against each other

# Other network settings
FILTER_NUM = 256  # number of convolution filters
RES_BLOCK_NUM = 3
SAVE_MODEL_DIR = './ckpts6x6/'

# Env
BOARD_SIZE = 6  # board size
CONNECT_N = 4  # how many in a row ends the game

# Other
USE_PYTORCH = True
# USE_PYTORCH = False
class Label(object):
    def __eq__(self, other):
        assert isinstance(other, Label)
        return type(self) == type(other)

    def __ne__(self, other):
        assert isinstance(other, Label)
        return type(self) != type(other)

    def __hash__(self):
        return hash(self.to_class_str())

    def to_class_str(self):
        return self.__class__.__name__


class NoLabel(Label):
    pass
# Function to insert a word in the middle of a string
def string_in():
    string = str(input("Enter a string :"))
    mid = len(string) // 2
    word = str(input("Enter a word to insert in middle :"))
    new_string = string[:mid] + word + string[mid:]
    print(new_string)


string_in()
#
# This file contains "references" to unreferenced code that should be kept and not considered dead code
#
not_used_but_whitelisted
""" Contains exception classes. """ class KRDictException(Exception): """ Contains information about an API error. This exception is only thrown if the argument passed to the ``raise_api_errors`` parameter is True. - ``message``: The error message associated with the error. - ``error_code``: The error code returned by the API. - ``request_params``: A dict containing the transformed parameters that were sent to the API. """ def __init__(self, message, error_code, params): super().__init__(message) self.message = message self.error_code = error_code self.request_params = params def __reduce__(self): return (KRDictException, (self.message, self.error_code, self.request_params))
# Copyright 2018 TNG Technology Consulting GmbH, Unterfoehring, Germany
# Licensed under the Apache License, Version 2.0 - see LICENSE.md in project root directory


# TODO IT-1: give this function some great functionality
def great_function():
    pass


# TODO: give this function some greater functionality
def greater_function():
    pass
class Stats:# pragma: no cover """Abstract class defining the basis of all Stats """ def get_keys(self): """Return the keys of the Stats Returns ------- keys : tuple of strings Key for the Stats """ return () def get_manager(self): """Return the StatsManager required Returns ------- stats_manager : StatsManager StatsManager for the Stats """ pass def get_game_fields_required(self): """Return the required fields at game level Returns ------- game_fields_required : list of strings List of fields """ return [] def get_participant_fields_required(self): """Return the required fields at participant level Returns ------- participant_fields_required : list of strings List of fields """ return [] def get_stats_fields_required(self): """Return the required fields at stats level Returns ------- stats_fields_required : list of strings List of fields """ return [] def get_id_fields_required(self): """Return the required fields at ID level Returns ------- id_fields_required : list of strings List of fields """ return [] def get_stats(self, df): """Return the computed stats Parameters ---------- df : Pandas DataFrame DataFrame containing all fields required to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class ChampionStats(Stats):# pragma: no cover """Abstract class defining a Stats for Champions """ pass class ChampionBanStats(Stats):# pragma: no cover """Abstract class defining a Stats for Champions bans """ def get_stats(self, dfs): """Return the computed stats Parameters ---------- dfs : tuple of Pandas DataFrames (df, df_bans) df :Pandas DataFrame DataFrame containing all fields required to compute the stats df_bans: Pandas DataFrame DataFrame containing bans information to compute the stats Returns ------- stats : Pandas Series Value oif the computed stats grouped by the key """ pass class ItemStats(Stats):# pragma: no cover """Abstract class defining a Stats for Items """ pass class PlayerStats(Stats):# pragma: no cover """Abstract class defining a Stats for Players """ pass class SpecialStats(Stats):# pragma: no cover """Abstract class defining a Stats that will handle itself the game data """ def push_game(self, game): pass def get_stats(self): pass def set_rank_manager(self, rank_manager): """Set the rank manager for when needed Parameters ---------- rank_manager : RankManager Object containing players rank """ self._rank_manager = rank_manager class DerivedStats(Stats):# pragma: no cover """Abstract class defining a Stats that is derived from another and must be computed afterward """ order = 0 def get_stats(self, df, stats): pass def get_stats_required(self): return []
n = 0
for i in range(999, 100, -1):
    for j in range(i, 100, -1):
        x = i * j
        if x > n:
            s = str(i * j)
            if s == s[::-1]:
                n = i * j
print(n)
class DictSerializable:
    @classmethod
    def from_dict(cls, data: dict) -> 'DictSerializable':
        return cls(**data)

    def to_dict(self) -> dict:
        return vars(self)
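# Hedged usage sketch: `Point` is a hypothetical subclass added only to show
# the from_dict/to_dict round-trip; it is not part of the original module.
class Point(DictSerializable):
    def __init__(self, x: int, y: int):
        self.x = x
        self.y = y


if __name__ == '__main__':
    p = Point.from_dict({'x': 1, 'y': 2})
    assert p.to_dict() == {'x': 1, 'y': 2}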
def ticket_printer(lister, number, total):
    f = open("ticket.txt", "w")
    f.write("Your shopping receipt:\n")
    f.write("Item ID\tItem name\tPrice\tQty\tSubtotal\n")
    for (good, num) in zip(lister, number):
        f.write(str(good.id) + "\t\t" + good.name + "\t\t" + str(good.price) +
                "\t" + str(num) + "\t" + str(num * good.price) + "\n")
    f.write("Total: " + str(total))
    f.close()
known = {}


def ack(m, n):
    if m == 0:
        return n + 1
    if m > 0 and n == 0:
        return ack(m - 1, 1)
    if m > 0 and n > 0:
        if (m, n) in known:
            print('Cache hit')
            return known[(m, n)]
        else:
            known[(m, n)] = ack(m - 1, ack(m, n - 1))
            return known[(m, n)]
    else:
        return None


print('ack(3, 4) =', ack(3, 4))
print('ack(3, 5) =', ack(3, 5))
print('ack(3, 6) =', ack(3, 6))
print('ack(3, 7) =', ack(3, 7))
#
# @lc app=leetcode id=450 lang=python3
#
# [450] Delete Node in a BST
#

# @lc code=start
# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, val=0, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right

class Solution:
    def deleteNode(self, root: TreeNode, key: int) -> TreeNode:
        if not root:
            return None
        if root.val == key:
            if not root.right:
                return root.left
            # Swap the key with its in-order successor (leftmost node of the
            # right subtree); the recursive calls below remove it.
            right = root.right
            while right.left:
                right = right.left
            root.val, right.val = right.val, root.val
        root.left = self.deleteNode(root.left, key)
        root.right = self.deleteNode(root.right, key)
        return root
# @lc code=end
__title__ = 'pairing-functions'
__description__ = 'A collection of pairing functions'
__url__ = 'https://github.com/ConvertGroupLabs/pairing-functions'
__version__ = '0.2.1'
__author__ = 'Convert Group Labs'
__author_email__ = '[email protected]'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2020 Convert Group'
def deleteMid(head):
    # check if the list contains 1 or more nodes
    if head is None or head.next is None:
        return None
    # assign pointers to their respective positions
    prev, i, j = None, head, head
    while j and j.next:
        j = j.next.next  # j pointer moves 2 nodes ahead
        # update prev pointer; prev holds the previous value of the i pointer
        prev = i
        # i pointer moves 1 node ahead
        i = i.next
    # since the i pointer was moving at half the speed of the j pointer,
    # it points at the mid node when the j pointer reaches the end
    prev.next = i.next  # bypassing the mid node
    return head


# Driver's code
class Node:
    def __init__(self, data):
        self.data = data
        self.next = None


class Llist:
    def __init__(self):
        self.head = None

    def insert(self, data, link):
        node = Node(data)
        if not self.head:
            self.head = node
            return node
        link.next = node
        return node


def printList(head):
    while head:
        print(head.data, end=" ")
        head = head.next
    print()


if __name__ == "__main__":
    t = int(input())
    for x in range(t):
        n = int(input())
        arr1 = [int(y) for y in input().split()]
        L1 = Llist()
        link = None
        for nodeData in arr1:
            link = L1.insert(nodeData, link)
        res = deleteMid(L1.head)
        printList(res)
"""Contains ascii-art project related logos.""" # http://patorjk.com/software/taag/#p=display&f=Varsity&t=PnP PNP = r""" _______ _______ |_ __ \ |_ __ \ | |__) |_ .--. | |__) | | ___/[ `.-. | | ___/ _| |_ | | | | _| | |_____| [___||__]|_____| """
N = int(input())
a = N % 1000
if a == 0:
    print(0)
else:
    print(1000 - a)
# -*- coding: utf-8 -*-
f = open("dico.txt", "r")
contrasenia = "hola"
contador = 0
linea = f.readline()
while linea:
    contador += 1
    if linea.strip() == contrasenia.strip():
        print('Password found: ' + linea)
        print('in ' + str(contador) + ' attempts')
        break
    linea = f.readline()
f.close()
first = "Murat" last = "Aksoy" name = f"Welcome to pyhton '{last}', {first}" print(name)
class Solution:
    def frequencySort(self, s):
        """
        :type s: str
        :rtype: str
        """
        dic = {}
        for item in s:
            if item in dic:
                dic[item] += 1
            else:
                dic[item] = 1
        ans = [0 for x in range(len(dic))]
        i = 0
        for item in dic:
            # print(item, dic[item])
            ans[i] = (item, dic[item])
            i += 1
        ans.sort(key=lambda item: item[1], reverse=True)
        end = ""
        for item in ans:
            end += item[0] * item[1]
        return end
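# Usage sketch (added); the input string is an assumption taken from the
# typical "Sort Characters By Frequency" examples.
if __name__ == "__main__":
    print(Solution().frequencySort("tree"))  # "eetr" (ties keep first-seen order)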
AUTHOR="Zawadi Done" DESCRIPTION="This module wil install/update MassDNS" INSTALL_TYPE="GIT" REPOSITORY_LOCATION="https://github.com/blechschmidt/massdns" INSTALL_LOCATION="massdns" DEBIAN="" AFTER_COMMANDS="cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/" LAUNCHER="massdns"
# Definition for singly-linked list. # class ListNode(object): # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution(object): # # Iterative (accepted), Time: O(m + n), Space: O(1) # def insert(self, pos_node, node): # node.next = pos_node.next # pos_node.next = node # def mergeTwoLists(self, l1, l2): # """ # :type l1: ListNode # :type l2: ListNode # :rtype: ListNode # """ # # Handle Base Case # if l1 is None: # return l2 # elif l2 is None: # return l1 # root = l1 if l1.val < l2.val else l2 # other = l2 if root is l1 else l1 # cur = root # while cur.next: # if other and other.val < cur.next.val: # self.insert(cur, ListNode(other.val, other.next)) # other = other.next # cur = cur.next # while other: # self.insert(cur, ListNode(other.val, other.next)) # cur = cur.next # other = other.next # return root # # Recursive (Top Voted), Time: O(m + n), Space: O(m + n) # def mergeTwoLists(self, l1, l2): # if not l1 or not l2: # return l1 or l2 # if l1.val < l2.val: # l1.next = self.mergeTwoLists(l1.next, l2) # return l1 # else: # l2.next = self.mergeTwoLists(l1, l2.next) # return l2 # Iterative (Top Voted), Time: O(m + n), Space: O(1) def mergeTwoLists(self, l1, l2): dummy = cur = ListNode(0) while l1 and l2: if l1.val < l2.val: cur.next = l1 l1 = l1.next else: cur.next = l2 l2 = l2.next cur = cur.next cur.next = l1 or l2 return dummy.next
'''
This module declares constants needed for this solution.
This is to remove magic numbers
'''

CRATER_CHANGE_WHEN_SUNNY = 0.9
CRATER_CHANGE_WHEN_RAINY = 1.2
CRATER_CHANGE_WHEN_WINDY = 0.0

ORBIT1_ORBIT_DISTANCE = 18
ORBIT1_CRATERS_COUNT = 20

ORBIT2_ORBIT_DISTANCE = 20
ORBIT2_CRATERS_COUNT = 10
from typing import List


class Solution:
    def singleNumber(self, nums: List[int]) -> int:
        ret = 0
        for n in nums:
            ret ^= n
        return ret
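# Usage sketch (added); the input list is an assumption showing how XOR
# cancels out the paired numbers and leaves the single one.
if __name__ == "__main__":
    print(Solution().singleNumber([4, 1, 2, 1, 2]))  # 4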
#! /usr/bin/env python3
"""Sort a list and store previous indices of values"""

# enumerate is a great but little-known tool for writing nice code

l = [4, 2, 3, 5, 1]
print("original list: ", l)

values, indices = zip(*sorted((a, b) for (b, a) in enumerate(l)))

# now values contains the sorted list and indices contains
# the indices of the corresponding value in the original list
print("sorted list: ", values)
print("original indices: ", indices)

# note that this returns tuples, but if necessary they can
# be converted to lists using list()
#!/usr/bin/env python3

# https://www.urionlinejudge.com.br/judge/en/problems/view/1020


def decompose(total, value):
    decomposed = total // value
    return total - decomposed * value, decomposed


def main():
    DAYS = int(input())

    DAYS, YEARS = decompose(DAYS, 365)
    DAYS, MONTHS = decompose(DAYS, 30)

    print(YEARS, 'ano(s)')
    print(MONTHS, 'mes(es)')
    print(DAYS, 'dia(s)')


# Start the execution if it's the main script
if __name__ == "__main__":
    main()
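# Added illustration (assumed example input): decompose() returns
# (remainder, quotient), e.g. decompose(400, 365) == (35, 1) and
# decompose(35, 30) == (5, 1), so 400 days prints "1 ano(s)", "1 mes(es)", "5 dia(s)".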
'''
Creating a very basic module in Python
'''

languages = {'Basic', 'QBasic', 'Cobol', 'Pascal', 'Assembly',
             'C/C++', 'Java', 'Python', 'Ruby'}

values = 10, 50, 60, 11, 98, 75, 65, 32


def add(*args: float) -> float:
    sum = 0.0
    for value in args:
        sum += value
    return sum


def multiply(*args: float) -> float:
    prod = 1.0
    for value in args:
        prod *= value
    return prod


def _prime(number: int) -> bool:
    if number <= 1:
        return False
    elif number == 2:
        return True
    elif number % 2 == 0:
        return False
    else:
        # the range must include int(sqrt(number)), otherwise squares of odd
        # primes (e.g. 9, 25) are wrongly reported as prime
        for n in range(3, int(number ** 0.5) + 1, 2):
            if number % n == 0:
                return False
        else:
            return True
class person():
    def __init__(self, nombre, edad, lugResi):
        self.name = nombre
        self.age = edad
        self.place = lugResi

    def datos(self):
        print("Name: ", self.name,
              "\nAge: ", self.age,
              "\nResidence: ", self.place)


class employee(person):
    def __init__(self, salario, antiguedad):
        # super().__init__("Alfonso", 21, "CDMX")  # But these are fixed values (we should not do this)
        self.salario = salario
        self.antiguedad = antiguedad


# person1 = person("Alfonso", 21, "CDMX")
# person1.datos()
# person1 = employee("Alfonso", 21, "CDMX")  # Does not work because of constructor precedence

person1 = employee(1500, 13)
# Show that the other constructor does work, but the datos method does not
person1.datos()
# Use the "super" function <- EMPHASIZE (see line 15)

'''
## PART TWO
class employee(person):
    def __init__(self, salario, antiguedad, nameEmpleado, ageEmpleado, resEmpleado):
        super().__init__(nameEmpleado, ageEmpleado, resEmpleado)  # Parent constructor
        self.salario = salario
        self.antiguedad = antiguedad

    # Override the datos method
    def datos(self):
        super().datos()
        # Add the employee-specific data
        print("Salary: ", self.salario,
              "\nSeniority: ", self.antiguedad)

person1 = employee(1500, 13, "Alfonso", 21, "CDMX")
person1.datos()

# SUBSTITUTION PRINCIPLE
# -> "is a": an EMPLOYEE is always a PERSON
#    -> a PERSON is not always an EMPLOYEE
# isinstance() - function to check this principle (returns True or False)
print(isinstance(person1, employee))  # Useful once there are many inheritance levels (person1, person)
person2 = person("Alfonso", 21, "CDMX")
person2.datos()
print(isinstance(person2, employee))  # Useful once there are many inheritance levels (person1, person)
'''
__author__ = 'roland'


class HandlerResponse(object):
    def __init__(self, content_processed, outside_html_action=None,
                 tester_error_description=None, cookie_jar=None,
                 urllib_request=None, urllib_response=None):
        """
        :param content_processed: bool set to True if a scripted
            ContentHandler matches and processes a page; If False then the
            next ContentHandler must take over
        :param cookie_jar: A CookieJar instance
        :param urllib_response: A urllib.response.addinfourl instance
        :param outside_html_action: Value from outside_html_actions or None
        :param tester_error_description: optional text if outside_html_action
            is not None
        :param response: A semi parsed response, might be a dictionary
        """
        self.content_processed = content_processed
        self.outside_html_action = outside_html_action
        self.cookie_jar = cookie_jar
        self.urllib_request = urllib_request
        self.urllib_response = urllib_response
        self.tester_error_description = tester_error_description


class ContentHandler(object):
    """
    Process the HTML contents of a response from the test target.
    This can either be a scripted approach, or invoke a browser.
    """
    def __init__(self):
        pass

    def handle_response(self, http_response, auto_close_urls, conv=None,
                        verify_ssl=True, cookie_jar=None,
                        outside_html_actions=None):
        """
        :param http_response: The HTTP response to handle
        :param auto_close_urls: A list of URLs that if encountered should lead
            to an immediate break in processing, like a form action. Other
            URLs in the page will load local resources such as css and js
            without returning control.
        :param conv: A aatest.Conversation instance
        :param verify_ssl: (True/False) whether the ssl certificates must be
            verified. Default is True
        :param cookie_jar: A http.cookiejar.CookieJar instance
        :param outside_html_actions: a dict describing buttons for the widget
            outside the html-area, to be used if the test must be aborted
        :return: A aatest.contenthandler.HandlerResponse instance
        """
        raise NotImplementedError()
# Config, Reference, and configure provided in globals
cards = Config(
    hd_audio=Config(
        match=dict(),
        name='Auto-%(id)s-%(label)s',
        restart=-1,
        input=dict(
            label="input",
            subdevice='0',
            channels=2,
            buffer_size=512,
            buffer_count=4,
            sample_rate=48000,
            quality=4
        ),
        output=dict(
            label="output",
            subdevice='0',
            channels=2,
            buffer_size=512,
            buffer_count=4,
            sample_rate=48000,
            quality=4
        )
    )
)
# Given a binary tree, determine if it is height-balanced.
#
# For this problem, a height-balanced binary tree is defined as:
#
# a binary tree in which the absolute difference between the heights of the
# left and right subtrees of every node is at most 1.
#
# Example 1:
#
# Given the binary tree [3,9,20,null,null,15,7]
#
#     3
#    / \
#   9  20
#     /  \
#    15   7
# returns true.
#
# Example 2:
#
# Given the binary tree [1,2,2,3,3,null,null,4,4]
#
#        1
#       / \
#      2   2
#     / \
#    3   3
#   / \
#  4   4
# returns false.
#
# Source: LeetCode (力扣)
# Link: https://leetcode-cn.com/problems/balanced-binary-tree
# Copyright belongs to LeetCode. Contact the official channel for authorization
# before commercial reuse; cite the source for non-commercial reuse.


# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def tree_height(self, root: TreeNode) -> int:
        if not root:
            return 0
        return max(self.tree_height(root.left), self.tree_height(root.right)) + 1

    def isBalanced(self, root: TreeNode) -> bool:
        if not root:
            return True
        left = self.tree_height(root.left)
        right = self.tree_height(root.right)
        if abs(left - right) > 1:
            return False
        if not (root.left and root.right):
            return True
        return self.isBalanced(root.left) and self.isBalanced(root.right)
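# Usage sketch (added): builds the tree from Example 1 by hand and checks
# that it is reported as balanced.
if __name__ == "__main__":
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print(Solution().isBalanced(root))  # True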
#!/usr/bin/env python3 # Copyright 2018, Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. TRIPLEO_MAPPING_GROUP = { # Mandatory group mappings 'hosts': ['undercloud', 'overcloud', 'Undercloud', 'Overcloud'], 'all': ['hosts'], # Infrastructure group mappings 'shared-infra_hosts': ['Controller', 'controller'], 'rabbitmq_all': ['Controller', 'controller'], 'memcached_all': ['Controller', 'controller'], 'galera_all': ['Controller', 'controller'], 'galera': ['Controller', 'controller'], 'rsyslog_all': ['Controller', 'controller'], 'utility_all': ['undercloud', 'Undercloud'], 'localhost': ['undercloud', 'Undercloud'], # OpenStack group mappings # Keystone 'keystone_all': ['Controller', 'controller'], # Nova 'nova_all': [ 'nova_placement', 'nova_conductor', 'nova_metadata', 'nova_consoleauth', 'nova_api', 'nova_migration_target', 'nova_compute', 'nova_scheduler', 'nova_libvirt', 'nova_vnc_proxy' ], 'nova_api_metadata': ['nova_metadata'], 'nova_api_os_compute': ['nova_api'], 'nova_compute': ['Compute'], 'nova_console': ['nova_consoleauth'], # Neutron 'neutron_all': [ 'neutron_metadata', 'neutron_dhcp', 'neutron_plugin_ml2', 'neutron_ovs_agent', 'neutron_api', 'neutron_l3' ], 'neutron_server': ['neutron_api'], 'neutron_dhcp_agent': ['neutron_dhcp'], 'neutron_l3_agent': ['neutron_l3'], 'neutron_linuxbridge_agent': ['neutron_ovs_agent'], 'neutron_openvswitch_agent': ['neutron_ovs_agent'], 'neutron_metadata_agent': ['neutron_metadata'], # Glance 'glance_all': ['glance_api', 'glance_registry_disabled'], # Heat 'heat_all': [ 'heat_api', 'heat_api_cloudwatch_disabled', 'heat_engine', 'heat_api_cfn' ], # Cinder 'cinder_all': ['cinder_api', 'cinder_volume', 'cinder_scheduler'], # Horizon 'horizon_all': ['horizon'], # Designate 'designate_all': [ 'designate_worker', 'designate_api', 'designate_producer', 'designate_mdns', 'designate_central', ], # Ceph 'ceph_all': ['ceph_osd', 'ceph_mon', 'ceph_rgw'], 'mons': ['ceph_mon'], 'osds': ['ceph_osd'], 'rgws': ['ceph_rgw'], # Swift - skip swift_proxy because it already exists in tripleO 'swift_all': ['swift_proxy', 'swift_storage'], 'swift_hosts': ['swift_storage'], 'swift_acc': ['swift_storage'], 'swift_cont': ['swift_storage'], 'swift_obj': ['swift_storage'], # Octavia 'octavia_all': [ 'octavia_api', 'octavia_health_manager', 'octavia_housekeeping', 'octavia_worker' ] # NOTE(npawelek): Designate is not GA in OSP13 # Designate # 'designate_all': ['designate_all'], # NOTE(npawelek): Ironic mappings are not confirmed yet. We're not # currently deploying ironic to customers due to RFEs around multi # tenancy. When this functionality is needed, we'll need to define # all the groupings properly. # # Ironic # 'ironic_all': ['ironic_api', 'ironic_compute', 'ironic_conductor'], # 'ironic_api': ['ironic_api'], # 'ironic_conductor': ['ironic_conductor'], # 'ironic_compute': ['ironic_compute'], }
def make_bio_dict(tags, start_idx=0): d = dict() i = start_idx for tag in tags: for pre_tag in ['B-', 'I-']: d[pre_tag + tag] = i i += 1 d['O'] = i return d
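# Illustrative usage of make_bio_dict above (the tag names are made up): each
# entity type gets a B- and an I- label index, and 'O' takes the last index.
if __name__ == "__main__":
    label2id = make_bio_dict(["PER", "LOC"], start_idx=0)
    print(label2id)
    # {'B-PER': 0, 'I-PER': 1, 'B-LOC': 2, 'I-LOC': 3, 'O': 4}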
# -------------------------------------------------#
#                 EXERCISE 08                       #
# -------------------------------------------------#
# Write a program that reads a value in meters
# and displays it converted to centimeters and millimeters.

mt = float(input('digite uma distancia em metros...:'))
mm = mt * 1000
dm = mt * 10
cm = mt * 100
dam = mt / 10
km = mt / 1000
hm = mt / 100
print('{} metro(s) é {:.0f} centimetros e {:.0f} milimetros'.format(mt, cm, mm))
print('{} metro(s) é {} km e {} dam'.format(mt, km, dam))
# -*- coding: utf-8 -*-
__author__ = "Sergey Aganezov"
__email__ = "aganezov(at)cs.jhu.edu"
__status__ = "production"

version = "1.10"

__all__ = ["grimm", "breakpoint_graph", "graphviz", "utils", "edge", "genome", "kbreak", "multicolor",
           "tree", "vertices", "distances"]
size(200, 200) stroke(0) strokeWidth(10) fill(1, 0.3, 0) polygon((40, 40), (40, 160)) polygon((60, 40), (60, 160), (130, 160)) polygon((100, 40), (160, 160), (160, 40), close=False)
# imdb sortBy functions


def sortMoviesBy(movies_names_wl, args):
    """
    Sorts the list of movies by the attribute named in args.sortBy and trims it
    to the top args.top entries.

    :param list movies_names_wl: a list of movie_names_with_links

        movie : [Rank, Link, Title, Year, Rating, Number of Ratings, Runtime, Director]

            Rank : int
            Link : str
            Title : str
            Year : int
            NoR : int
            Runtime : str
            Director : str

    :param Namespace args: [top, csv, sortBy, setup, console_print]

            top : int
            csv : bool
            sortBy : string
            setup : bool
            console_print : bool
    """
    try:
        movies_names_wl = movies_names_wl[:args.top]
    except Exception:
        print('**Error** : cannot slice top size')

    keydictionary = {'Rank': 0, 'Title': 2, 'Year': 3,
                     'Rating': 4, 'NoR': 5, 'Runtime': 6, 'Director': 7}

    try:
        movies_names_wl.sort(
            key=lambda movie: movie[keydictionary[args.sortBy]])
    except Exception:
        if args.sortBy is not None:
            print('**Error** : cannot sortBy **')

    return movies_names_wl
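# A small usage sketch for sortMoviesBy (the sample rows and the argparse
# Namespace values are made up for illustration):
if __name__ == "__main__":
    from argparse import Namespace

    sample = [
        [2, 'https://www.imdb.com/title/tt0068646/', 'The Godfather', 1972, 9.2, 1700000, '175 min', 'Francis Ford Coppola'],
        [1, 'https://www.imdb.com/title/tt0111161/', 'The Shawshank Redemption', 1994, 9.3, 2400000, '142 min', 'Frank Darabont'],
    ]
    args = Namespace(top=2, csv=False, sortBy='Year', setup=False, console_print=True)
    for movie in sortMoviesBy(sample, args):
        print(movie[2], movie[3])
    # The Godfather 1972
    # The Shawshank Redemption 1994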
# flake8: noqa _base_ = [ './coco.py' ] data = dict( samples_per_gpu=2, workers_per_gpu=2, train=dict(classes=('person',)), val=dict(classes=('person',)), test=dict(classes=('person',)) )
########################################################
# Copyright (c) 2015-2017 by European Commission.      #
# All Rights Reserved.                                 #
########################################################

extends("BaseKPI.py")

"""
Expected Unserved Demand (%)
-----------------------------

Indexed by
 * scope
 * delivery point
 * energy
 * test case

The Expected Unserved Demand is a metric used to measure security of supply: the amount of electricity, gas or reserve demand that is expected not to be met by the production means during the year.
It is calculated as the Loss Of Load volume (LOL) expressed relative to the corresponding annual demand volume, in percent. It can be calculated for each energy independently:

.. math::
    \\small EENS_{dp, energy} = 100 \\cdot \\frac {LOL_{dp, energy}}{demand_{dp, energy}} \\;(\\%)

See the 'Loss of load' KPI for further documentation about the loss of load.
"""

def computeIndicator(context, indexFilter, paramsIndicator, kpiDict):
    timeStepDuration = getTimeStepDurationInHours(context)
    selectedScopes = indexFilter.filterIndexList(0, getScopes())
    selectedDeliveryPoints = indexFilter.filterIndexList(1, getDeliveryPoints(context))
    selectedEnergies = indexFilter.filterIndexList(2, getEnergies(context, includedEnergies = PRODUCED_ENERGIES))
    selectedTestCases = indexFilter.filterIndexList(3, context.getResultsIndexSet())

    demandAssetsByScope = getAssetsByScope(context, selectedScopes, includeFinancialAssets=True, includedTechnologies = DEMAND_TYPES)
    lossOfLoadAssetsByScope = getAssetsByScope(context, selectedScopes, includeFinancialAssets=True, includedTechnologies = LOSS_OF_ENERGY_TYPES)

    demandDict = getDemandDict(context, selectedScopes, selectedTestCases, selectedEnergies, selectedDeliveryPoints, demandAssetsByScope, aggregation = True)
    lossOfLoadDict = getProductionDict(context, selectedScopes, selectedTestCases, selectedEnergies, selectedDeliveryPoints, lossOfLoadAssetsByScope, aggregation = True)

    for index in lossOfLoadDict:
        if index in demandDict:
            demand = demandDict[index].getSumValue()
            if demand != 0:
                kpiDict[index] = 100 * lossOfLoadDict[index].getSumValue() / demand

    return kpiDict

def get_indexing(context) :
    baseIndexList = [getScopesIndexing(), getDeliveryPointsIndexing(context), getEnergiesIndexing(context, includedEnergies = PRODUCED_ENERGIES), getTestCasesIndexing(context)]
    return baseIndexList

IndicatorLabel = "Expected Unserved Demand"
IndicatorUnit = "%"
IndicatorDeltaUnit = "%"
IndicatorDescription = "Expected energy not served as a percentage of demand volume attached to a delivery point"
IndicatorParameters = []
IndicatorIcon = ""
IndicatorCategory = "Results>Loss of load"
IndicatorTags = "Power System, Gas System, Power Markets"
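# Worked example for the formula above (the numbers are made up for
# illustration): if the expected loss-of-load volume at a delivery point is
# 5 GWh and the annual demand there is 1000 GWh, the indicator value is
# 100 * 5 / 1000 = 0.5 (%).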
x = int(input()) n = int(input()) pool = x for _ in range(n): pool += x - int(input()) print(pool)
import numpy as np


def reject_outliers(data, m=2.):
    """Return a boolean mask keeping points within m median-absolute-deviations of the median."""
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    # If the median deviation is zero, keep every point (avoid dividing by zero).
    s = d / mdev if mdev else np.zeros(len(d))
    return s < m


def mean_dup(x_):
    """Aggregate duplicate measurements: drop outliers, then average the values within +/-2.5% of the mean."""
    if 1 == len(np.unique(x_.values)):
        return x_.values[0]
    else:
        x = x_.values[reject_outliers(x_.values.copy())]
        x_mean = x.mean()
        mask = (x_mean * 0.975 <= x) & (x <= x_mean * 1.025)
        return x[mask].mean()


def remove_duplicate(df):
    '''
    Merges rows that share the same composition (averaging the property column
    via mean_dup) and drops samples whose elemental composition, i.e. all
    columns except the last, does not sum to ~100%.
    input format -> df = dataframe whose last column is the target property
    '''
    features = df.columns.values.tolist()
    features.remove(df.columns[-1])
    property_name = df.columns[-1]
    df = df[df[features].sum(axis=1).between(99, 101)]
    df = df.groupby(features, as_index=False).agg(mean_dup)
    df = df.dropna()
    df = df.loc[df[property_name] > 0]
    return df
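# A minimal usage sketch for remove_duplicate, assuming a composition-style
# dataframe whose last column is the measured property (the column names and
# values below are invented):
if __name__ == "__main__":
    import pandas as pd

    df = pd.DataFrame({
        "Fe": [70.0, 70.0, 60.0],
        "Ni": [30.0, 30.0, 40.0],
        "hardness": [200.0, 210.0, 150.0],
    })
    # The two Fe70/Ni30 rows describe the same composition, so they get merged
    # into a single row with an averaged hardness of 205.
    print(remove_duplicate(df))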
class MyList: class _Node: __slots__ = ('value', 'next') def __init__(self, value, next=None): self.value = value self.next = next class _NodeIterator: def __init__(self, first): self._next_node = first def __iter__(self): return self def __next__(self): if self._next_node is None: raise StopIteration value = self._next_node.value self._next_node = self._next_node.next return value def __init__(self, iterable=None): self._head = None self._tail = None self._length = 0 if iterable is not None: self.extend(iterable) def append(self, value): node = MyList._Node(value) if len(self) == 0: self._head = self._tail = node else: self._tail.next = node self._tail = node self._length += 1 def __len__(self): return self._length def extend(self, iterable): for value in iterable: self.append(value) def __getitem__(self, index): if index < 0: index += len(self) if not 0 <= index < len(self): raise IndexError('list index out of range') node = self._head for _ in range(index): node = node.next return node.value def __iter__(self): return MyList._NodeIterator(self._head) values = MyList([4, 2, 1, 99, 9]) print(values) # print(values[0]) # print(values[1]) # print(values[2]) for el in values: print(el, end=' ')
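# Note: print(values) above shows the default object repr because MyList does
# not define __repr__. A minimal, optional sketch (an illustration, not part of
# the original class) attaches one after the fact so the list prints its
# elements:
def _mylist_repr(self):
    return 'MyList([{}])'.format(', '.join(repr(v) for v in self))


MyList.__repr__ = _mylist_repr
print(values)  # MyList([4, 2, 1, 99, 9])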
number = 10 array = '64630 11735 14216 99233 14470 4978 73429 38120 51135 67060' array = list(map(int, array.split())) def find_mean(a): return round(sum(a)/number, 1) def find_median(a): a = sorted(a) if len(a) % 2 == 0: return round((a[number//2 - 1] + a[number//2])/2, 1) else: return a[number//2] def find_mode(a): a = sorted(a) counts = {i: a.count(i) for i in a} sorted_x = sorted(counts.items(), key=lambda z: z[1], reverse=True) return sorted_x[0][0] print(find_mean(array)) print(find_median(array)) print(find_mode(array))
""" Entrada O arquivo de entrada contém dois valores inteiros correspondentes ao código e à quantidade de um item conforme tabela acima. Saída O arquivo de saída deve conter a mensagem "Total: R$ " seguido pelo valor a ser pago, com 2 casas após o ponto decimal """ produtos = { 1: {"desc": "Cachorro Quente", "valor": 4.00}, 2: {"desc": "X-Salada", "valor": 4.50}, 3: {"desc": "X-Bacon", "valor": 5.00}, 4: {"desc": "Torrada simples", "valor": 2.00}, 5: {"desc": "Refrigerante", "valor": 1.50} } codigo, quantidade = [int(numero) for numero in input().split()] resultado = produtos[codigo]["valor"] * quantidade print(f"Total: R$ {resultado:.2f}")
velocidade = float(input('Escreva a velocidade de seu carro: '))
if velocidade <= 80:
    print('Tenha um bom dia. Dirija com segurança!')
else:
    print('MULTADO! Você passou o limite permitido, que é 80 km/h.')
    print('Você agora vai ter que pagar R${:.2f}!'.format((velocidade - 80) * 7))
    print('Tenha um bom dia. Dirija com segurança!')
class ScrollProperties(object): """ Encapsulates properties related to scrolling. """ def Instance(self): """ This function has been arbitrarily put into the stubs""" return ScrollProperties() @staticmethod def __new__(self,*args): #cannot find CLR constructor """ __new__(cls: type,container: ScrollableControl) """ pass Enabled=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets whether the scroll bar can be used on the container. Get: Enabled(self: ScrollProperties) -> bool Set: Enabled(self: ScrollProperties)=value """ LargeChange=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets the distance to move a scroll bar in response to a large scroll command. Get: LargeChange(self: ScrollProperties) -> int Set: LargeChange(self: ScrollProperties)=value """ Maximum=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets the upper limit of the scrollable range. Get: Maximum(self: ScrollProperties) -> int Set: Maximum(self: ScrollProperties)=value """ Minimum=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets the lower limit of the scrollable range. Get: Minimum(self: ScrollProperties) -> int Set: Minimum(self: ScrollProperties)=value """ ParentControl=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets the control to which this scroll information applies. """ SmallChange=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets the distance to move a scroll bar in response to a small scroll command. Get: SmallChange(self: ScrollProperties) -> int Set: SmallChange(self: ScrollProperties)=value """ Value=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets a numeric value that represents the current position of the scroll bar box. Get: Value(self: ScrollProperties) -> int Set: Value(self: ScrollProperties)=value """ Visible=property(lambda self: object(),lambda self,v: None,lambda self: None) """Gets or sets whether the scroll bar can be seen by the user. Get: Visible(self: ScrollProperties) -> bool Set: Visible(self: ScrollProperties)=value """
# -- coding: utf-8 -- # Created by LoginRadius Development Team # Copyright 2019 LoginRadius Inc. All rights reserved. # class MultiFactorAuthenticationApi: def __init__(self, lr_object): """ :param lr_object: this is the reference to the parent LoginRadius object. """ self._lr_object = lr_object def mfa_configure_by_access_token(self, access_token, sms_template2_f_a=None): """This API is used to configure the Multi-factor authentication after login by using the access_token when MFA is set as optional on the LoginRadius site. Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. sms_template2_f_a: SMS Template Name Returns: Response containing Definition of Complete Multi-Factor Authentication Settings data 5.7 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a resource_path = "identity/v2/auth/account/2fa" return self._lr_object.execute("GET", resource_path, query_parameters, None) def mfa_update_setting(self, access_token, multi_factor_auth_model_with_lockout, fields=''): """This API is used to trigger the Multi-factor authentication settings after login for secure actions Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. multi_factor_auth_model_with_lockout: Model Class containing Definition of payload for MultiFactorAuthModel With Lockout API fields: The fields parameter filters the API response so that the response only includes a specific set of fields Returns: Response containing Definition for Complete profile data 5.9 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) if(multi_factor_auth_model_with_lockout is None): raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_with_lockout")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields resource_path = "identity/v2/auth/account/2fa/verification/otp" return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_with_lockout) def mfa_update_by_access_token(self, access_token, multi_factor_auth_model_by_google_authenticator_code, fields='', sms_template=None): """This API is used to Enable Multi-factor authentication by access token on user login Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. 
multi_factor_auth_model_by_google_authenticator_code: Model Class containing Definition of payload for MultiFactorAuthModel By GoogleAuthenticator Code API fields: The fields parameter filters the API response so that the response only includes a specific set of fields sms_template: SMS Template name Returns: Response containing Definition for Complete profile data 5.10 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) if(multi_factor_auth_model_by_google_authenticator_code is None): raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_by_google_authenticator_code")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(sms_template)): query_parameters["smsTemplate"] = sms_template resource_path = "identity/v2/auth/account/2fa/verification/googleauthenticatorcode" return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_by_google_authenticator_code) def mfa_update_phone_number_by_token(self, access_token, phone_no2_f_a, sms_template2_f_a=None): """This API is used to update the Multi-factor authentication phone number by sending the verification OTP to the provided phone number Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. phone_no2_f_a: Phone Number For 2FA sms_template2_f_a: SMS Template Name Returns: Response containing Definition for Complete SMS data 5.11 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) if(self._lr_object.is_null_or_whitespace(phone_no2_f_a)): raise Exception(self._lr_object.get_validation_message("phone_no2_f_a")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a body_parameters = {} body_parameters["phoneNo2FA"] = phone_no2_f_a resource_path = "identity/v2/auth/account/2fa" return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters) def mfa_reset_google_auth_by_token(self, access_token, googleauthenticator): """This API Resets the Google Authenticator configurations on a given account via the access_token Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. googleauthenticator: boolean type value,Enable google Authenticator Code. Returns: Response containing Definition of Delete Request 5.12.1 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() body_parameters = {} body_parameters["googleauthenticator"] = googleauthenticator resource_path = "identity/v2/auth/account/2fa/authenticator" return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters) def mfa_reset_sms_auth_by_token(self, access_token, otpauthenticator): """This API resets the SMS Authenticator configurations on a given account via the access_token. 
Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. otpauthenticator: Pass 'otpauthenticator' to remove SMS Authenticator Returns: Response containing Definition of Delete Request 5.12.2 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() body_parameters = {} body_parameters["otpauthenticator"] = otpauthenticator resource_path = "identity/v2/auth/account/2fa/authenticator" return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters) def mfa_backup_code_by_access_token(self, access_token): """This API is used to get a set of backup codes via access_token to allow the user login on a site that has Multi-factor Authentication enabled in the event that the user does not have a secondary factor available. We generate 10 codes, each code can only be consumed once. If any user attempts to go over the number of invalid login attempts configured in the Dashboard then the account gets blocked automatically Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. Returns: Response containing Definition of Complete Backup Code data 5.13 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() resource_path = "identity/v2/auth/account/2fa/backupcode" return self._lr_object.execute("GET", resource_path, query_parameters, None) def mfa_reset_backup_code_by_access_token(self, access_token): """API is used to reset the backup codes on a given account via the access_token. This API call will generate 10 new codes, each code can only be consumed once Args: access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication. Returns: Response containing Definition of Complete Backup Code data 5.14 """ if(self._lr_object.is_null_or_whitespace(access_token)): raise Exception(self._lr_object.get_validation_message("access_token")) query_parameters = {} query_parameters["access_token"] = access_token query_parameters["apiKey"] = self._lr_object.get_api_key() resource_path = "identity/v2/auth/account/2fa/backupcode/reset" return self._lr_object.execute("GET", resource_path, query_parameters, None) def mfa_login_by_email(self, email, password, email_template=None, fields='', login_url=None, sms_template=None, sms_template2_f_a=None, verification_url=None): """This API can be used to login by emailid on a Multi-factor authentication enabled LoginRadius site. 
Args: email: user's email password: Password for the email email_template: Email template name fields: The fields parameter filters the API response so that the response only includes a specific set of fields login_url: Url where the user is logging from sms_template: SMS Template name sms_template2_f_a: SMS Template Name verification_url: Email verification url Returns: Complete user UserProfile data 9.8.1 """ if(self._lr_object.is_null_or_whitespace(email)): raise Exception(self._lr_object.get_validation_message("email")) if(self._lr_object.is_null_or_whitespace(password)): raise Exception(self._lr_object.get_validation_message("password")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(email_template)): query_parameters["emailTemplate"] = email_template if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(login_url)): query_parameters["loginUrl"] = login_url if(not self._lr_object.is_null_or_whitespace(sms_template)): query_parameters["smsTemplate"] = sms_template if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a if(not self._lr_object.is_null_or_whitespace(verification_url)): query_parameters["verificationUrl"] = verification_url body_parameters = {} body_parameters["email"] = email body_parameters["password"] = password resource_path = "identity/v2/auth/login/2fa" return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters) def mfa_login_by_user_name(self, password, username, email_template=None, fields='', login_url=None, sms_template=None, sms_template2_f_a=None, verification_url=None): """This API can be used to login by username on a Multi-factor authentication enabled LoginRadius site. 
Args: password: Password for the email username: Username of the user email_template: Email template name fields: The fields parameter filters the API response so that the response only includes a specific set of fields login_url: Url where the user is logging from sms_template: SMS Template name sms_template2_f_a: SMS Template Name verification_url: Email verification url Returns: Complete user UserProfile data 9.8.2 """ if(self._lr_object.is_null_or_whitespace(password)): raise Exception(self._lr_object.get_validation_message("password")) if(self._lr_object.is_null_or_whitespace(username)): raise Exception(self._lr_object.get_validation_message("username")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(email_template)): query_parameters["emailTemplate"] = email_template if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(login_url)): query_parameters["loginUrl"] = login_url if(not self._lr_object.is_null_or_whitespace(sms_template)): query_parameters["smsTemplate"] = sms_template if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a if(not self._lr_object.is_null_or_whitespace(verification_url)): query_parameters["verificationUrl"] = verification_url body_parameters = {} body_parameters["password"] = password body_parameters["username"] = username resource_path = "identity/v2/auth/login/2fa" return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters) def mfa_login_by_phone(self, password, phone, email_template=None, fields='', login_url=None, sms_template=None, sms_template2_f_a=None, verification_url=None): """This API can be used to login by Phone on a Multi-factor authentication enabled LoginRadius site. 
Args: password: Password for the email phone: New Phone Number email_template: Email template name fields: The fields parameter filters the API response so that the response only includes a specific set of fields login_url: Url where the user is logging from sms_template: SMS Template name sms_template2_f_a: SMS Template Name verification_url: Email verification url Returns: Complete user UserProfile data 9.8.3 """ if(self._lr_object.is_null_or_whitespace(password)): raise Exception(self._lr_object.get_validation_message("password")) if(self._lr_object.is_null_or_whitespace(phone)): raise Exception(self._lr_object.get_validation_message("phone")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() if(not self._lr_object.is_null_or_whitespace(email_template)): query_parameters["emailTemplate"] = email_template if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(login_url)): query_parameters["loginUrl"] = login_url if(not self._lr_object.is_null_or_whitespace(sms_template)): query_parameters["smsTemplate"] = sms_template if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a if(not self._lr_object.is_null_or_whitespace(verification_url)): query_parameters["verificationUrl"] = verification_url body_parameters = {} body_parameters["password"] = password body_parameters["phone"] = phone resource_path = "identity/v2/auth/login/2fa" return self._lr_object.execute("POST", resource_path, query_parameters, body_parameters) def mfa_validate_otp_by_phone(self, multi_factor_auth_model_with_lockout, second_factor_authentication_token, fields='', sms_template2_f_a=None): """This API is used to login via Multi-factor authentication by passing the One Time Password received via SMS Args: multi_factor_auth_model_with_lockout: Model Class containing Definition of payload for MultiFactorAuthModel With Lockout API second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication fields: The fields parameter filters the API response so that the response only includes a specific set of fields sms_template2_f_a: SMS Template Name Returns: Complete user UserProfile data 9.12 """ if(multi_factor_auth_model_with_lockout is None): raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_with_lockout")) if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)): raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a resource_path = "identity/v2/auth/login/2fa/verification/otp" return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_with_lockout) def mfa_validate_google_auth_code(self, google_authenticator_code, second_factor_authentication_token, fields='', sms_template2_f_a=None): """This API is used to login via Multi-factor-authentication by passing the google authenticator code. 
Args: google_authenticator_code: The code generated by google authenticator app after scanning QR code second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication fields: The fields parameter filters the API response so that the response only includes a specific set of fields sms_template2_f_a: SMS Template Name Returns: Complete user UserProfile data 9.13 """ if(self._lr_object.is_null_or_whitespace(google_authenticator_code)): raise Exception(self._lr_object.get_validation_message("google_authenticator_code")) if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)): raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a body_parameters = {} body_parameters["googleAuthenticatorCode"] = google_authenticator_code resource_path = "identity/v2/auth/login/2fa/verification/googleauthenticatorcode" return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters) def mfa_validate_backup_code(self, multi_factor_auth_model_by_backup_code, second_factor_authentication_token, fields=''): """This API is used to validate the backup code provided by the user and if valid, we return an access_token allowing the user to login incases where Multi-factor authentication (MFA) is enabled and the secondary factor is unavailable. When a user initially downloads the Backup codes, We generate 10 codes, each code can only be consumed once. 
if any user attempts to go over the number of invalid login attempts configured in the Dashboard then the account gets blocked automatically Args: multi_factor_auth_model_by_backup_code: Model Class containing Definition of payload for MultiFactorAuth By BackupCode API second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication fields: The fields parameter filters the API response so that the response only includes a specific set of fields Returns: Complete user UserProfile data 9.14 """ if(multi_factor_auth_model_by_backup_code is None): raise Exception(self._lr_object.get_validation_message("multi_factor_auth_model_by_backup_code")) if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)): raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token if(not self._lr_object.is_null_or_whitespace(fields)): query_parameters["fields"] = fields resource_path = "identity/v2/auth/login/2fa/verification/backupcode" return self._lr_object.execute("PUT", resource_path, query_parameters, multi_factor_auth_model_by_backup_code) def mfa_update_phone_number(self, phone_no2_f_a, second_factor_authentication_token, sms_template2_f_a=None): """This API is used to update (if configured) the phone number used for Multi-factor authentication by sending the verification OTP to the provided phone number Args: phone_no2_f_a: Phone Number For 2FA second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication sms_template2_f_a: SMS Template Name Returns: Response containing Definition for Complete SMS data 9.16 """ if(self._lr_object.is_null_or_whitespace(phone_no2_f_a)): raise Exception(self._lr_object.get_validation_message("phone_no2_f_a")) if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)): raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a body_parameters = {} body_parameters["phoneNo2FA"] = phone_no2_f_a resource_path = "identity/v2/auth/login/2fa" return self._lr_object.execute("PUT", resource_path, query_parameters, body_parameters) def mfa_resend_otp(self, second_factor_authentication_token, sms_template2_f_a=None): """This API is used to resending the verification OTP to the provided phone number Args: second_factor_authentication_token: A Uniquely generated MFA identifier token after successful authentication sms_template2_f_a: SMS Template Name Returns: Response containing Definition for Complete SMS data 9.17 """ if(self._lr_object.is_null_or_whitespace(second_factor_authentication_token)): raise Exception(self._lr_object.get_validation_message("second_factor_authentication_token")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["secondFactorAuthenticationToken"] = second_factor_authentication_token if(not self._lr_object.is_null_or_whitespace(sms_template2_f_a)): query_parameters["smsTemplate2FA"] = sms_template2_f_a resource_path = "identity/v2/auth/login/2fa/resend" return 
self._lr_object.execute("GET", resource_path, query_parameters, None) def mfa_reset_sms_authenticator_by_uid(self, otpauthenticator, uid): """This API resets the SMS Authenticator configurations on a given account via the UID. Args: otpauthenticator: Pass 'otpauthenticator' to remove SMS Authenticator uid: UID, the unified identifier for each user account Returns: Response containing Definition of Delete Request 18.21.1 """ if(self._lr_object.is_null_or_whitespace(uid)): raise Exception(self._lr_object.get_validation_message("uid")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["apiSecret"] = self._lr_object.get_api_secret() query_parameters["uid"] = uid body_parameters = {} body_parameters["otpauthenticator"] = otpauthenticator resource_path = "identity/v2/manage/account/2fa/authenticator" return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters) def mfa_reset_google_authenticator_by_uid(self, googleauthenticator, uid): """This API resets the Google Authenticator configurations on a given account via the UID. Args: googleauthenticator: boolean type value,Enable google Authenticator Code. uid: UID, the unified identifier for each user account Returns: Response containing Definition of Delete Request 18.21.2 """ if(self._lr_object.is_null_or_whitespace(uid)): raise Exception(self._lr_object.get_validation_message("uid")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["apiSecret"] = self._lr_object.get_api_secret() query_parameters["uid"] = uid body_parameters = {} body_parameters["googleauthenticator"] = googleauthenticator resource_path = "identity/v2/manage/account/2fa/authenticator" return self._lr_object.execute("DELETE", resource_path, query_parameters, body_parameters) def mfa_backup_code_by_uid(self, uid): """This API is used to reset the backup codes on a given account via the UID. This API call will generate 10 new codes, each code can only be consumed once. Args: uid: UID, the unified identifier for each user account Returns: Response containing Definition of Complete Backup Code data 18.25 """ if(self._lr_object.is_null_or_whitespace(uid)): raise Exception(self._lr_object.get_validation_message("uid")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["apiSecret"] = self._lr_object.get_api_secret() query_parameters["uid"] = uid resource_path = "identity/v2/manage/account/2fa/backupcode" return self._lr_object.execute("GET", resource_path, query_parameters, None) def mfa_reset_backup_code_by_uid(self, uid): """This API is used to reset the backup codes on a given account via the UID. This API call will generate 10 new codes, each code can only be consumed once. Args: uid: UID, the unified identifier for each user account Returns: Response containing Definition of Complete Backup Code data 18.26 """ if(self._lr_object.is_null_or_whitespace(uid)): raise Exception(self._lr_object.get_validation_message("uid")) query_parameters = {} query_parameters["apiKey"] = self._lr_object.get_api_key() query_parameters["apiSecret"] = self._lr_object.get_api_secret() query_parameters["uid"] = uid resource_path = "identity/v2/manage/account/2fa/backupcode/reset" return self._lr_object.execute("GET", resource_path, query_parameters, None)
# First approach: read the six numbers into a list, then sum the even ones.
Num1 = int(input('Primeiro número: '))
Num2 = int(input('Segundo número: '))
Num3 = int(input('Terceiro número: '))
Num4 = int(input('Quarto número: '))
Num5 = int(input('Quinto número: '))
Num6 = int(input('Sexto número: '))
lista = [Num1, Num2, Num3, Num4, Num5, Num6]
soma = 0
for c in range(0, len(lista)):
    if (lista[c] % 2 == 0):
        soma += lista[c]
print('A soma dos valores pares é {}'.format(soma))

# Second approach: same exercise, summing the even values as they are read.
somas = 0
for c in range(1, 7):
    num = int(input('Digite o {}° valor: '.format(c)))
    if (num % 2 == 0):
        somas += num
print('A soma dos valores pares é {}'.format(somas))
"""Not sure why memory limit exceeded, but this solution works""" class Solution(object): def wordBreak(self, s, wordDict): """ :type s: str :type wordDict: List[str] :rtype: List[str] """ def postProcess(combos, s): if combos is None: return list() else: ans = list() for c in combos: words = list() start = 0 for end in c: words.append(s[start:end]) start = end ans.append(' '.join(words)) return ans dp = [None] * (len(s)+1) dp[0] = [[]] for i in xrange(1, len(dp)): combosFori = list() for j in xrange(i): if dp[j] is not None and s[j:i] in wordDict: for combo in dp[j]: combosFori.append(combo + [i]) if len(combosFori) > 0: dp[i] = combosFori return postProcess(dp[len(s)], s)
class IAMPolicies(): def __init__(self, iam): self.client = iam def _marker_handler(self, marker=None, scope='All'): if marker: response = self.client.list_policies( Scope=scope, OnlyAttached=True, PolicyUsageFilter='PermissionsPolicy', Marker=marker) else: response = self.client.list_policies( Scope=scope, OnlyAttached=True, PolicyUsageFilter='PermissionsPolicy' ) return response def get_policy_document(self, arn, version_id): response = self.client.get_policy_version(PolicyArn=arn, VersionId=version_id) document = response['PolicyVersion']['Document'] return document def get_policies(self): policies = [] marker = None for scope in ['AWS', 'Local']: while True: resp = self._marker_handler(marker, scope) for policy in resp['Policies']: policy['Scope'] = scope policies.append(policy) marker = resp.get('Marker') if not resp['IsTruncated']: break return policies
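# A possible way to exercise IAMPolicies with boto3 (requires AWS credentials
# with iam:ListPolicies and iam:GetPolicyVersion permissions; this harness is
# not part of the class itself):
if __name__ == "__main__":
    import boto3

    helper = IAMPolicies(boto3.client("iam"))
    for policy in helper.get_policies():
        document = helper.get_policy_document(policy["Arn"], policy["DefaultVersionId"])
        print(policy["PolicyName"], policy["Scope"], document["Version"])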
runtime_project='core' editor_project='core-Editor' runtime_project_file='Assembly-CSharp' editor_project_file='Assembly-CSharp-Editor' define='ANDROID' MONO="/Applications/Unity/MonoDevelop.app/Contents/Frameworks/Mono.framework/Versions/Current/bin/mono" MDTOOL="/Applications/Unity/MonoDevelop.app/Contents/MacOS/lib/monodevelop/bin/mdtool.exe" MONO_SOLUTION="Core.sln"
L = 0
heatmap = []
while True:
    try:
        line = [int(x) for x in input()]
        # Pad each row with a 9 on the left and on the right
        heat = [9] + line + [9]
        L = len(heat)
        heatmap.extend(heat)
    except EOFError:
        break

# Add a row of 9s below heatmap, then build bigmap with an extra row of 9s
# above and below so every cell we inspect has in-bounds neighbours.
heatmap = heatmap + (L * [9])
bigmap = (L * [9]) + heatmap + (L * [9])

# Part 1
# Note: index counts over len(heatmap) but addresses bigmap directly, so the
# scan covers the top padding row plus every original cell; the 9s are skipped
# and all four neighbours stay in bounds.
total = []
for index in range(len(heatmap)):
    if bigmap[index] == 9:
        continue

    me = bigmap[index]
    left = bigmap[index - 1]
    right = bigmap[index + 1]
    top = bigmap[index - L]
    bottom = bigmap[index + L]

    if me == left or me == right or me == top or me == bottom:
        continue

    lowest = min(me, left, right, top, bottom)

    if me == lowest:
        total.append(lowest + 1)

print("SUM", sum(total))

# Part 2 (a flood-fill sketch follows below)
# Get the list of indexes of the low points, then from each low point grow the
# basin: go up and as far left/right as possible, go down and as far
# left/right as possible, repeating until no new cells are added.
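# A sketch of Part 2 (not in the original): instead of the row-by-row sweep
# described above, flood-fill each basin from its low point, treating 9s as
# walls, and multiply the three largest basin sizes. It reuses bigmap and L
# from above; the low points are recomputed here for clarity.
def basin_size(start, grid, width):
    seen = {start}
    stack = [start]
    while stack:
        cell = stack.pop()
        for nb in (cell - 1, cell + 1, cell - width, cell + width):
            if 0 <= nb < len(grid) and nb not in seen and grid[nb] != 9:
                seen.add(nb)
                stack.append(nb)
    return len(seen)


low_points = []
for index in range(len(heatmap)):
    me = bigmap[index]
    if me == 9:
        continue
    neighbours = [bigmap[index - 1], bigmap[index + 1], bigmap[index - L], bigmap[index + L]]
    if all(me < nb for nb in neighbours):
        low_points.append(index)

sizes = sorted(basin_size(p, bigmap, L) for p in low_points)
print("BASINS", sizes[-1] * sizes[-2] * sizes[-3])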
# Copyright 2019 the rules_bison authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # SPDX-License-Identifier: Apache-2.0 load( "@rules_bison//bison/internal:versions.bzl", _VERSION_URLS = "VERSION_URLS", ) _GNULIB_VERSION = "788db09a9f88abbef73c97e8d7291c40455336d8" _GNULIB_SHA256 = "4350696d531852118f3735a0e2d1091746388392c27d582f0cc241b6a39fe493" _URL_BASE = "github.com/jmillikin/rules_bison/releases/download/v0.1/bison-gnulib-{}.tar.xz".format(_GNULIB_VERSION) _GNULIB_URLS = [ "https://mirror.bazel.build/" + _URL_BASE, "https://" + _URL_BASE, ] _CONFIG_HEADER = """ #include "gnulib/lib/config.in.h" #include "gnulib/lib/arg-nonnull.h" #define PACKAGE "bison" #define PACKAGE_BUGREPORT "[email protected]" #define PACKAGE_COPYRIGHT_YEAR {BISON_COPYRIGHT_YEAR} #define PACKAGE_NAME "GNU Bison" #define PACKAGE_STRING "GNU Bison {BISON_VERSION}" #define PACKAGE_URL "http://www.gnu.org/software/bison/" #define PACKAGE_VERSION "{BISON_VERSION}" #define VERSION "{BISON_VERSION}" #define M4 "m4" #define M4_GNU_OPTION "--gnu" """ _CONFIG_FOOTER = """ #include <stdint.h> #include <stdio.h> #include <wchar.h> struct obstack; int obstack_printf(struct obstack *obs, const char *format, ...); int obstack_vprintf(struct obstack *obs, const char *format, va_list args); int strverscmp(const char *s1, const char *s2); int wcwidth(wchar_t wc); """ _CONFIGMAKE_H = """ #define LOCALEDIR "" #define PKGDATADIR "{WORKSPACE_ROOT}/data" """ def gnulib_overlay(ctx, bison_version): ctx.download_and_extract( url = _GNULIB_URLS, sha256 = _GNULIB_SHA256, output = "gnulib", stripPrefix = "gnulib-" + _GNULIB_VERSION, ) ctx.symlink(ctx.attr._gnulib_build, "gnulib/BUILD.bazel") config_header = _CONFIG_HEADER.format( BISON_VERSION = bison_version, BISON_COPYRIGHT_YEAR = _VERSION_URLS[bison_version]["copyright_year"], ) for (os, template) in [ ("darwin", ctx.attr._gnulib_config_darwin_h), ("linux", ctx.attr._gnulib_config_linux_h), ("windows", ctx.attr._gnulib_config_windows_h), ]: config_prefix = "gnulib/config-{}/".format(os) ctx.template(config_prefix + "config.h", template, substitutions = { "{GNULIB_CONFIG_HEADER}": config_header, "{GNULIB_CONFIG_FOOTER}": _CONFIG_FOOTER, }, executable = False) ctx.file(config_prefix + "configmake.h", _CONFIGMAKE_H.format( WORKSPACE_ROOT = "external/" + ctx.attr.name, )) for shim in _WINDOWS_STDLIB_SHIMS: in_h = "gnulib/lib/{}.in.h".format(shim.replace("/", "_")) out_h = "gnulib/config-windows/shim-libc/gnulib/{}.h".format(shim) ctx.template(out_h, in_h, substitutions = _WINDOWS_AC_SUBST, executable = False) # Older versions of Gnulib had a different layout for 'bitset' ctx.file("gnulib/lib/bitset_stats.h", '#include "gnulib/lib/bitset/stats.h"') ctx.file("gnulib/lib/bitsetv-print.h", '#include "gnulib/lib/bitsetv.h"') # Fix a mismatch between _Noreturn and __attribute_noreturn__ when # building with a C11-aware GCC. 
ctx.template("gnulib/lib/obstack.c", "gnulib/lib/obstack.c", substitutions = { "static _Noreturn void": "static _Noreturn __attribute_noreturn__ void", }, executable = False) # Ambiguous include path of timevar.def confuses Bazel's C++ header dependency # checker. Work around this by using non-ambiguous paths. ctx.template("gnulib/lib/timevar.c", "gnulib/lib/timevar.c", substitutions = { '"timevar.def"': '"lib/timevar.def"', }, executable = False) ctx.template("gnulib/lib/timevar.h", "gnulib/lib/timevar.h", substitutions = { '"timevar.def"': '"lib/timevar.def"', }, executable = False) # Force isnanl() to be defined in terms of standard isnan() macro, # instead of compiler-specific __builtin_isnan(). ctx.file("gnulib/lib/isnanl-nolibm.h", "\n".join([ "#include <math.h>", "#define isnanl isnan", ])) # Gnulib tries to detect the maximum file descriptor count by passing # an invalid value to an OS API and seeing what happens. Well, what happens # in debug mode is the binary is aborted. # # Per https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/setmaxstdio # the maximum limit of this value is 2048. Lets hope that's good enough. ctx.template("gnulib/lib/getdtablesize.c", "gnulib/lib/getdtablesize.c", substitutions = { "for (bound = 0x10000;": "for (bound = 2048;", }, executable = False) # Gnulib uses spawnvpe() to emulate fork/exec on Windows, but something # about its other environment variable shims conflicts with spawnvpe's # internal environment concatenation. Spawning M4 from Bison in a # release build consistently crashes the process. # # Bison doesn't attempt to manipulate the environment variables of its # child processes, so we can avoid the issue by disabling environment # manipulation in Gnulib's shim. ctx.template("gnulib/lib/spawn-pipe.c", "gnulib/lib/spawn-pipe.c", substitutions = { "(const char **) environ": "NULL", }, executable = False) _WINDOWS_STDLIB_SHIMS = [ "alloca", "errno", "fcntl", "getopt", "getopt-cdefs", "signal", "stdio", "string", "sys/resource", "sys/stat", "sys/time", "sys/times", "sys/types", "sys/wait", "unistd", "unitypes", "uniwidth", "wchar", "wctype", ] _WINDOWS_AC_SUBST = { "@PRAGMA_SYSTEM_HEADER@": "", "@PRAGMA_COLUMNS@": "", "@INCLUDE_NEXT@": "include", "@GUARD_PREFIX@": "GL_BISON", "@ASM_SYMBOL_PREFIX@": '""', "/* The definitions of _GL_FUNCDECL_RPL etc. are copied here. */": '#include "gnulib/lib/c++defs.h"', "/* The definition of _GL_ARG_NONNULL is copied here. */": '#include "gnulib/lib/arg-nonnull.h"', "/* The definition of _GL_WARN_ON_USE is copied here. 
*/": '#include "gnulib/lib/warn-on-use.h"', # alloca.h # errno.h "@NEXT_ERRNO_H@": "<gnulib-system-libc/errno.h>", "@EMULTIHOP_HIDDEN@": "0", "@EMULTIHOP_VALUE@": "EMULTIHOP", "@ENOLINK_HIDDEN@": "0", "@ENOLINK_VALUE@": "ENOLINK", "@EOVERFLOW_HIDDEN@": "0", "@EOVERFLOW_VALUE@": "EOVERFLOW", # fcntl.h "@NEXT_FCNTL_H@": "<gnulib-system-libc/fcntl.h>", "@GNULIB_FCNTL@": "1", "@REPLACE_FCNTL@": "0", "@HAVE_FCNTL@": "0", "@GNULIB_OPEN@": "1", "@REPLACE_OPEN@": "0", "@GNULIB_OPENAT@": "0", "@REPLACE_OPENAT@": "0", "@HAVE_OPENAT@": "1", "@GNULIB_NONBLOCKING@": "0", # getopt.h "@HAVE_GETOPT_H@": "0", "@NEXT_GETOPT_H@": "<gnulib-system-libc/getopt.h>", # getopt-cdefs.h "@HAVE_SYS_CDEFS_H@": "0", # signal.h "@NEXT_SIGNAL_H@": "<gnulib-system-libc/signal.h>", "@GNULIB_PTHREAD_SIGMASK@": "0", "@HAVE_TYPE_VOLATILE_SIG_ATOMIC_T@": "1", "@HAVE_SIGSET_T@": "0", "@HAVE_SIGHANDLER_T@": "0", "@GNULIB_SIGNAL_H_SIGPIPE@": "1", "@REPLACE_PTHREAD_SIGMASK@": "0", "@HAVE_PTHREAD_SIGMASK@": "1", "@GNULIB_RAISE@": "1", "@REPLACE_RAISE@": "1", "@HAVE_RAISE@": "1", "@GNULIB_SIGPROCMASK@": "1", "@HAVE_POSIX_SIGNALBLOCKING@": "0", "@GNULIB_SIGACTION@": "1", "@HAVE_SIGACTION@": "0", "@HAVE_SIGINFO_T@": "0", "@HAVE_STRUCT_SIGACTION_SA_SIGACTION@": "1", # stdio.h "@NEXT_STDIO_H@": "<gnulib-system-libc/stdio.h>", "@GNULIB_RENAMEAT@": "0", "@GNULIB_PERROR@": "0", "@GNULIB_REMOVE@": "0", "@GNULIB_RENAME@": "0", "@GNULIB_DPRINTF@": "0", "@REPLACE_DPRINTF@": "0", "@HAVE_DPRINTF@": "1", "@GNULIB_FCLOSE@": "0", "@REPLACE_FCLOSE@": "0", "@GNULIB_FDOPEN@": "0", "@REPLACE_FDOPEN@": "1", "@GNULIB_FFLUSH@": "0", "@REPLACE_FFLUSH@": "0", "@GNULIB_FGETC@": "1", "@REPLACE_STDIO_READ_FUNCS@": "0", "@GNULIB_STDIO_H_NONBLOCKING@": "0", "@GNULIB_FGETS@": "1", "@GNULIB_FOPEN@": "1", "@REPLACE_FOPEN@": "0", "@GNULIB_FPRINTF_POSIX@": "0", "@GNULIB_FPRINTF@": "1", "@REPLACE_FPRINTF@": "0", "@REPLACE_STDIO_WRITE_FUNCS@": "0", "@GNULIB_STDIO_H_SIGPIPE@": "0", "@GNULIB_VFPRINTF_POSIX@": "0", "@GNULIB_FPURGE@": "1", "@REPLACE_FPURGE@": "0", "@HAVE_DECL_FPURGE@": "0", "@GNULIB_FPUTC@": "1", "@GNULIB_FREAD@": "1", "@GNULIB_FREOPEN@": "0", "@REPLACE_FREOPEN@": "0", "@GNULIB_FSCANF@": "1", "@GNULIB_FSEEK@": "0", "@REPLACE_FSEEK@": "0", "@GNULIB_FSEEKO@": "0", "@REPLACE_FSEEKO@": "0", "@HAVE_DECL_FSEEKO@": "0", "@GNULIB_FTELL@": "0", "@REPLACE_FTELL@": "0", "@GNULIB_FTELLO@": "0", "@REPLACE_FTELLO@": "0", "@HAVE_DECL_FTELLO@": "0", "@GNULIB_FWRITE@": "0", "@GNULIB_GETC@": "1", "@GNULIB_GETCHAR@": "1", "@GNULIB_GETDELIM@": "0", "@REPLACE_GETDELIM@": "0", "@HAVE_DECL_GETDELIM@": "1", "@GNULIB_GETLINE@": "0", "@REPLACE_GETLINE@": "0", "@HAVE_DECL_GETLINE@": "1", "@GNULIB_OBSTACK_PRINTF@": "1", "@GNULIB_OBSTACK_PRINTF_POSIX@": "0", "@REPLACE_OBSTACK_PRINTF@": "0", "@HAVE_DECL_OBSTACK_PRINTF@": "0", "@GNULIB_PCLOSE@": "0", "@HAVE_PCLOSE@": "1", "@REPLACE_PERROR@": "0", "@GNULIB_POPEN@": "0", "@REPLACE_POPEN@": "0", "@HAVE_POPEN@": "1", "@GNULIB_PRINTF_POSIX@": "0", "@GNULIB_PRINTF@": "1", "@REPLACE_PRINTF@": "0", "@GNULIB_PUTC@": "1", "@GNULIB_PUTCHAR@": "1", "@GNULIB_PUTS@": "1", "@REPLACE_RENAME@": "0", "@REPLACE_RENAMEAT@": "0", "@HAVE_RENAMEAT@": "1", "@GNULIB_SCANF@": "1", "@GNULIB_SNPRINTF@": "1", "@REPLACE_SNPRINTF@": "1", "@HAVE_DECL_SNPRINTF@": "1", "@GNULIB_SPRINTF_POSIX@": "0", "@REPLACE_SPRINTF@": "0", "@GNULIB_TMPFILE@": "0", "@REPLACE_TMPFILE@": "0", "@GNULIB_VASPRINTF@": "1", "@REPLACE_VASPRINTF@": "0", "@HAVE_VASPRINTF@": "0", "@GNULIB_VDPRINTF@": "0", "@REPLACE_VDPRINTF@": "0", "@HAVE_VDPRINTF@": "1", "@GNULIB_VFSCANF@": "0", 
"@GNULIB_FPUTS@": "1", "@REPLACE_REMOVE@": "0", "@GNULIB_VFPRINTF@": "1", "@REPLACE_VFPRINTF@": "0", "@GNULIB_VPRINTF_POSIX@": "0", "@GNULIB_VSCANF@": "0", "@GNULIB_VSNPRINTF@": "0", "@REPLACE_VSNPRINTF@": "0", "@HAVE_DECL_VSNPRINTF@": "1", "@GNULIB_VSPRINTF_POSIX@": "0", "@REPLACE_VSPRINTF@": "0", "@GNULIB_VPRINTF@": "1", "@REPLACE_VPRINTF@": "0", # string.h "@NEXT_STRING_H@": "<gnulib-system-libc/string.h>", "@GNULIB_MBSLEN@": "0", "@GNULIB_STRSIGNAL@": "1", "@GNULIB_FFSL@": "0", "@HAVE_FFSL@": "1", "@GNULIB_FFSLL@": "0", "@HAVE_FFSLL@": "1", "@GNULIB_MEMCHR@": "1", "@REPLACE_MEMCHR@": "0", "@HAVE_MEMCHR@": "1", "@GNULIB_MEMMEM@": "0", "@REPLACE_MEMMEM@": "0", "@HAVE_DECL_MEMMEM@": "1", "@GNULIB_MEMPCPY@": "0", "@HAVE_MEMPCPY@": "1", "@GNULIB_MEMRCHR@": "0", "@HAVE_DECL_MEMRCHR@": "1", "@GNULIB_RAWMEMCHR@": "1", "@HAVE_RAWMEMCHR@": "0", "@GNULIB_STPCPY@": "1", "@HAVE_STPCPY@": "0", "@GNULIB_STPNCPY@": "0", "@REPLACE_STPNCPY@": "0", "@HAVE_STPNCPY@": "1", "@GNULIB_STRCHRNUL@": "1", "@REPLACE_STRCHRNUL@": "0", "@HAVE_STRCHRNUL@": "0", "@GNULIB_STRDUP@": "0", "@REPLACE_STRDUP@": "1", "@HAVE_DECL_STRDUP@": "1", "@GNULIB_STRNCAT@": "0", "@REPLACE_STRNCAT@": "0", "@GNULIB_STRNDUP@": "1", "@REPLACE_STRNDUP@": "0", "@HAVE_DECL_STRNDUP@": "0", "@GNULIB_STRNLEN@": "1", "@REPLACE_STRNLEN@": "0", "@HAVE_DECL_STRNLEN@": "1", "@GNULIB_STRPBRK@": "0", "@HAVE_STRPBRK@": "1", "@GNULIB_STRSEP@": "0", "@HAVE_STRSEP@": "1", "@GNULIB_STRSTR@": "1", "@REPLACE_STRSTR@": "1", "@GNULIB_STRCASESTR@": "0", "@REPLACE_STRCASESTR@": "0", "@HAVE_STRCASESTR@": "1", "@GNULIB_STRTOK_R@": "0", "@REPLACE_STRTOK_R@": "0", "@UNDEFINE_STRTOK_R@": "0", "@HAVE_DECL_STRTOK_R@": "1", "@HAVE_MBSLEN@": "0", "@GNULIB_MBSNLEN@": "0", "@GNULIB_MBSCHR@": "0", "@GNULIB_MBSRCHR@": "0", "@GNULIB_MBSSTR@": "0", "@GNULIB_MBSCASECMP@": "0", "@GNULIB_MBSNCASECMP@": "0", "@GNULIB_MBSPCASECMP@": "0", "@GNULIB_MBSCASESTR@": "0", "@GNULIB_MBSCSPN@": "0", "@GNULIB_MBSPBRK@": "0", "@GNULIB_MBSSPN@": "0", "@GNULIB_MBSSEP@": "0", "@GNULIB_MBSTOK_R@": "0", "@GNULIB_STRERROR@": "1", "@REPLACE_STRERROR@": "1", "@GNULIB_STRERROR_R@": "1", "@REPLACE_STRERROR_R@": "0", "@HAVE_DECL_STRERROR_R@": "0", "@REPLACE_STRSIGNAL@": "0", "@HAVE_DECL_STRSIGNAL@": "0", "@GNULIB_STRVERSCMP@": "1", "@HAVE_STRVERSCMP@": "0", "@GNULIB_EXPLICIT_BZERO@": "0", "@HAVE_EXPLICIT_BZERO@": "1", # sys/resource.h "@HAVE_SYS_RESOURCE_H@": "0", "@GNULIB_GETRUSAGE@": "1", "@HAVE_GETRUSAGE@": "0", # sys/stat.h "@NEXT_SYS_STAT_H@": "<gnulib-system-libc/sys/stat.h>", "@WINDOWS_64_BIT_ST_SIZE@": "1", "@GNULIB_FCHMODAT@": "0", "@HAVE_FCHMODAT@": "1", "@GNULIB_FSTAT@": "1", "@REPLACE_FSTAT@": "1", "@GNULIB_FSTATAT@": "0", "@REPLACE_FSTATAT@": "0", "@HAVE_FSTATAT@": "1", "@GNULIB_FUTIMENS@": "0", "@REPLACE_FUTIMENS@": "0", "@HAVE_FUTIMENS@": "1", "@GNULIB_LCHMOD@": "0", "@HAVE_LCHMOD@": "1", "@GNULIB_LSTAT@": "1", "@HAVE_LSTAT@": "0", "@REPLACE_LSTAT@": "0", "@REPLACE_MKDIR@": "0", "@GNULIB_MKDIRAT@": "0", "@HAVE_MKDIRAT@": "1", "@GNULIB_MKFIFO@": "0", "@REPLACE_MKFIFO@": "0", "@HAVE_MKFIFO@": "1", "@GNULIB_MKFIFOAT@": "0", "@HAVE_MKFIFOAT@": "1", "@GNULIB_MKNOD@": "0", "@REPLACE_MKNOD@": "0", "@HAVE_MKNOD@": "1", "@GNULIB_MKNODAT@": "0", "@HAVE_MKNODAT@": "1", "@GNULIB_STAT@": "1", "@REPLACE_STAT@": "0", "@GNULIB_UTIMENSAT@": "0", "@REPLACE_UTIMENSAT@": "0", "@HAVE_UTIMENSAT@": "1", "@GNULIB_OVERRIDES_STRUCT_STAT@": "0", "@WINDOWS_STAT_TIMESPEC@": "0", # sys/time.h "@NEXT_SYS_TIME_H@": "<gnulib-system-libc/sys/time.h>", "@HAVE_SYS_TIME_H@": "0", "@REPLACE_STRUCT_TIMEVAL@": "1", 
"@HAVE_WINSOCK2_H@": "1", "@HAVE_STRUCT_TIMEVAL@": "1", "@GNULIB_GETTIMEOFDAY@": "1", "@REPLACE_GETTIMEOFDAY@": "0", "@HAVE_GETTIMEOFDAY@": "0", # sys/times.h "@HAVE_SYS_TIMES_H@": "0", "@NEXT_SYS_TIMES_H@": "<gnulib-system-libc/sys/times.h>", "@HAVE_STRUCT_TMS@": "0", "@GNULIB_TIMES@": "0", "@HAVE_TIMES@": "1", # sys/types.h "@NEXT_SYS_TYPES_H@": "<gnulib-system-libc/sys/types.h>", "@WINDOWS_STAT_INODES@": "0", # sys/wait.h "@NEXT_SYS_WAIT_H@": "<gnulib-system-libc/sys/wait.h>", "@GNULIB_WAITPID@": "1", # unistd.h "@NEXT_UNISTD_H@": "<gnulib-system-libc/unistd.h>", "@HAVE_UNISTD_H@": "0", "@GNULIB_GETHOSTNAME@": "0", "@UNISTD_H_HAVE_WINSOCK2_H@": "1", "@GNULIB_UNLINK@": "0", "@GNULIB_SYMLINKAT@": "0", "@GNULIB_UNLINKAT@": "0", "@GNULIB_CHDIR@": "1", "@GNULIB_CLOSE@": "1", "@GNULIB_DUP@": "0", "@GNULIB_DUP2@": "1", "@GNULIB_ISATTY@": "0", "@GNULIB_LSEEK@": "1", "@GNULIB_READ@": "0", "@GNULIB_WRITE@": "0", "@GNULIB_GETDOMAINNAME@": "0", "@WINDOWS_64_BIT_OFF_T@": "1", "@GNULIB_READLINK@": "1", "@HAVE_READLINK@": "0", "@GNULIB_READLINKAT@": "0", "@GNULIB_PREAD@": "0", "@GNULIB_PWRITE@": "0", "@GNULIB_UNISTD_H_GETOPT@": "1", "@GNULIB_CHOWN@": "0", "@REPLACE_CHOWN@": "0", "@HAVE_CHOWN@": "1", "@REPLACE_CLOSE@": "1", "@UNISTD_H_HAVE_WINSOCK2_H_AND_USE_SOCKETS@": "0", "@REPLACE_DUP@": "1", "@REPLACE_DUP2@": "1", "@GNULIB_DUP3@": "0", "@HAVE_DUP3@": "1", "@GNULIB_ENVIRON@": "1", "@HAVE_DECL_ENVIRON@": "1", "@GNULIB_EUIDACCESS@": "0", "@HAVE_EUIDACCESS@": "1", "@GNULIB_FACCESSAT@": "0", "@HAVE_FACCESSAT@": "1", "@GNULIB_FCHDIR@": "0", "@HAVE_FCHDIR@": "1", "@HAVE_DECL_FCHDIR@": "1", "@GNULIB_FCHOWNAT@": "0", "@REPLACE_FCHOWNAT@": "0", "@HAVE_FCHOWNAT@": "1", "@GNULIB_FDATASYNC@": "0", "@HAVE_FDATASYNC@": "1", "@HAVE_DECL_FDATASYNC@": "1", "@GNULIB_FSYNC@": "0", "@HAVE_FSYNC@": "1", "@GNULIB_FTRUNCATE@": "0", "@HAVE_FTRUNCATE@": "1", "@GNULIB_GETCWD@": "0", "@REPLACE_GETCWD@": "1", "@REPLACE_GETDOMAINNAME@": "0", "@HAVE_DECL_GETDOMAINNAME@": "1", "@GNULIB_GETDTABLESIZE@": "1", "@REPLACE_GETDTABLESIZE@": "0", "@HAVE_GETDTABLESIZE@": "0", "@GNULIB_GETGROUPS@": "0", "@HAVE_GETGROUPS@": "1", "@HAVE_GETHOSTNAME@": "1", "@GNULIB_GETLOGIN@": "0", "@HAVE_DECL_GETLOGIN@": "1", "@GNULIB_GETLOGIN_R@": "0", "@REPLACE_GETLOGIN_R@": "0", "@HAVE_DECL_GETLOGIN_R@": "1", "@GNULIB_GETPAGESIZE@": "0", "@REPLACE_GETPAGESIZE@": "0", "@HAVE_GETPAGESIZE@": "0", "@HAVE_OS_H@": "0", "@HAVE_SYS_PARAM_H@": "0", "@HAVE_DECL_GETPAGESIZE@": "0", "@GNULIB_GETUSERSHELL@": "0", "@HAVE_DECL_GETUSERSHELL@": "1", "@GNULIB_GROUP_MEMBER@": "0", "@HAVE_GROUP_MEMBER@": "1", "@REPLACE_ISATTY@": "0", "@GNULIB_LCHOWN@": "0", "@REPLACE_LCHOWN@": "0", "@HAVE_LCHOWN@": "1", "@GNULIB_LINK@": "0", "@REPLACE_LINK@": "0", "@HAVE_LINK@": "0", "@GNULIB_LINKAT@": "0", "@REPLACE_LINKAT@": "0", "@HAVE_LINKAT@": "1", "@REPLACE_LSEEK@": "0", "@GNULIB_PIPE@": "0", "@HAVE_PIPE@": "1", "@GNULIB_PIPE2@": "1", "@HAVE_PIPE2@": "0", "@REPLACE_PREAD@": "0", "@HAVE_PREAD@": "1", "@REPLACE_PWRITE@": "0", "@HAVE_PWRITE@": "1", "@REPLACE_READ@": "0", "@REPLACE_READLINK@": "0", "@REPLACE_READLINKAT@": "0", "@HAVE_READLINKAT@": "1", "@GNULIB_RMDIR@": "1", "@REPLACE_RMDIR@": "1", "@GNULIB_SETHOSTNAME@": "0", "@HAVE_SETHOSTNAME@": "1", "@HAVE_DECL_SETHOSTNAME@": "1", "@GNULIB_SLEEP@": "0", "@REPLACE_SLEEP@": "0", "@HAVE_SLEEP@": "0", "@GNULIB_SYMLINK@": "0", "@REPLACE_SYMLINK@": "0", "@HAVE_SYMLINK@": "0", "@REPLACE_SYMLINKAT@": "0", "@HAVE_SYMLINKAT@": "1", "@GNULIB_TTYNAME_R@": "0", "@REPLACE_TTYNAME_R@": "0", "@HAVE_DECL_TTYNAME_R@": "1", "@REPLACE_UNLINK@": "0", 
"@REPLACE_UNLINKAT@": "0", "@HAVE_UNLINKAT@": "1", "@GNULIB_USLEEP@": "0", "@REPLACE_USLEEP@": "0", "@HAVE_USLEEP@": "1", "@REPLACE_WRITE@": "1", "@HAVE_DUP2@": "1", "@REPLACE_FACCESSAT@": "0", "@REPLACE_FTRUNCATE@": "0", "@REPLACE_GETGROUPS@": "0", "@GNULIB_GETPASS@": "0", "@REPLACE_GETPASS@": "0", "@HAVE_GETPASS@": "1", "@GNULIB_TRUNCATE@": "0", "@REPLACE_TRUNCATE@": "0", "@HAVE_DECL_TRUNCATE@": "1", # wchar.h "@HAVE_WCHAR_H@": "1", "@NEXT_WCHAR_H@": "<gnulib-system-libc/wchar.h>", "@HAVE_FEATURES_H@": "0", "@HAVE_WINT_T@": "1", "@GNULIB_OVERRIDES_WINT_T@": "1", "@HAVE_MBSINIT@": "0", "@HAVE_MBRTOWC@": "1", "@REPLACE_MBSTATE_T@": "1", "@GNULIB_BTOWC@": "1", "@REPLACE_BTOWC@": "0", "@HAVE_BTOWC@": "1", "@GNULIB_WCTOB@": "0", "@REPLACE_WCTOB@": "1", "@HAVE_DECL_WCTOB@": "1", "@GNULIB_MBSINIT@": "1", "@REPLACE_MBSINIT@": "1", "@GNULIB_MBRTOWC@": "1", "@REPLACE_MBRTOWC@": "1", "@GNULIB_MBRLEN@": "0", "@REPLACE_MBRLEN@": "0", "@HAVE_MBRLEN@": "1", "@GNULIB_MBSRTOWCS@": "0", "@REPLACE_MBSRTOWCS@": "0", "@HAVE_MBSRTOWCS@": "1", "@GNULIB_MBSNRTOWCS@": "0", "@REPLACE_MBSNRTOWCS@": "0", "@HAVE_MBSNRTOWCS@": "1", "@GNULIB_WCRTOMB@": "1", "@REPLACE_WCRTOMB@": "1", "@HAVE_WCRTOMB@": "1", "@GNULIB_WCSRTOMBS@": "0", "@REPLACE_WCSRTOMBS@": "0", "@HAVE_WCSRTOMBS@": "1", "@GNULIB_WCSNRTOMBS@": "0", "@REPLACE_WCSNRTOMBS@": "0", "@HAVE_WCSNRTOMBS@": "1", "@GNULIB_WCWIDTH@": "1", "@REPLACE_WCWIDTH@": "0", "@HAVE_DECL_WCWIDTH@": "0", "@GNULIB_WMEMCHR@": "0", "@HAVE_WMEMCHR@": "1", "@GNULIB_WMEMCMP@": "0", "@HAVE_WMEMCMP@": "1", "@GNULIB_WMEMCPY@": "0", "@HAVE_WMEMCPY@": "1", "@GNULIB_WMEMMOVE@": "0", "@HAVE_WMEMMOVE@": "1", "@GNULIB_WMEMSET@": "0", "@HAVE_WMEMSET@": "1", "@GNULIB_WCSLEN@": "0", "@HAVE_WCSLEN@": "1", "@GNULIB_WCSNLEN@": "0", "@HAVE_WCSNLEN@": "1", "@GNULIB_WCSCPY@": "0", "@HAVE_WCSCPY@": "1", "@GNULIB_WCPCPY@": "0", "@HAVE_WCPCPY@": "1", "@GNULIB_WCSNCPY@": "0", "@HAVE_WCSNCPY@": "1", "@GNULIB_WCPNCPY@": "0", "@HAVE_WCPNCPY@": "1", "@GNULIB_WCSCAT@": "0", "@HAVE_WCSCAT@": "1", "@GNULIB_WCSNCAT@": "0", "@HAVE_WCSNCAT@": "1", "@GNULIB_WCSCMP@": "0", "@HAVE_WCSCMP@": "1", "@GNULIB_WCSNCMP@": "0", "@HAVE_WCSNCMP@": "1", "@GNULIB_WCSCASECMP@": "0", "@HAVE_WCSCASECMP@": "1", "@GNULIB_WCSNCASECMP@": "0", "@HAVE_WCSNCASECMP@": "1", "@GNULIB_WCSCOLL@": "0", "@HAVE_WCSCOLL@": "1", "@GNULIB_WCSXFRM@": "0", "@HAVE_WCSXFRM@": "1", "@GNULIB_WCSDUP@": "0", "@HAVE_WCSDUP@": "1", "@GNULIB_WCSCHR@": "0", "@HAVE_WCSCHR@": "1", "@GNULIB_WCSRCHR@": "0", "@HAVE_WCSRCHR@": "1", "@GNULIB_WCSCSPN@": "0", "@HAVE_WCSCSPN@": "1", "@GNULIB_WCSSPN@": "0", "@HAVE_WCSSPN@": "1", "@GNULIB_WCSPBRK@": "0", "@HAVE_WCSPBRK@": "1", "@GNULIB_WCSSTR@": "0", "@HAVE_WCSSTR@": "1", "@GNULIB_WCSTOK@": "0", "@HAVE_WCSTOK@": "1", "@GNULIB_WCSWIDTH@": "0", "@REPLACE_WCSWIDTH@": "0", "@HAVE_WCSWIDTH@": "1", "@HAVE_CRTDEFS_H@": "1", "@GNULIB_WCSFTIME@": "0", "@REPLACE_WCSFTIME@": "0", "@HAVE_WCSFTIME@": "1", # wctype.h "@NEXT_WCTYPE_H@": "<gnulib-system-libc/wctype.h>", "@HAVE_WCTYPE_H@": "1", "@HAVE_ISWCNTRL@": "1", "@REPLACE_ISWCNTRL@": "0", "@REPLACE_TOWLOWER@": "0", "@GNULIB_ISWBLANK@": "0", "@HAVE_ISWBLANK@": "1", "@REPLACE_ISWBLANK@": "0", "@HAVE_WCTYPE_T@": "1", "@GNULIB_WCTYPE@": "0", "@GNULIB_ISWCTYPE@": "0", "@HAVE_WCTRANS_T@": "1", "@GNULIB_WCTRANS@": "0", "@GNULIB_TOWCTRANS@": "0", }
test = { 'name': 'q1_19', 'points': 1, 'suites': [ { 'cases': [ { 'code': r""" >>> -1 <= observed_diff_proportion <= 1 True """, 'hidden': False, 'locked': False }, { 'code': r""" >>> # The observed difference in proportion should be about 0.219; >>> np.round(observed_diff_proportion, 3) == 0.219 True """, 'hidden': False, 'locked': False } ], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest' } ] }
# -*- coding: utf-8 -*-
"""
====================================================
Author: Johann Gordillo
Email: [email protected]
====================================================
Custom exceptions for the program.
====================================================
"""


class InvalidLocationError(Exception):
    """
    Error raised when the location coordinates given
    to the program are invalid.
    """
    pass


class SaturatedServerError(Exception):
    """
    Error raised when the server providing the web service
    is saturated by too many requests.
    """
    pass
def convert(key): if key == " ": return " " elif key == "ক": return "क" elif key == "খ": return "ख" elif key == "গ": return "ग" elif key == "ঘ": return "घ" elif key == "ঙ": return "ङ" elif key == "চ": return "च" elif key == "ছ": return "छ" elif key == "জ": return "ज" elif key == "ঝ": return "झ" elif key == "ঞ": return "ञ" elif key == "ট": return "ट" elif key == "ঠ": return "ठ" elif key == "ড": return "ड" elif key == "ঢ": return "ढ" elif key == "ণ": return "ण" elif key == "ত": return "त" elif key == "থ": return "थ" elif key == "দ": return "द" elif key == "ধ": return "ध" elif key == "ন": return "न" elif key == "প": return "प" elif key == "ফ": return "फ" elif key == "ব": return "व" elif key == "ভ": return "भ" elif key == "ম": return "म" elif key == "য": return "य" elif key == "র": return "र" elif key == "ল": return "ल" elif key == "শ": return "श" elif key == "ষ": return "ष" elif key == "স": return "स" elif key == "হ": return "ह" # somehow this particular logic doesn't work elif key == "য়": return "य" elif key == "ড়": return "र" elif key == "ঢ়": return "र" elif key == "অ": return "अ" elif key == "আ": return "आ" elif key == "ই": return "इ" elif key == "ঈ": return "ई" elif key == "উ": return "उ" elif key == "ঊ": return "ऊ" elif key == "ঋ": return "ऋ" elif key == "ৠ": return "ॠ" elif key == "এ": return "ए" elif key == "ঐ": return "ऐ" elif key == "ও": return "ओ" elif key == "ঔ": return "औ" elif key == "া": return "ा" elif key == "ি": return "ि" elif key == "ী": return "ी" elif key == "ু": return "ु" elif key == "ূ": return "ू" elif key == "ৃ": return "ृ" elif key == "ৄ": return "ृ" elif key == "ে": return "े" elif key == "ৈ": return "ै" elif key == "ো": return "ो" elif key == "ৌ": return "ौ" elif key == "ং": return "ं" elif key == "।": return "।" elif key == ",": return "," elif key == ":": return ":" elif key == ";": return ";" elif key == "০": return "०" elif key == "১": return "१" elif key == "২": return "२" elif key == "৩": return "३" elif key == "৪": return "४" elif key == "৫": return "५" elif key == "৬": return "६" elif key == "৭": return "७" elif key == "৮": return "८" elif key == "৯": return "९"
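# The "doesn't work" comment above is likely a Unicode issue: য়, ড় and ঢ় can be
# encoded either as single precomposed code points (U+09DF, U+09DC, U+09DD) or as
# a base letter plus the nukta sign (U+09BC), and a per-character equality test
# only matches one of the two spellings. The sketch below is an assumption, not
# part of the original file: it normalizes the text and replaces the two-code-point
# spellings explicitly before falling back to convert() for single characters.
import unicodedata

# Base letter + nukta spellings, mapped to the same Devanagari targets used above.
_NUKTA_FORMS = {
    "\u09af\u09bc": "य",  # য + nukta (য়)
    "\u09a1\u09bc": "र",  # ড + nukta (ড়)
    "\u09a2\u09bc": "र",  # ঢ + nukta (ঢ়)
}


def convert_text(text):
    # NFC keeps vowel signs such as ো precomposed, but U+09DC/U+09DD/U+09DF are
    # Unicode composition exclusions, so after NFC they appear as base + nukta
    # and are handled by the explicit replacements below.
    text = unicodedata.normalize("NFC", text)
    for src, dst in _NUKTA_FORMS.items():
        text = text.replace(src, dst)
    # Unmapped characters (convert() returns None) are passed through unchanged.
    return "".join(convert(ch) or ch for ch in text)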
""" Problem statement: Write a function to simulate the rock, paper, scissors game between Abigail and Benson. Problem Link: https://edabit.com/challenge/p6uXeD7JC7cmxeD2Z Abigail and Benson are playing Rock, Paper, Scissors. Each game is represented by an array of length 2, where the first element represents what Abigail played and the second element represents what Benson played. Given a sequence of games, determine who wins the most number of matches. If they tie, output "Tie". R stands for Rock P stands for Paper S stands for Scissors Examples calculate_score([["R", "P"], ["R", "S"], ["S", "P"]]) ➞ "Abigail" # Ben wins the first game (Paper beats Rock). # Abigail wins the second game (Rock beats Scissors). # Abigail wins the third game (Scissors beats Paper). # Abigail wins 2/3. calculate_score([["R", "R"], ["S", "S"]]) ➞ "Tie" calculate_score([["S", "R"], ["R", "S"], ["R", "R"]]) ➞ "Tie" """ def first_player_wins(a, b): """ If tie : Returns 0 If first player wins : Returns 1 If second player wins : Returns -1 """ if a == b: return 0 elif [a, b] == ["R", "S"] or [a, b] == ["S", "P"] or [a, b] == ["P", "R"]: return 1 return -1 def calculate_score(game): abigail = benson = 0 for turn in game: decision = first_player_wins(turn[0], turn[1]) if decision == 1: abigail += 1 elif decision == -1: benson += 1 if abigail == benson: return "Tie" elif abigail > benson: return "Abigail" return "Benson"
# Enter your code for "Hello with attitude" here. name = input("What is your name? ") print("So you call yourself '" + name + "' huh?")
# https://www.codewars.com/kata/59e49b2afc3c494d5d00002a/train/python
def sort_vowels(s):
    if isinstance(s, int) or s is None:
        return ''
    vowels = ['a', 'e', 'u', 'i', 'o']
    output = []
    for letter in s:
        if letter.lower() in vowels:
            output.append(f'|{letter}')
        else:
            output.append(f'{letter}|')
    return '\n'.join(output)
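# Shape of the output, per the code above (the example string is illustrative):
# sort_vowels("Codewars") returns
# "C|\n|o\nd|\n|e\nw|\n|a\nr|\ns|"
# i.e. vowels are prefixed with '|' and all other characters are suffixed with '|'.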
#!/usr/bin/python3 # The MDPs consists of a range of integers 0..stateMax which represent # the states of the MDP, a set of actions. The rewards and transition # probabilities are accessed with some of the functions below defined # for the Python classes that represent MDPs. # # - The __init__ constructor builds the state of the MDP, possibly # with additional configuration options. # - applicableActions(state) returns all actions possible in a state. # - successors(state,action) returns the information about possible # successor state of a state. It is a triple (s,p,r) where # - s is a successor state, # - p is the probability of reaching s from state, and # - r is the reward/cost when going from state to s. # - stateMax is the maximum state index. The minimum is 0. # - show() visualizes the MDP in whatever way it can be visualized. # The example MDP class provided for this exercise is a grid navigation # problem, in which an agent can move to the four cardinal directions # in a finite rectangular grid. # The move actions are nondeterministic: with 0.8 probability the move # is to the nominal direction (N,S,E,W), but with 0.1+0.1 probabilities # the move is to one of the direction 90 degrees off the nominal direction. # So, when trying to move North, with probability 0.8 the move actually # is to North, but it will be to the East with probability 0.1 and to # the West with probability 0.1. # Grid cells are associated with rewards/costs, obtained when reaching # the cell. 99 is a special number in the grid cell which indicates that # the cell cannot be entered. Moves to these 99 cells or against the outside # wall of the grid will result in the agent not moving anywhere. # The example MDP has a 'teleport' feature which may be turned on when # creating the MDP: all moves from the NE corner will lead to the SW corner. class GridMDP: def __init__(self, xs, ys, cells, teleport=False): self.xSize = xs # number of columns self.ySize = ys # number of rows self.stateMax = xs * ys - 1 # index of last (SE corner) cell self.grid = cells # List for rewards/costs of all cells self.teleport = teleport NORTH = 1 SOUTH = 2 WEST = 3 EAST = 4 ACTIONS = [NORTH, SOUTH, WEST, EAST] def turnleft(self, a): if a == self.NORTH: return self.WEST elif a == self.WEST: return self.SOUTH elif a == self.SOUTH: return self.EAST else: return self.NORTH def turnright(self, a): if a == self.NORTH: return self.EAST elif a == self.EAST: return self.SOUTH elif a == self.SOUTH: return self.WEST else: return self.NORTH def actionName(self, a): if a == self.NORTH: return "N" elif a == self.SOUTH: return "S" elif a == self.EAST: return "E" else: return "W" def possible(self, action, state): if self.grid[state] == 99: return False else: return True def applicableActions(self, state): return [x for x in self.ACTIONS if self.possible(x, state)] # For every state and action, compute list of (state',P,R) # where state' is a successor of state # P is the probability of reaching state' # R is the reward obtained when reaching state' # Triples with the same state' will be merged. # The sum of the probabilities P is always 1. 
def addmove(self, state, direction, prob, dict): if direction == self.NORTH and state >= self.xSize and self.grid[state - self.xSize] != 99: state2 = state - self.xSize elif direction == self.SOUTH and state <= self.stateMax - self.xSize and self.grid[state + self.xSize] != 99: state2 = state + self.xSize elif direction == self.EAST and (state + 1) % self.xSize > 0 and self.grid[state + 1] != 99: state2 = state + 1 elif direction == self.WEST and state % self.xSize > 0 and self.grid[state - 1] != 99: state2 = state - 1 else: state2 = state if self.teleport and state == self.xSize - 1: # Teleport from the NE corner state2 = self.stateMax - self.xSize + 1 # to the SW corner reward = self.grid[state2] if state2 in dict: tmp = dict[state2] dict[state2] = (tmp[0] + prob, reward) # Sum the probabilities when merging. else: dict[state2] = (prob, reward) # Compute all successor state of state, with their probabilities and rewards def successors(self, state, action): dict = {} self.addmove(state, self.turnleft(action), 0.1, dict), self.addmove(state, self.turnright(action), 0.1, dict), self.addmove(state, action, 0.8, dict) succlist = [] for state2, value in dict.items(): tmp = (state2, value[0], value[1]) succlist.append(tmp) return succlist # Show the rewards of all grid cells def show(self): print("--------------------") for y in range(0, self.ySize): for x in range(0, self.xSize): i = y * self.xSize + x if self.grid[i] == 99: # wall cell inside the grid print("##", end="") elif self.grid[i] == 0: # 0 reward cells shown as . print(" .", end="") else: print("%2d" % (self.grid[i]), end="") print("") # Show the policy/plan for a grid MDP # MDP policies represented as dictionaries with the state # indices 0..stateMax as the dictionary keys, and the actions # as the values. def showPolicy(self, policy): print("--------------------") for y in range(0, self.ySize): for x in range(0, self.xSize): i = y * self.xSize + x if self.grid[i] == 99: print("#", end="") else: print(self.actionName(policy[i]), end="") print("") def showValues(self, V): print("--------------------") for y in range(0, self.ySize): for x in range(0, self.xSize): i = y * self.xSize + x print(" %3.3f" % V[i], end='') print("")
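# A minimal value-iteration sketch written against the GridMDP interface above.
# The discount factor, sweep count and the example grid below are illustrative
# assumptions, not part of the original file.
def value_iteration(mdp, gamma=0.9, sweeps=100):
    V = [0.0] * (mdp.stateMax + 1)
    for _ in range(sweeps):
        newV = list(V)
        for s in range(mdp.stateMax + 1):
            if mdp.grid[s] == 99:               # wall cells are never entered
                continue
            newV[s] = max(
                sum(p * (r + gamma * V[s2]) for s2, p, r in mdp.successors(s, a))
                for a in mdp.applicableActions(s)
            )
        V = newV
    # Greedy policy with respect to the final value estimates.
    policy = {}
    for s in range(mdp.stateMax + 1):
        if mdp.grid[s] == 99:
            continue
        policy[s] = max(
            mdp.applicableActions(s),
            key=lambda a: sum(p * (r + gamma * V[s2]) for s2, p, r in mdp.successors(s, a))
        )
    return V, policy


if __name__ == "__main__":
    # 4x3 example: a wall in the middle, +10 and -10 cells on the east side.
    cells = [0, 0, 0, 10,
             0, 99, 0, -10,
             0, 0, 0, 0]
    mdp = GridMDP(4, 3, cells)
    V, policy = value_iteration(mdp)
    mdp.show()
    mdp.showValues(V)
    mdp.showPolicy(policy)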
# Knapsack-style DP: pick at most K of the N items (each with weight A and
# value B) so that the total weight stays within W, maximizing total value.
# dp[i][w] = best value achievable using exactly i items with total weight w.
W = int(input())
N, K = map(int, input().split())
dp = [{} for _ in range(K + 1)]
dp[0][0] = 0
for _ in range(N):
    A, B = map(int, input().split())
    # Iterate the item count downwards so each item is used at most once.
    for i in range(K - 1, -1, -1):
        for j in dp[i]:
            if j + A <= W:
                dp[i + 1].setdefault(j + A, 0)
                dp[i + 1][j + A] = max(dp[i + 1][j + A], dp[i][j] + B)
# The answer is the best value over any item count from 0 to K.
result = 0
for i in range(K + 1):
    if len(dp[i]) == 0:
        continue
    result = max(result, max(dp[i].values()))
print(result)
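# Worked example (input chosen for illustration): capacity W=8, N=3 items,
# at most K=2 picks, items (weight, value) = (3, 30), (4, 50), (5, 60).
# Input:
#   8
#   3 2
#   3 30
#   4 50
#   5 60
# Output: 90  (the weight-3 and weight-5 items: 30 + 60, total weight 8).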
# This file is part of the DMComm project by BladeSabre. License: MIT. class ProngOutput: """Description of the outputs for the RP2040 prong circuit. :param pin_drive_signal: The first pin to use for signal output. Note that `pin_drive_low=pin_drive_signal+1` due to the rules of PIO. :param pin_weak_pull: The pin to use for the weak pull-up / pull-down. """ def __init__(self, pin_drive_signal, pin_weak_pull): #pin_drive_low must be pin_drive_signal+1 self.pin_drive_signal = pin_drive_signal self.pin_weak_pull = pin_weak_pull class ProngInput: """Description of the input for the RP2040 prong circuit. :param pin_input: The pin to use for input. An analog pin is recommended for compatibility with the Arduino version and for a possible future voltage test. """ def __init__(self, pin_input): self.pin_input = pin_input class InfraredOutput: """Description of the infrared LED output. :param pin_output: The pin to use for output. """ def __init__(self, pin_output): self.pin_output = pin_output class InfraredInputModulated: """Description of the modulated infrared input (TSOP4838 recommended). :param pin_input: The pin to use for input. """ def __init__(self, pin_input): self.pin_input = pin_input class InfraredInputRaw: """Description of the non-modulated infrared input (TSMP58000 recommended). :param pin_input: The pin to use for input. """ def __init__(self, pin_input): self.pin_input = pin_input
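# A hypothetical wiring example (the pin numbers below are placeholders chosen for
# illustration, not a documented board layout), showing how the configuration
# classes above are meant to be instantiated.
prong_out = ProngOutput(pin_drive_signal=19, pin_weak_pull=21)  # drive-low pin is implicitly 20
prong_in = ProngInput(pin_input=26)                             # an ADC-capable pin is recommended
ir_out = InfraredOutput(pin_output=16)
ir_in_mod = InfraredInputModulated(pin_input=17)
ir_in_raw = InfraredInputRaw(pin_input=18)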
class RenderInterface(object): def render(self): raise NotImplementedError("Class %s doesn't implement render()" % (self.__class__.__name__)) class ViewportInterface(object): def to_dict(self): raise NotImplementedError("Class %s doesn't implement to_dict()" % (self.__class__.__name__)) def render(self): raise NotImplementedError("Class %s doesn't implement render()" % (self.__class__.__name__)) def autocompute(self): raise NotImplementedError("Class %s doesn't implement autocompute()" % (self.__class__.__name__))
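# A minimal, hypothetical concrete class showing how these interfaces are meant
# to be satisfied (the class name and payload are assumptions for illustration).
class StaticViewport(ViewportInterface):
    def __init__(self, width, height):
        self.width = width
        self.height = height

    def to_dict(self):
        return {"width": self.width, "height": self.height}

    def render(self):
        return "<viewport %dx%d>" % (self.width, self.height)

    def autocompute(self):
        # Nothing to derive for a fixed-size viewport.
        return None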
num1 = int(input('Enter a value: '))
num2 = int(input('Enter another value: '))
s = num1 + num2
print('The sum of {} and {} is {}'.format(num1, num2, s))
def modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path): with open(original_dataset_path, 'r', encoding='utf-8-sig') as f: lines = f.readlines() new_lines = [] for line in lines: split_result = line.split('|') wav_path = split_result[0] speaker = split_result[2].rstrip() speaking_emotion = 0 content_emotion = 0 txt_path = wav_path.replace('selvas_wav', 'selvas_txt').replace('wav_trimmed_22050', 'script').replace('.wav', '.txt') with open(txt_path, 'r', encoding='utf-8-sig') as f: txt = f.readline().rstrip() # new_line = '{}|{}|{}|{}|{}'.format(wav_path,txt, speaker, speaking_emotion, content_emotion) new_line = txt new_lines.append(new_line) with open(grapheme_dataset_path, 'w', encoding='utf-8') as f: for line in new_lines: f.write(line+'\n') if __name__ == '__main__': # original_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/selvas_main_train.txt' # grapheme_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/grapheme/grapheme_selvas_main_train_tmp.txt' original_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/single_language_selvas/train_file_list_pron.txt' grapheme_dataset_path = '/home/admin/projects/graduate/emotion_vector/filelists/grapheme_selvas_multi_train_tmp.txt' modify_phoneme_script_to_create_grapheme_script(original_dataset_path, grapheme_dataset_path)
# Solution
def add_one(arr):
    """Return a new digit list representing arr + 1 (most significant digit first)."""
    output = 1
    for i in range(len(arr), 0, -1):
        output = output + arr[i - 1]
        borrow = output // 10
        if borrow == 0:
            arr[i - 1] = output
            break
        else:
            arr[i - 1] = output % 10
            output = borrow
    # Prepend the leftover carry (or a harmless leading zero), then strip any
    # leading zeros before returning.
    arr = [borrow] + arr
    index = 0
    while arr[index] == 0:
        index += 1
    return arr[index:]
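# Quick checks of the carry behavior (this block is an addition, not part of the
# original solution file).
assert add_one([1, 2, 3]) == [1, 2, 4]
assert add_one([9, 9, 9]) == [1, 0, 0, 0]
assert add_one([0]) == [1]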
def interpolation_search(arr, key):
    low = 0
    high = len(arr) - 1
    while arr[high] != arr[low] and key >= arr[low] and key <= arr[high]:
        # Estimate the position of key assuming the values are roughly
        # uniformly distributed between arr[low] and arr[high].
        mid = int(low + ((key - arr[low]) * (high - low) / (arr[high] - arr[low])))
        if arr[mid] == key:
            return mid
        elif arr[mid] < key:
            low = mid + 1
        else:
            high = mid - 1
    # Handle the degenerate case where the remaining range holds equal values
    # (e.g. a single-element array), which the loop condition skips over.
    if key == arr[low]:
        return low
    return -1


# input arr
arr = [2, 4, 6, 8, 10, 12, 14, 16]
# interpolation_search call to search 6 in arr
print('6 is at index: ', interpolation_search(arr, 6))
# Output: 6 is at index: 2
# encoding: utf-8 # module Tekla.Structures.Model.History calls itself History # from Tekla.Structures.Model,Version=2017.0.0.0,Culture=neutral,PublicKeyToken=2f04dbe497b71114 # by generator 1.145 # no doc # no imports # no functions # classes class ModelHistory(object): # no doc @staticmethod def GetCurrentModificationStamp(): """ GetCurrentModificationStamp() -> ModificationStamp """ pass @staticmethod def GetDeletedObjects(ModStamp): """ GetDeletedObjects(ModStamp: ModificationStamp) -> ModelObjectEnumerator """ pass @staticmethod def GetDeletedObjectsWithType(ModStamp, Enum): """ GetDeletedObjectsWithType(ModStamp: ModificationStamp,Enum: ModelObjectEnum) -> ModelObjectEnumerator """ pass @staticmethod def GetLocalChanges(): """ GetLocalChanges() -> ModificationInfo """ pass @staticmethod def GetModifications(Name, *__args): """ GetModifications(Name: str,ObjectTypes: IEnumerable[ModelObjectEnum],PrevStamp: ModificationStamp) -> ModificationInfo GetModifications(Name: str,PrevStamp: ModificationStamp) -> ModificationInfo """ pass @staticmethod def GetModifiedObjects(ModStamp): """ GetModifiedObjects(ModStamp: ModificationStamp) -> ModelObjectEnumerator """ pass @staticmethod def GetModifiedObjectsWithType(ModStamp, Enum): """ GetModifiedObjectsWithType(ModStamp: ModificationStamp,Enum: ModelObjectEnum) -> ModelObjectEnumerator """ pass @staticmethod def GetNotSharedObjects(): """ GetNotSharedObjects() -> ModelObjectEnumerator """ pass @staticmethod def TakeModifications(Name, *__args): """ TakeModifications(Name: str,ObjectTypes: IEnumerable[ModelObjectEnum],PrevStamp: ModificationStamp) -> ModificationInfo TakeModifications(Name: str,PrevStamp: ModificationStamp) -> ModificationInfo """ pass @staticmethod def UpdateModificationStampToLatest(modificationStampKey): """ UpdateModificationStampToLatest(modificationStampKey: str) """ pass __all__ = [ "__reduce_ex__", "GetCurrentModificationStamp", "GetDeletedObjects", "GetDeletedObjectsWithType", "GetLocalChanges", "GetModifications", "GetModifiedObjects", "GetModifiedObjectsWithType", "GetNotSharedObjects", "TakeModifications", "UpdateModificationStampToLatest", ] class ModificationInfo(object): # no doc Deleted = None Modified = None ModifiedWithInfo = None class ModificationStamp(object): """ ModificationStamp() ModificationStamp(LocalStamp: int,ServerStamp: int) """ @staticmethod def __new__(self, LocalStamp=None, ServerStamp=None): """ __new__(cls: type) __new__(cls: type,LocalStamp: int,ServerStamp: int) """ pass Guid = property(lambda self: object(), lambda self, v: None, lambda self: None) """Get: Guid(self: ModificationStamp) -> str """ LocalStamp = property( lambda self: object(), lambda self, v: None, lambda self: None ) """Get: LocalStamp(self: ModificationStamp) -> int Set: LocalStamp(self: ModificationStamp)=value """ ServerStamp = property( lambda self: object(), lambda self, v: None, lambda self: None ) """Get: ServerStamp(self: ModificationStamp) -> int Set: ServerStamp(self: ModificationStamp)=value """ class ModifiedObjectInfo(object): # no doc IsAttributeChanged = None IsCreated = None IsModified = None IsNumberingChanged = None ModelObject = None
# dividebyzero.py """Simple exception handling example.""" while True: # attempt to convert and divide values try: number1 = int(input('Enter numerator: ')) number2 = int(input('Enter denominator: ')) result = number1 / number2 except ValueError: # tried to convert non-numeric value to int print('You must enter two integers\n') except ZeroDivisionError: # denominator was 0 print('Attempted to divide by zero\n') else: # executes only if no exceptions occur print(f'{number1:.3f} / {number2:.3f} = {result:.3f}') break # terminate the loop ########################################################################## # (C) Copyright 2019 by Deitel & Associates, Inc. and # # Pearson Education, Inc. All Rights Reserved. # # # # DISCLAIMER: The authors and publisher of this book have used their # # best efforts in preparing the book. These efforts include the # # development, research, and testing of the theories and programs # # to determine their effectiveness. The authors and publisher make # # no warranty of any kind, expressed or implied, with regard to these # # programs or to the documentation contained in these books. The authors # # and publisher shall not be liable in any event for incidental or # # consequential damages in connection with, or arising out of, the # # furnishing, performance, or use of these programs. # ##########################################################################
try:
    with open('../../.password/google-maps/api', 'r') as fp:
        key = fp.readlines()
        key = ''.join(key)
except OSError:
    # Insert your API key here
    key = 'AIzaSyDxydKN7Yt54JNmVw9opg9EcibCghjetgw'
# Ex053: Write a program that reads a sentence and says whether it is a
# palindrome, ignoring spaces.
# E.g.: apos a sopa
frase = str(input('Enter a sentence: ')).strip().upper()
palavras = frase.split()  # split on spaces so they are ignored in the check
junto = ''.join(palavras)
inverso = ''
for letra in range(len(junto) - 1, -1, -1):
    inverso += junto[letra]
print(f'The reverse of {frase} is {inverso}')
if inverso == junto:
    print('We have a palindrome!')
else:
    print('The sentence entered is not a palindrome')
# SOLUTION FOR P20
'''P20 (*) Remove the K'th element from a list.
Example:
* (remove-at '(a b c d) 2)
(A C D)'''

my_list = ['a', 'b', 'c', 'd', 'e']
pos = int(input('Element to remove = '))
if 1 <= pos <= len(my_list):  # CHECK IF INPUT IS IN RANGE (positions are 1-based)
    my_list.pop(pos - 1)      # REMOVE THE ELEMENT AT THE GIVEN POSITION
    print(my_list)            # PRINT THE LIST
else:
    print('Invalid input')
class PairSet(object): __slots__ = '_data', def __init__(self): self._data = set() def __contains__(self, item): return item in self._data def has(self, a, b): return (a, b) in self._data def add(self, a, b): self._data.add((a, b)) self._data.add((b, a)) return self def remove(self, a, b): self._data.discard((a, b)) self._data.discard((b, a))
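# Illustrative use of PairSet (not part of the original file): pairs are stored
# symmetrically, so membership checks are order-independent.
ps = PairSet()
ps.add("a", "b")
assert ps.has("b", "a")
assert ("a", "b") in ps
ps.remove("b", "a")
assert not ps.has("a", "b")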
class TreeNode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def count_unival_subtrees(self, root: TreeNode) -> int: self.count = 0 self.is_unival(root) return self.count def is_unival(self, root: TreeNode) -> bool: # Leaf Node must be an Univalue Tree, return True if root is None: return True # Traverse tree with DFS left = self.is_unival(root.left) right = self.is_unival(root.right) # If both children are Univalue Tree and root.value is # equal to both children's values. Then the tree of root # node is an Univalue Tree if left and right: if (root.left is not None) and (root.val != root.left.val): return False if (root.right is not None) and (root.val != root.right.val): return False self.count += 1 return True return False
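# A small worked example (the tree layout is illustrative, not from the original
# file). Expected count is 5: the four leaves plus the all-1 subtree on level two.
#
#        0
#       / \
#      1   0
#         / \
#        1   0
#       / \
#      1   1
if __name__ == "__main__":
    root = TreeNode(0)
    root.left = TreeNode(1)
    root.right = TreeNode(0)
    root.right.left = TreeNode(1)
    root.right.right = TreeNode(0)
    root.right.left.left = TreeNode(1)
    root.right.left.right = TreeNode(1)
    print(Solution().count_unival_subtrees(root))  # 5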
class Node: def __init__(self, data): self.data = data self.next = None class LinkedList: def __init__(self): self.head = None def push(self, new_data): new_node = Node(new_data) new_node.next = self.head self.head = new_node def getNth(self, llist, position): llist.getNthNode(self.head, position, llist) def getNthNode(self, head, position, llist): count = 0 if(head): if count == position: print(head.data) else: llist.getNthNode(head.next, position - 1, llist) else: print("Index Doesn't Exist") if __name__ == "__main__": llist = LinkedList() llist.push(5) llist.push(6) llist.push(98) llist.push(3) llist.push(1) llist.push(10) print("Element at Index 4 is:", end=" ") llist.getNth(llist, 4)
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- Project information -----------------------------------------------------

project = 'elasticsearch-objects-operator'
copyright = '2020, 90poe & elasticsearch-objects-operator development team'
author = 'elasticsearch-objects-operator development team'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'recommonmark',
    'sphinx_markdown_tables',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']

# The master toctree document.
master_doc = 'index'

pygments_style = 'sphinx'

# Output file base name for HTML help builder.
htmlhelp_basename = 'elasticsearch-objects-operatordoc'

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'elasticsearch-objects-operator',
     'elasticsearch-objects-operator Documentation',
     [author], 1)
]

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'elasticsearch-objects-operator',
     'elasticsearch-objects-operator Documentation',
     author, 'elasticsearch-objects-operator',
     'One line description of project.',
     'Miscellaneous'),
]


def setup(app):
    app.add_stylesheet('custom.css')
def draw_line(tick_length, tick_label=""): line = "-" * tick_length if tick_label: line += " " + tick_label print(line) def draw_interval(center_length): if center_length > 0: draw_interval(center_length - 1) draw_line(center_length) draw_interval(center_length - 1) def draw_ruler(num_inches, major_length): draw_line(major_length, "0") for i in range(1, 1 + num_inches): draw_interval(major_length - 1) draw_line(major_length, str(i)) if __name__ == '__main__': draw_ruler(1, 3) draw_ruler(1, 4)
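# For reference (added here for illustration), the first call above,
# draw_ruler(1, 3), prints the following; tick lengths mirror the recursion depth:
#
# --- 0
# -
# --
# -
# --- 1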
import collections
import typing

import numpy as np


class TrainConfig(typing.NamedTuple):
    T: int
    train_size: int
    batch_size: int
    loss_func: typing.Callable


class TrainData(typing.NamedTuple):
    feats: np.ndarray
    targs: np.ndarray


DaRnnNet = collections.namedtuple("DaRnnNet", ["encoder", "decoder", "enc_opt", "dec_opt"])
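# Illustrative instantiation of the containers above; the shapes, window length T
# and loss function are placeholder assumptions, and the network/optimizer slots
# are left as None here.
if __name__ == "__main__":
    cfg = TrainConfig(T=10, train_size=800, batch_size=64,
                      loss_func=lambda pred, true: ((pred - true) ** 2).mean())
    data = TrainData(feats=np.zeros((1000, 5)), targs=np.zeros((1000, 1)))
    net = DaRnnNet(encoder=None, decoder=None, enc_opt=None, dec_opt=None)
    print(cfg, data.feats.shape, net._fields)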