content
stringlengths 7
1.05M
| fixed_cases
stringlengths 1
1.28M
|
---|---|
def is_even_with_return(i):
print('with return')
remainder = i % 2
return remainder == 0
print(is_even_with_return(132))
def is_even(i):
return i % 2 == 0
print("All numbers between 0 and 20: even or not")
for i in range(20):
if is_even(i):
print(i, "even")
else:
print(i, "odd")
def func_a():
print('inside func_a')
def func_b(y):
print('inside func_b')
return y
def func_c(z):
print('inside func_c')
return z()
print(func_c(func_a))
def f():
def x(a, b):
return a+b
return x
val = f()(3,4)
print(val)
| def is_even_with_return(i):
print('with return')
remainder = i % 2
return remainder == 0
print(is_even_with_return(132))
def is_even(i):
return i % 2 == 0
print('All numbers between 0 and 20: even or not')
for i in range(20):
if is_even(i):
print(i, 'even')
else:
print(i, 'odd')
def func_a():
print('inside func_a')
def func_b(y):
print('inside func_b')
return y
def func_c(z):
print('inside func_c')
return z()
print(func_c(func_a))
def f():
def x(a, b):
return a + b
return x
val = f()(3, 4)
print(val) |
class Solution:
def firstBadVersion(self, n):
if n == 1:
return 1
l, r = 2, n
while l <= r:
mid = (l + r) // 2
if isBadVersion(mid) and not isBadVersion(mid - 1):
return mid
elif isBadVersion(mid):
r = mid - 1
else:
l = mid + 1
return 1
| class Solution:
def first_bad_version(self, n):
if n == 1:
return 1
(l, r) = (2, n)
while l <= r:
mid = (l + r) // 2
if is_bad_version(mid) and (not is_bad_version(mid - 1)):
return mid
elif is_bad_version(mid):
r = mid - 1
else:
l = mid + 1
return 1 |
class Obstacle:
def __init__(self, clearance):
self.x = 10
self.y = 10
self.clearance = clearance
self.robot_radius = 0.354 / 2
self.clearance = self.robot_radius + self.clearance
self.dynamic_Obstacle = False
# self.rect1_corner1_x = 3
# self.rect1_corner1_y = 0
self.rect1_corner1_x = 0
self.rect1_corner1_y = 2.75
self.rect1_length = 3
self.rect1_width = 0.01
# self.rect2_corner1_x = 6
# self.rect2_corner1_y = 0
self.rect2_corner1_x = 0
self.rect2_corner1_y = 6.25
self.rect2_length = 3
self.rect2_width = 0.01
def isInObstacleSpace(self, x, y):
if (x < 1 or x > 9 or y < 1 or y > 9):
#print('Out of boundary !')
return 1
#rectangle obstacle 1
x1 = self.rect1_corner1_x - self.clearance
x2 = x1 + self.rect1_length + 2*self.clearance
y1 = self.rect1_corner1_y - self.clearance
y2 = y1 + self.rect1_width + 2*self.clearance
if (x >= x1 and x <= x2 and y >= y1 and y <= y2):
#print('Inside rectangle 1, avoid')
return 1
#rectangle obstacle 2
x1 = self.rect2_corner1_x - self.clearance
x2 = x1 + self.rect2_length + 2*self.clearance
y1 = self.rect2_corner1_y - self.clearance
y2 = y1 + self.rect2_width + 2*self.clearance
if (x >= x1 and x <= x2 and y >= y1 and y <= y2):
#print('Inside rectangle 1, avoid')
return 1
if self.dynamic_Obstacle == True:
x1 = self.dynamic_obs_corner_x - self.clearance
x2 = x1 + self.dynamic_obs_length + 2*self.clearance
y1 = self.dynamic_obs_corner_y - self.clearance
y2 = y1 + self.dynamic_obs_width + 2*self.clearance
if (x >= x1 and x <= x2 and y >= y1 and y <= y2):
# print('Hitting new dynamic obstacle')
return 1
return 0
def addNewObstacle(self, x, y, length, width):
self.dynamic_obs_corner_x = x
self.dynamic_obs_corner_y = y
self.dynamic_obs_length = length
self.dynamic_obs_width = width
self.dynamic_Obstacle = True
| class Obstacle:
def __init__(self, clearance):
self.x = 10
self.y = 10
self.clearance = clearance
self.robot_radius = 0.354 / 2
self.clearance = self.robot_radius + self.clearance
self.dynamic_Obstacle = False
self.rect1_corner1_x = 0
self.rect1_corner1_y = 2.75
self.rect1_length = 3
self.rect1_width = 0.01
self.rect2_corner1_x = 0
self.rect2_corner1_y = 6.25
self.rect2_length = 3
self.rect2_width = 0.01
def is_in_obstacle_space(self, x, y):
if x < 1 or x > 9 or y < 1 or (y > 9):
return 1
x1 = self.rect1_corner1_x - self.clearance
x2 = x1 + self.rect1_length + 2 * self.clearance
y1 = self.rect1_corner1_y - self.clearance
y2 = y1 + self.rect1_width + 2 * self.clearance
if x >= x1 and x <= x2 and (y >= y1) and (y <= y2):
return 1
x1 = self.rect2_corner1_x - self.clearance
x2 = x1 + self.rect2_length + 2 * self.clearance
y1 = self.rect2_corner1_y - self.clearance
y2 = y1 + self.rect2_width + 2 * self.clearance
if x >= x1 and x <= x2 and (y >= y1) and (y <= y2):
return 1
if self.dynamic_Obstacle == True:
x1 = self.dynamic_obs_corner_x - self.clearance
x2 = x1 + self.dynamic_obs_length + 2 * self.clearance
y1 = self.dynamic_obs_corner_y - self.clearance
y2 = y1 + self.dynamic_obs_width + 2 * self.clearance
if x >= x1 and x <= x2 and (y >= y1) and (y <= y2):
return 1
return 0
def add_new_obstacle(self, x, y, length, width):
self.dynamic_obs_corner_x = x
self.dynamic_obs_corner_y = y
self.dynamic_obs_length = length
self.dynamic_obs_width = width
self.dynamic_Obstacle = True |
def f(*, b):
return b
def f(a, *, b):
return a + b
def f(a, *, b, c):
return a + b + c
def f(a, *, b=c):
return a + b
def f(a, *, b=c, c):
return a + b + c
def f(a, *, b=c, c=d):
return a + b + c
def f(a, *, b=c, c, d=e):
return a + b + c + d
def f(a=None, *, b=None):
return a + b
| def f(*, b):
return b
def f(a, *, b):
return a + b
def f(a, *, b, c):
return a + b + c
def f(a, *, b=c):
return a + b
def f(a, *, b=c, c):
return a + b + c
def f(a, *, b=c, c=d):
return a + b + c
def f(a, *, b=c, c, d=e):
return a + b + c + d
def f(a=None, *, b=None):
return a + b |
# Sort the entries of medals: medals_sorted
medals_sorted = medals.sort_index(level=0)
# Print the number of Bronze medals won by Germany
print(medals_sorted.loc[('bronze','Germany')])
# Print data about silver medals
print(medals_sorted.loc['silver'])
# Create alias for pd.IndexSlice: idx
idx = pd.IndexSlice
# Print all the data on medals won by the United Kingdom
print(medals_sorted.loc[idx[:,'United Kingdom'],:]) | medals_sorted = medals.sort_index(level=0)
print(medals_sorted.loc['bronze', 'Germany'])
print(medals_sorted.loc['silver'])
idx = pd.IndexSlice
print(medals_sorted.loc[idx[:, 'United Kingdom'], :]) |
class WebSocketDefine:
Uri = "wss://sdstream.binance.com/stream"
# testnet new spec
# Uri = "wss://sdstream.binancefuture.com/stream"
class RestApiDefine:
Url = "https://dapi.binance.com"
# testnet
# Url = "https://testnet.binancefuture.com"
| class Websocketdefine:
uri = 'wss://sdstream.binance.com/stream'
class Restapidefine:
url = 'https://dapi.binance.com' |
s = input()
y, m, d = map(int, s.split('/'))
f = False
(
Heisei,
TBD,
)= (
'Heisei',
'TBD',
)
if y < 2019:
print(Heisei)
elif y == 2019:
if(m < 4):
print(Heisei)
elif m == 4:
if(d <= 30):
print(Heisei)
else :
print(TBD)
else :
print(TBD)
else :
print(TBD)
| s = input()
(y, m, d) = map(int, s.split('/'))
f = False
(heisei, tbd) = ('Heisei', 'TBD')
if y < 2019:
print(Heisei)
elif y == 2019:
if m < 4:
print(Heisei)
elif m == 4:
if d <= 30:
print(Heisei)
else:
print(TBD)
else:
print(TBD)
else:
print(TBD) |
rfm69SpiBus = 0
rfm69NSS = 5 # GPIO5 == pin 7
rfm69D0 = 9 # GPIO9 == pin 12
rfm69RST = 8 # GPIO8 == pin 11
am2302 = 22 # GPIO22 == pin 29
voltADC = 26 # GPIO26 == pin 31 | rfm69_spi_bus = 0
rfm69_nss = 5
rfm69_d0 = 9
rfm69_rst = 8
am2302 = 22
volt_adc = 26 |
# Hack 3: create your own math function
# Function is superfactorial: superfactorial is product of all factorials until n.
# OOP method
class superFactorial():
def __init__(self,n):
self.n = n
def factorial(self,y):
y = self.n if y is None else y
product = 1
for x in range(1,y+1):
product*=x
return product
def __call__(self):
product = 1
for x in range(1,self.n+1):
product*= self.factorial(x)
return product
# Imperative Method
def superfac():
x = int(input("What number should we use? "))
product = 1
for y in range(1,x+1):
secondProd = 1
for z in range(1,y+1):
secondProd*= z
product*=secondProd
print(product)
if __name__ == "__main__":
sfac = superFactorial(3)
print(sfac())
print(superfac()) | class Superfactorial:
def __init__(self, n):
self.n = n
def factorial(self, y):
y = self.n if y is None else y
product = 1
for x in range(1, y + 1):
product *= x
return product
def __call__(self):
product = 1
for x in range(1, self.n + 1):
product *= self.factorial(x)
return product
def superfac():
x = int(input('What number should we use? '))
product = 1
for y in range(1, x + 1):
second_prod = 1
for z in range(1, y + 1):
second_prod *= z
product *= secondProd
print(product)
if __name__ == '__main__':
sfac = super_factorial(3)
print(sfac())
print(superfac()) |
# -*- Mode:Python;indent-tabs-mode:nil; -*-
#
# File: psaExceptions.py
# Created: 05/09/2014
# Author: BSC
#
# Description:
# Custom execption class to manage error in the PSC
#
class psaExceptions( object ):
class confRetrievalFailed( Exception ):
pass
| class Psaexceptions(object):
class Confretrievalfailed(Exception):
pass |
# multiply a list by a number
def mul(row, num):
return [x * num for x in row]
# subtract one row from another
def sub(row_left, row_right):
return [a - b for (a, b) in zip(row_left, row_right)]
# calculate the row echelon form of the matrix
def echelonify(rw, i, m):
for j, row in enumerate(m[(i+1):]):
j += 1
# print("rw[i]:", rw[i])
if rw[i] != 0:
m[j+i] = sub(row, mul(rw, row[i] / rw[i]))
return rw
def row_echelon(m):
for i in range(len(m)): # len(m) == m x n
active_row = m[i]
echelonify(active_row, i, m)
# close to zero
m = [[(0 if (0.0000000001 > x > -0.0000000001) else x)
for x in row]for row in m]
return m
if __name__ == '__main__':
print("Enter number of rows and columns")
m, n = map(int, input().split()) # m = row and n = column
M = []
for _ in range(m):
row = list(map(int, input().split()))[:n]
M.append(row)
mat = row_echelon(M)
for row in mat:
print(' '.join((str(x) for x in row)))
# The output can be printed by dividing each element of each row by the first non-zero element of the respective row in order to get 1
| def mul(row, num):
return [x * num for x in row]
def sub(row_left, row_right):
return [a - b for (a, b) in zip(row_left, row_right)]
def echelonify(rw, i, m):
for (j, row) in enumerate(m[i + 1:]):
j += 1
if rw[i] != 0:
m[j + i] = sub(row, mul(rw, row[i] / rw[i]))
return rw
def row_echelon(m):
for i in range(len(m)):
active_row = m[i]
echelonify(active_row, i, m)
m = [[0 if 1e-10 > x > -1e-10 else x for x in row] for row in m]
return m
if __name__ == '__main__':
print('Enter number of rows and columns')
(m, n) = map(int, input().split())
m = []
for _ in range(m):
row = list(map(int, input().split()))[:n]
M.append(row)
mat = row_echelon(M)
for row in mat:
print(' '.join((str(x) for x in row))) |
ans = 0
a=input()
for _ in range(int(input())):
s=input()
for start in range(10):
for j in range(len(a)):
if a[j] != s[(start+j)%10]:
break
else:
ans+=1
break
print(ans) | ans = 0
a = input()
for _ in range(int(input())):
s = input()
for start in range(10):
for j in range(len(a)):
if a[j] != s[(start + j) % 10]:
break
else:
ans += 1
break
print(ans) |
# Easy
# Runtime: 32 ms, faster than 73.01% of Python3 online submissions for Count and Say.
# Memory Usage: 12.9 MB, less than 100.00% of Python3 online submissions for Count and Say.
class Solution:
def countAndSay(self, n: int) -> str:
def count_and_say(n):
if n == 1:
return '1'
cur_s = ''
idx = 0
cur_sum = 0
s = count_and_say(n - 1)
for i, ch in enumerate(s):
if ch != s[idx]:
cur_s += str(cur_sum) + s[idx]
cur_sum = 1
idx = i
else:
cur_sum += 1
cur_s += str(cur_sum) + s[idx]
return cur_s
return count_and_say(n) | class Solution:
def count_and_say(self, n: int) -> str:
def count_and_say(n):
if n == 1:
return '1'
cur_s = ''
idx = 0
cur_sum = 0
s = count_and_say(n - 1)
for (i, ch) in enumerate(s):
if ch != s[idx]:
cur_s += str(cur_sum) + s[idx]
cur_sum = 1
idx = i
else:
cur_sum += 1
cur_s += str(cur_sum) + s[idx]
return cur_s
return count_and_say(n) |
class PlannerEventHandler(object):
pass
def ProblemNotImplemented(self):
return False
def StartedPlanning(self):
return True
def SubmittedPipeline(self, pipeline):
return True
def RunningPipeline(self, pipeline):
return True
def CompletedPipeline(self, pipeline, result):
return True
def StartExecutingPipeline(self, pipeline):
return True
def ExecutedPipeline(self, pipeline, result):
return True
def EndedPlanning(self):
return True
| class Plannereventhandler(object):
pass
def problem_not_implemented(self):
return False
def started_planning(self):
return True
def submitted_pipeline(self, pipeline):
return True
def running_pipeline(self, pipeline):
return True
def completed_pipeline(self, pipeline, result):
return True
def start_executing_pipeline(self, pipeline):
return True
def executed_pipeline(self, pipeline, result):
return True
def ended_planning(self):
return True |
def minSubArrayLen(target, nums):
length = list()
for i in range(len(nums)):
remain = target - nums[i]
if remain <= 0:
length.append(1)
continue
for j in range(i+1, len(nums)):
remain = remain - nums[j]
if remain <= 0:
length.append(j-i+1)
break
if not length:
return 0
return min(length)
if __name__ == '__main__':
# print(minSubArrayLen(1, [1,1,1,1,1,1,1,1]))
# print(minSubArrayLen(7, [2,3,1,2,4,3]))
print(minSubArrayLen(11, [1, 2, 3, 4, 5]))
| def min_sub_array_len(target, nums):
length = list()
for i in range(len(nums)):
remain = target - nums[i]
if remain <= 0:
length.append(1)
continue
for j in range(i + 1, len(nums)):
remain = remain - nums[j]
if remain <= 0:
length.append(j - i + 1)
break
if not length:
return 0
return min(length)
if __name__ == '__main__':
print(min_sub_array_len(11, [1, 2, 3, 4, 5])) |
# -*- coding: UTF-8 -*-
class Shared(object):
'''
Class used for /hana/shared attributes.
Attributes and methods are passed to other LVM Classes.
'''
name = 'shared'
vg_physical_extent_size = '-s 1M'
vg_data_alignment = '--dataalignment 1M'
vg_args = vg_physical_extent_size + ' ' + vg_data_alignment
lv_size = '-l 100%VG'
lv_args = lv_size
fs_block_size = '-b size=4096'
fs_sector_size = '-s size=4096'
fs_type = 'xfs'
fs_mount_point = '/hana/shared'
fs_args = fs_block_size + ' ' + fs_sector_size
def __init__(self):
super(Shared, self).__init__() | class Shared(object):
"""
Class used for /hana/shared attributes.
Attributes and methods are passed to other LVM Classes.
"""
name = 'shared'
vg_physical_extent_size = '-s 1M'
vg_data_alignment = '--dataalignment 1M'
vg_args = vg_physical_extent_size + ' ' + vg_data_alignment
lv_size = '-l 100%VG'
lv_args = lv_size
fs_block_size = '-b size=4096'
fs_sector_size = '-s size=4096'
fs_type = 'xfs'
fs_mount_point = '/hana/shared'
fs_args = fs_block_size + ' ' + fs_sector_size
def __init__(self):
super(Shared, self).__init__() |
class Solution:
def longestCommonPrefix(self, strs: List[str]) -> str:
shortest = min(strs, key=len)
longest_common = ""
for idx, char in enumerate(shortest):
for word in strs:
if word[idx] != char:
return longest_common
longest_common += char
# Case where they pass us nothing
return longest_common
| class Solution:
def longest_common_prefix(self, strs: List[str]) -> str:
shortest = min(strs, key=len)
longest_common = ''
for (idx, char) in enumerate(shortest):
for word in strs:
if word[idx] != char:
return longest_common
longest_common += char
return longest_common |
class Solution:
def uniqueOccurrences(self, arr: List[int]) -> bool:
dict = {}
for i in arr :
if i in dict :
dict[i] += 1
else :
dict[i] = 1
count = 0
s = set(dict.values())
ns = len(s)
nl = len(dict.values())
if nl != ns :
return False
else :
return True
| class Solution:
def unique_occurrences(self, arr: List[int]) -> bool:
dict = {}
for i in arr:
if i in dict:
dict[i] += 1
else:
dict[i] = 1
count = 0
s = set(dict.values())
ns = len(s)
nl = len(dict.values())
if nl != ns:
return False
else:
return True |
test = { 'name': 'q1d',
'points': 1,
'suites': [ { 'cases': [ {'code': ">>> species_by_island.labels == ('species', 'Biscoe', 'Dream', 'Torgersen')\nTrue", 'hidden': False, 'locked': False},
{'code': ">>> np.all(species_by_island.column('Biscoe') == np.array([44, 0, 119]))\nTrue", 'hidden': False, 'locked': False}],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'}]}
| test = {'name': 'q1d', 'points': 1, 'suites': [{'cases': [{'code': ">>> species_by_island.labels == ('species', 'Biscoe', 'Dream', 'Torgersen')\nTrue", 'hidden': False, 'locked': False}, {'code': ">>> np.all(species_by_island.column('Biscoe') == np.array([44, 0, 119]))\nTrue", 'hidden': False, 'locked': False}], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest'}]} |
# objects here will be mixed into the dynamically created asset type classes
# based on name.
# This lets us extend certain asset types without having to give up the generic
# dynamic meta implementation
class Attachment(object):
def set_blob(self, blob):
return self._v1_v1meta.set_attachment_blob(self, blob)
def get_blob(self):
return self._v1_v1meta.get_attachment_blob(self)
file_data = property(get_blob, set_blob)
# the special_classes mapping will be used to lookup mixins by asset type name.
special_classes = locals()
| class Attachment(object):
def set_blob(self, blob):
return self._v1_v1meta.set_attachment_blob(self, blob)
def get_blob(self):
return self._v1_v1meta.get_attachment_blob(self)
file_data = property(get_blob, set_blob)
special_classes = locals() |
LOG_EPOCH = 'epoch'
LOG_TRAIN_LOSS = 'train_loss'
LOG_TRAIN_ACC = 'train_acc'
LOG_VAL_LOSS = 'val_loss'
LOG_VAL_ACC = 'val_acc'
LOG_FIELDS = [LOG_EPOCH, LOG_TRAIN_LOSS, LOG_TRAIN_ACC, LOG_VAL_LOSS, LOG_VAL_ACC]
LOG_COLOR_HEADER = '\033[95m'
LOG_COLOR_OKBLUE = '\033[94m'
LOG_COLOR_OKCYAN = '\033[96m'
LOG_COLOR_OKGREEN = '\033[92m'
LOG_COLOR_WARNING = '\033[93m'
LOG_COLOR_FAIL = '\033[91m'
LOG_COLOR_ENDC = '\033[0m'
LOG_COLOR_BOLD = '\033[1m'
LOG_COLOR_UNDERLINE = '\033[4m'
| log_epoch = 'epoch'
log_train_loss = 'train_loss'
log_train_acc = 'train_acc'
log_val_loss = 'val_loss'
log_val_acc = 'val_acc'
log_fields = [LOG_EPOCH, LOG_TRAIN_LOSS, LOG_TRAIN_ACC, LOG_VAL_LOSS, LOG_VAL_ACC]
log_color_header = '\x1b[95m'
log_color_okblue = '\x1b[94m'
log_color_okcyan = '\x1b[96m'
log_color_okgreen = '\x1b[92m'
log_color_warning = '\x1b[93m'
log_color_fail = '\x1b[91m'
log_color_endc = '\x1b[0m'
log_color_bold = '\x1b[1m'
log_color_underline = '\x1b[4m' |
def load(h):
return ({'abbr': 1, 'code': 1, 'title': 'PRES Pressure [hPa]'},
{'abbr': 2, 'code': 2, 'title': 'psnm Pressure reduced to MSL [hPa]'},
{'abbr': 3, 'code': 3, 'title': 'tsps Pressure tendency [Pa/s]'},
{'abbr': 4, 'code': 4, 'title': 'var4 undefined'},
{'abbr': 5, 'code': 5, 'title': 'var5 undefined'},
{'abbr': 6, 'code': 6, 'title': 'geop Geopotential [dam]'},
{'abbr': 7, 'code': 7, 'title': 'zgeo Geopotential height [gpm]'},
{'abbr': 8, 'code': 8, 'title': 'gzge Geometric height [m]'},
{'abbr': 9, 'code': 9, 'title': 'var9 undefined'},
{'abbr': 10, 'code': 10, 'title': 'var10 undefined'},
{'abbr': 11, 'code': 11, 'title': 'temp ABSOLUTE TEMPERATURE [K]'},
{'abbr': 12, 'code': 12, 'title': 'vtmp VIRTUAL TEMPERATURE [K]'},
{'abbr': 13, 'code': 13, 'title': 'ptmp POTENTIAL TEMPERATURE [K]'},
{'abbr': 14,
'code': 14,
'title': 'psat PSEUDO-ADIABATIC POTENTIAL TEMPERATURE [K]'},
{'abbr': 15, 'code': 15, 'title': 'mxtp MAXIMUM TEMPERATURE [K]'},
{'abbr': 16, 'code': 16, 'title': 'mntp MINIMUM TEMPERATURE [K]'},
{'abbr': 17, 'code': 17, 'title': 'tpor DEW POINT TEMPERATURE [K]'},
{'abbr': 18, 'code': 18, 'title': 'dptd DEW POINT DEPRESSION [K]'},
{'abbr': 19, 'code': 19, 'title': 'lpsr LAPSE RATE [K/m]'},
{'abbr': 20, 'code': 20, 'title': 'var20 undefined'},
{'abbr': 21, 'code': 21, 'title': 'rds1 RADAR SPECTRA(1) [non-dim]'},
{'abbr': 22, 'code': 22, 'title': 'rds2 RADAR SPECTRA(2) [non-dim]'},
{'abbr': 23, 'code': 23, 'title': 'rds3 RADAR SPECTRA(3) [non-dim]'},
{'abbr': 24, 'code': 24, 'title': 'var24 undefined'},
{'abbr': 25, 'code': 25, 'title': 'tpan TEMPERATURE ANOMALY [K]'},
{'abbr': 26, 'code': 26, 'title': 'psan PRESSURE ANOMALY [Pa hPa]'},
{'abbr': 27, 'code': 27, 'title': 'zgan GEOPOT HEIGHT ANOMALY [m]'},
{'abbr': 28, 'code': 28, 'title': 'wvs1 WAVE SPECTRA(1) [non-dim]'},
{'abbr': 29, 'code': 29, 'title': 'wvs2 WAVE SPECTRA(2) [non-dim]'},
{'abbr': 30, 'code': 30, 'title': 'wvs3 WAVE SPECTRA(3) [non-dim]'},
{'abbr': 31, 'code': 31, 'title': 'wind WIND DIRECTION [deg]'},
{'abbr': 32, 'code': 32, 'title': 'wins WIND SPEED [m/s]'},
{'abbr': 33, 'code': 33, 'title': 'uvel ZONAL WIND (U) [m/s]'},
{'abbr': 34, 'code': 34, 'title': 'vvel MERIDIONAL WIND (V) [m/s]'},
{'abbr': 35, 'code': 35, 'title': 'fcor STREAM FUNCTION [m2/s]'},
{'abbr': 36, 'code': 36, 'title': 'potv VELOCITY POTENTIAL [m2/s]'},
{'abbr': 37, 'code': 37, 'title': 'var37 undefined'},
{'abbr': 38, 'code': 38, 'title': 'sgvv SIGMA COORD VERT VEL [sec/sec]'},
{'abbr': 39, 'code': 39, 'title': 'omeg OMEGA [Pa/s]'},
{'abbr': 40, 'code': 40, 'title': 'omg2 VERTICAL VELOCITY [m/s]'},
{'abbr': 41, 'code': 41, 'title': 'abvo ABSOLUTE VORTICITY [10**5/sec]'},
{'abbr': 42, 'code': 42, 'title': 'abdv ABSOLUTE DIVERGENCE [10**5/sec]'},
{'abbr': 43, 'code': 43, 'title': 'vort VORTICITY [1/s]'},
{'abbr': 44, 'code': 44, 'title': 'divg DIVERGENCE [1/s]'},
{'abbr': 45, 'code': 45, 'title': 'vucs VERTICAL U-COMP SHEAR [1/sec]'},
{'abbr': 46, 'code': 46, 'title': 'vvcs VERT V-COMP SHEAR [1/sec]'},
{'abbr': 47, 'code': 47, 'title': 'dirc DIRECTION OF CURRENT [deg]'},
{'abbr': 48, 'code': 48, 'title': 'spdc SPEED OF CURRENT [m/s]'},
{'abbr': 49, 'code': 49, 'title': 'ucpc U-COMPONENT OF CURRENT [m/s]'},
{'abbr': 50, 'code': 50, 'title': 'vcpc V-COMPONENT OF CURRENT [m/s]'},
{'abbr': 51, 'code': 51, 'title': 'umes SPECIFIC HUMIDITY [kg/kg]'},
{'abbr': 52, 'code': 52, 'title': 'umrl RELATIVE HUMIDITY [no Dim]'},
{'abbr': 53, 'code': 53, 'title': 'hmxr HUMIDITY MIXING RATIO [kg/kg]'},
{'abbr': 54, 'code': 54, 'title': 'agpl INST. PRECIPITABLE WATER [Kg/m2]'},
{'abbr': 55, 'code': 55, 'title': 'vapp VAPOUR PRESSURE [Pa hpa]'},
{'abbr': 56, 'code': 56, 'title': 'sadf SATURATION DEFICIT [Pa hPa]'},
{'abbr': 57, 'code': 57, 'title': 'evap EVAPORATION [Kg/m2/day]'},
{'abbr': 58, 'code': 58, 'title': 'var58 undefined'},
{'abbr': 59, 'code': 59, 'title': 'prcr PRECIPITATION RATE [kg/m2/day]'},
{'abbr': 60, 'code': 60, 'title': 'thpb THUNDER PROBABILITY [%]'},
{'abbr': 61, 'code': 61, 'title': 'prec TOTAL PRECIPITATION [Kg/m2/day]'},
{'abbr': 62,
'code': 62,
'title': 'prge LARGE SCALE PRECIPITATION [Kg/m2/day]'},
{'abbr': 63, 'code': 63, 'title': 'prcv CONVECTIVE PRECIPITATION [Kg/m2/day]'},
{'abbr': 64, 'code': 64, 'title': 'neve SNOWFALL [Kg/m2/day]'},
{'abbr': 65, 'code': 65, 'title': 'wenv WAT EQUIV ACC SNOW DEPTH [kg/m2]'},
{'abbr': 66, 'code': 66, 'title': 'nvde SNOW DEPTH [cm]'},
{'abbr': 67, 'code': 67, 'title': 'mxld MIXED LAYER DEPTH [m cm]'},
{'abbr': 68, 'code': 68, 'title': 'tthd TRANS THERMOCLINE DEPTH [m cm]'},
{'abbr': 69, 'code': 69, 'title': 'mthd MAIN THERMOCLINE DEPTH [m cm]'},
{'abbr': 70, 'code': 70, 'title': 'mtha MAIN THERMOCLINE ANOM [m cm]'},
{'abbr': 71, 'code': 71, 'title': 'cbnv CLOUD COVER [0-1]'},
{'abbr': 72, 'code': 72, 'title': 'cvnv CONVECTIVE CLOUD COVER [0-1]'},
{'abbr': 73, 'code': 73, 'title': 'lwnv LOW CLOUD COVER [0-1]'},
{'abbr': 74, 'code': 74, 'title': 'mdnv MEDIUM CLOUD COVER [0-1]'},
{'abbr': 75, 'code': 75, 'title': 'hinv HIGH CLOUD COVER [0-1]'},
{'abbr': 76, 'code': 76, 'title': 'wtnv CLOUD WATER [kg/m2]'},
{'abbr': 77, 'code': 77, 'title': 'bli BEST LIFTED INDEX (TO 500 HPA) [K]'},
{'abbr': 78, 'code': 78, 'title': 'var78 undefined'},
{'abbr': 79, 'code': 79, 'title': 'var79 undefined'},
{'abbr': 80, 'code': 80, 'title': 'var80 undefined'},
{'abbr': 81, 'code': 81, 'title': 'lsmk LAND SEA MASK [0,1]'},
{'abbr': 82, 'code': 82, 'title': 'dslm DEV SEA_LEV FROM MEAN [m]'},
{'abbr': 83, 'code': 83, 'title': 'zorl ROUGHNESS LENGTH [m]'},
{'abbr': 84, 'code': 84, 'title': 'albe ALBEDO [%]'},
{'abbr': 85, 'code': 85, 'title': 'dstp DEEP SOIL TEMPERATURE [K]'},
{'abbr': 86, 'code': 86, 'title': 'soic SOIL MOISTURE CONTENT [Kg/m2]'},
{'abbr': 87, 'code': 87, 'title': 'vege VEGETATION [%]'},
{'abbr': 88, 'code': 88, 'title': 'var88 undefined'},
{'abbr': 89, 'code': 89, 'title': 'dens DENSITY [kg/m3]'},
{'abbr': 90, 'code': 90, 'title': 'var90 Undefined'},
{'abbr': 91, 'code': 91, 'title': 'icec ICE CONCENTRATION [fraction]'},
{'abbr': 92, 'code': 92, 'title': 'icet ICE THICKNESS [m]'},
{'abbr': 93, 'code': 93, 'title': 'iced DIRECTION OF ICE DRIFT [deg]'},
{'abbr': 94, 'code': 94, 'title': 'ices SPEED OF ICE DRIFT [m/s]'},
{'abbr': 95, 'code': 95, 'title': 'iceu U-COMP OF ICE DRIFT [m/s]'},
{'abbr': 96, 'code': 96, 'title': 'icev V-COMP OF ICE DRIFT [m/s]'},
{'abbr': 97, 'code': 97, 'title': 'iceg ICE GROWTH [m]'},
{'abbr': 98, 'code': 98, 'title': 'icdv ICE DIVERGENCE [sec/sec]'},
{'abbr': 99, 'code': 99, 'title': 'var99 undefined'},
{'abbr': 100, 'code': 100, 'title': 'shcw SIG HGT COM WAVE/SWELL [m]'},
{'abbr': 101, 'code': 101, 'title': 'wwdi DIRECTION OF WIND WAVE [deg]'},
{'abbr': 102, 'code': 102, 'title': 'wwsh SIG HGHT OF WIND WAVES [m]'},
{'abbr': 103, 'code': 103, 'title': 'wwmp MEAN PERIOD WIND WAVES [sec]'},
{'abbr': 104, 'code': 104, 'title': 'swdi DIRECTION OF SWELL WAVE [deg]'},
{'abbr': 105, 'code': 105, 'title': 'swsh SIG HEIGHT SWELL WAVES [m]'},
{'abbr': 106, 'code': 106, 'title': 'swmp MEAN PERIOD SWELL WAVES [sec]'},
{'abbr': 107, 'code': 107, 'title': 'prwd PRIMARY WAVE DIRECTION [deg]'},
{'abbr': 108, 'code': 108, 'title': 'prmp PRIM WAVE MEAN PERIOD [s]'},
{'abbr': 109, 'code': 109, 'title': 'swdi SECOND WAVE DIRECTION [deg]'},
{'abbr': 110, 'code': 110, 'title': 'swmp SECOND WAVE MEAN PERIOD [s]'},
{'abbr': 111,
'code': 111,
'title': 'ocas SHORT WAVE ABSORBED AT GROUND [W/m2]'},
{'abbr': 112, 'code': 112, 'title': 'slds NET LONG WAVE AT BOTTOM [W/m2]'},
{'abbr': 113, 'code': 113, 'title': 'nswr NET SHORT-WAV RAD(TOP) [W/m2]'},
{'abbr': 114, 'code': 114, 'title': 'role OUTGOING LONG WAVE AT TOP [W/m2]'},
{'abbr': 115, 'code': 115, 'title': 'lwrd LONG-WAV RAD [W/m2]'},
{'abbr': 116,
'code': 116,
'title': 'swea SHORT WAVE ABSORBED BY EARTH/ATMOSPHERE [W/m2]'},
{'abbr': 117, 'code': 117, 'title': 'glbr GLOBAL RADIATION [W/m2 ]'},
{'abbr': 118, 'code': 118, 'title': 'var118 undefined'},
{'abbr': 119, 'code': 119, 'title': 'var119 undefined'},
{'abbr': 120, 'code': 120, 'title': 'var120 undefined'},
{'abbr': 121,
'code': 121,
'title': 'clsf LATENT HEAT FLUX FROM SURFACE [W/m2]'},
{'abbr': 122,
'code': 122,
'title': 'cssf SENSIBLE HEAT FLUX FROM SURFACE [W/m2]'},
{'abbr': 123, 'code': 123, 'title': 'blds BOUND LAYER DISSIPATION [W/m2]'},
{'abbr': 124, 'code': 124, 'title': 'var124 undefined'},
{'abbr': 125, 'code': 125, 'title': 'var125 undefined'},
{'abbr': 126, 'code': 126, 'title': 'var126 undefined'},
{'abbr': 127, 'code': 127, 'title': 'imag IMAGE [image^data]'},
{'abbr': 128, 'code': 128, 'title': 'tp2m 2 METRE TEMPERATURE [K]'},
{'abbr': 129, 'code': 129, 'title': 'dp2m 2 METRE DEWPOINT TEMPERATURE [K]'},
{'abbr': 130, 'code': 130, 'title': 'u10m 10 METRE U-WIND COMPONENT [m/s]'},
{'abbr': 131, 'code': 131, 'title': 'v10m 10 METRE V-WIND COMPONENT [m/s]'},
{'abbr': 132, 'code': 132, 'title': 'topo TOPOGRAPHY [m]'},
{'abbr': 133,
'code': 133,
'title': 'gsfp GEOMETRIC MEAN SURFACE PRESSURE [hPa]'},
{'abbr': 134, 'code': 134, 'title': 'lnsp LN SURFACE PRESSURE [hPa]'},
{'abbr': 135, 'code': 135, 'title': 'pslc SURFACE PRESSURE [hPa]'},
{'abbr': 136,
'code': 136,
'title': 'pslm M S L PRESSURE (MESINGER METHOD) [hPa]'},
{'abbr': 137, 'code': 137, 'title': 'mask MASK [-/+]'},
{'abbr': 138, 'code': 138, 'title': 'mxwu MAXIMUM U-WIND [m/s]'},
{'abbr': 139, 'code': 139, 'title': 'mxwv MAXIMUM V-WIND [m/s]'},
{'abbr': 140,
'code': 140,
'title': 'cape CONVECTIVE AVAIL. POT.ENERGY [m2/s2]'},
{'abbr': 141, 'code': 141, 'title': 'cine CONVECTIVE INHIB. ENERGY [m2/s2]'},
{'abbr': 142, 'code': 142, 'title': 'lhcv CONVECTIVE LATENT HEATING [K/s]'},
{'abbr': 143, 'code': 143, 'title': 'mscv CONVECTIVE MOISTURE SOURCE [1/s]'},
{'abbr': 144,
'code': 144,
'title': 'scvm SHALLOW CONV. MOISTURE SOURCE [1/s]'},
{'abbr': 145, 'code': 145, 'title': 'scvh SHALLOW CONVECTIVE HEATING [K/s]'},
{'abbr': 146, 'code': 146, 'title': 'mxwp MAXIMUM WIND PRESS. LVL [hPa]'},
{'abbr': 147, 'code': 147, 'title': 'ustr STORM MOTION U-COMPONENT [m/s]'},
{'abbr': 148, 'code': 148, 'title': 'vstr STORM MOTION V-COMPONENT [m/s]'},
{'abbr': 149, 'code': 149, 'title': 'cbnt MEAN CLOUD COVER [0-1]'},
{'abbr': 150, 'code': 150, 'title': 'pcbs PRESSURE AT CLOUD BASE [hPa]'},
{'abbr': 151, 'code': 151, 'title': 'pctp PRESSURE AT CLOUD TOP [hPa]'},
{'abbr': 152, 'code': 152, 'title': 'fzht FREEZING LEVEL HEIGHT [m]'},
{'abbr': 153,
'code': 153,
'title': 'fzrh FREEZING LEVEL RELATIVE HUMIDITY [%]'},
{'abbr': 154, 'code': 154, 'title': 'fdlt FLIGHT LEVELS TEMPERATURE [K]'},
{'abbr': 155, 'code': 155, 'title': 'fdlu FLIGHT LEVELS U-WIND [m/s]'},
{'abbr': 156, 'code': 156, 'title': 'fdlv FLIGHT LEVELS V-WIND [m/s]'},
{'abbr': 157, 'code': 157, 'title': 'tppp TROPOPAUSE PRESSURE [hPa]'},
{'abbr': 158, 'code': 158, 'title': 'tppt TROPOPAUSE TEMPERATURE [K]'},
{'abbr': 159, 'code': 159, 'title': 'tppu TROPOPAUSE U-WIND COMPONENT [m/s]'},
{'abbr': 160, 'code': 160, 'title': 'tppv TROPOPAUSE v-WIND COMPONENT [m/s]'},
{'abbr': 161, 'code': 161, 'title': 'var161 undefined'},
{'abbr': 162, 'code': 162, 'title': 'gvdu GRAVITY WAVE DRAG DU/DT [m/s2]'},
{'abbr': 163, 'code': 163, 'title': 'gvdv GRAVITY WAVE DRAG DV/DT [m/s2]'},
{'abbr': 164,
'code': 164,
'title': 'gvus GRAVITY WAVE DRAG SFC ZONAL STRESS [Pa]'},
{'abbr': 165,
'code': 165,
'title': 'gvvs GRAVITY WAVE DRAG SFC MERIDIONAL STRESS [Pa]'},
{'abbr': 166, 'code': 166, 'title': 'var166 undefined'},
{'abbr': 167,
'code': 167,
'title': 'dvsh DIVERGENCE OF SPECIFIC HUMIDITY [1/s]'},
{'abbr': 168, 'code': 168, 'title': 'hmfc HORIZ. MOISTURE FLUX CONV. [1/s]'},
{'abbr': 169,
'code': 169,
'title': 'vmfl VERT. INTEGRATED MOISTURE FLUX CONV. [kg/(m2*s)]'},
{'abbr': 170,
'code': 170,
'title': 'vadv VERTICAL MOISTURE ADVECTION [kg/(kg*s)]'},
{'abbr': 171,
'code': 171,
'title': 'nhcm NEG. HUM. CORR. MOISTURE SOURCE [kg/(kg*s)]'},
{'abbr': 172, 'code': 172, 'title': 'lglh LARGE SCALE LATENT HEATING [K/s]'},
{'abbr': 173, 'code': 173, 'title': 'lgms LARGE SCALE MOISTURE SOURCE [1/s]'},
{'abbr': 174, 'code': 174, 'title': 'smav SOIL MOISTURE AVAILABILITY [0-1]'},
{'abbr': 175, 'code': 175, 'title': 'tgrz SOIL TEMPERATURE OF ROOT ZONE [K]'},
{'abbr': 176, 'code': 176, 'title': 'bslh BARE SOIL LATENT HEAT [Ws/m2]'},
{'abbr': 177, 'code': 177, 'title': 'evpp POTENTIAL SFC EVAPORATION [m]'},
{'abbr': 178, 'code': 178, 'title': 'rnof RUNOFF [kg/m2/s)]'},
{'abbr': 179, 'code': 179, 'title': 'pitp INTERCEPTION LOSS [W/m2]'},
{'abbr': 180,
'code': 180,
'title': 'vpca VAPOR PRESSURE OF CANOPY AIR SPACE [mb]'},
{'abbr': 181, 'code': 181, 'title': 'qsfc SURFACE SPEC HUMIDITY [kg/kg]'},
{'abbr': 182, 'code': 182, 'title': 'ussl SOIL WETNESS OF SURFACE [0-1]'},
{'abbr': 183, 'code': 183, 'title': 'uzrs SOIL WETNESS OF ROOT ZONE [0-1]'},
{'abbr': 184,
'code': 184,
'title': 'uzds SOIL WETNESS OF DRAINAGE ZONE [0-1]'},
{'abbr': 185, 'code': 185, 'title': 'amdl STORAGE ON CANOPY [m]'},
{'abbr': 186, 'code': 186, 'title': 'amsl STORAGE ON GROUND [m]'},
{'abbr': 187, 'code': 187, 'title': 'tsfc SURFACE TEMPERATURE [K]'},
{'abbr': 188, 'code': 188, 'title': 'tems SURFACE ABSOLUTE TEMPERATURE [K]'},
{'abbr': 189,
'code': 189,
'title': 'tcas TEMPERATURE OF CANOPY AIR SPACE [K]'},
{'abbr': 190, 'code': 190, 'title': 'ctmp TEMPERATURE AT CANOPY [K]'},
{'abbr': 191,
'code': 191,
'title': 'tgsc GROUND/SURFACE COVER TEMPERATURE [K]'},
{'abbr': 192, 'code': 192, 'title': 'uves SURFACE ZONAL WIND (U) [m/s]'},
{'abbr': 193, 'code': 193, 'title': 'usst SURFACE ZONAL WIND STRESS [Pa]'},
{'abbr': 194, 'code': 194, 'title': 'vves SURFACE MERIDIONAL WIND (V) [m/s]'},
{'abbr': 195,
'code': 195,
'title': 'vsst SURFACE MERIDIONAL WIND STRESS [Pa]'},
{'abbr': 196, 'code': 196, 'title': 'suvf SURFACE MOMENTUM FLUX [W/m2]'},
{'abbr': 197, 'code': 197, 'title': 'iswf INCIDENT SHORT WAVE FLUX [W/m2]'},
{'abbr': 198, 'code': 198, 'title': 'ghfl TIME AVE GROUND HT FLX [W/m2]'},
{'abbr': 199, 'code': 199, 'title': 'var199 undefined'},
{'abbr': 200,
'code': 200,
'title': 'lwbc NET LONG WAVE AT BOTTOM (CLEAR) [W/m2]'},
{'abbr': 201,
'code': 201,
'title': 'lwtc OUTGOING LONG WAVE AT TOP (CLEAR) [W/m2]'},
{'abbr': 202,
'code': 202,
'title': 'swec SHORT WV ABSRBD BY EARTH/ATMOS (CLEAR) [W/m2]'},
{'abbr': 203,
'code': 203,
'title': 'ocac SHORT WAVE ABSORBED AT GROUND (CLEAR) [W/m2]'},
{'abbr': 204, 'code': 204, 'title': 'var204 undefined'},
{'abbr': 205, 'code': 205, 'title': 'lwrh LONG WAVE RADIATIVE HEATING [K/s]'},
{'abbr': 206, 'code': 206, 'title': 'swrh SHORT WAVE RADIATIVE HEATING [K/s]'},
{'abbr': 207,
'code': 207,
'title': 'olis DOWNWARD LONG WAVE AT BOTTOM [W/m2]'},
{'abbr': 208,
'code': 208,
'title': 'olic DOWNWARD LONG WAVE AT BOTTOM (CLEAR) [W/m2]'},
{'abbr': 209,
'code': 209,
'title': 'ocis DOWNWARD SHORT WAVE AT GROUND [W/m2]'},
{'abbr': 210,
'code': 210,
'title': 'ocic DOWNWARD SHORT WAVE AT GROUND (CLEAR) [W/m2]'},
{'abbr': 211, 'code': 211, 'title': 'oles UPWARD LONG WAVE AT BOTTOM [W/m2]'},
{'abbr': 212, 'code': 212, 'title': 'oces UPWARD SHORT WAVE AT GROUND [W/m2]'},
{'abbr': 213,
'code': 213,
'title': 'swgc UPWARD SHORT WAVE AT GROUND (CLEAR) [W/m2]'},
{'abbr': 214, 'code': 214, 'title': 'roce UPWARD SHORT WAVE AT TOP [W/m2]'},
{'abbr': 215,
'code': 215,
'title': 'swtc UPWARD SHORT WAVE AT TOP (CLEAR) [W/m2]'},
{'abbr': 216, 'code': 216, 'title': 'var216 undefined'},
{'abbr': 217, 'code': 217, 'title': 'var217 undefined'},
{'abbr': 218, 'code': 218, 'title': 'hhdf HORIZONTAL HEATING DIFFUSION [K/s]'},
{'abbr': 219,
'code': 219,
'title': 'hmdf HORIZONTAL MOISTURE DIFFUSION [1/s]'},
{'abbr': 220,
'code': 220,
'title': 'hddf HORIZONTAL DIVERGENCE DIFFUSION [1/s2]'},
{'abbr': 221,
'code': 221,
'title': 'hvdf HORIZONTAL VORTICITY DIFFUSION [1/s2]'},
{'abbr': 222,
'code': 222,
'title': 'vdms VERTICAL DIFF. MOISTURE SOURCE [1/s]'},
{'abbr': 223, 'code': 223, 'title': 'vdfu VERTICAL DIFFUSION DU/DT [m/s2]'},
{'abbr': 224, 'code': 224, 'title': 'vdfv VERTICAL DIFFUSION DV/DT [m/s2]'},
{'abbr': 225, 'code': 225, 'title': 'vdfh VERTICAL DIFFUSION HEATING [K/s]'},
{'abbr': 226, 'code': 226, 'title': 'umrs SURFACE RELATIVE HUMIDITY [no Dim]'},
{'abbr': 227,
'code': 227,
'title': 'vdcc VERTICAL DIST TOTAL CLOUD COVER [no Dim]'},
{'abbr': 228, 'code': 228, 'title': 'var228 undefined'},
{'abbr': 229, 'code': 229, 'title': 'var229 undefined'},
{'abbr': 230,
'code': 230,
'title': 'usmt TIME MEAN SURFACE ZONAL WIND (U) [m/s]'},
{'abbr': 231,
'code': 231,
'title': 'vsmt TIME MEAN SURFACE MERIDIONAL WIND (V) [m/s]'},
{'abbr': 232,
'code': 232,
'title': 'tsmt TIME MEAN SURFACE ABSOLUTE TEMPERATURE [K]'},
{'abbr': 233,
'code': 233,
'title': 'rsmt TIME MEAN SURFACE RELATIVE HUMIDITY [no Dim]'},
{'abbr': 234, 'code': 234, 'title': 'atmt TIME MEAN ABSOLUTE TEMPERATURE [K]'},
{'abbr': 235,
'code': 235,
'title': 'stmt TIME MEAN DEEP SOIL TEMPERATURE [K]'},
{'abbr': 236, 'code': 236, 'title': 'ommt TIME MEAN DERIVED OMEGA [Pa/s]'},
{'abbr': 237, 'code': 237, 'title': 'dvmt TIME MEAN DIVERGENCE [1/s]'},
{'abbr': 238, 'code': 238, 'title': 'zhmt TIME MEAN GEOPOTENTIAL HEIGHT [m]'},
{'abbr': 239,
'code': 239,
'title': 'lnmt TIME MEAN LOG SURFACE PRESSURE [ln(cbar)]'},
{'abbr': 240, 'code': 240, 'title': 'mkmt TIME MEAN MASK [-/+]'},
{'abbr': 241,
'code': 241,
'title': 'vvmt TIME MEAN MERIDIONAL WIND (V) [m/s]'},
{'abbr': 242, 'code': 242, 'title': 'omtm TIME MEAN OMEGA [cbar/s]'},
{'abbr': 243,
'code': 243,
'title': 'ptmt TIME MEAN POTENTIAL TEMPERATURE [K]'},
{'abbr': 244, 'code': 244, 'title': 'pcmt TIME MEAN PRECIP. WATER [kg/m2]'},
{'abbr': 245, 'code': 245, 'title': 'rhmt TIME MEAN RELATIVE HUMIDITY [%]'},
{'abbr': 246, 'code': 246, 'title': 'mpmt TIME MEAN SEA LEVEL PRESSURE [hPa]'},
{'abbr': 247, 'code': 247, 'title': 'simt TIME MEAN SIGMADOT [1/s]'},
{'abbr': 248,
'code': 248,
'title': 'uemt TIME MEAN SPECIFIC HUMIDITY [kg/kg]'},
{'abbr': 249, 'code': 249, 'title': 'fcmt TIME MEAN STREAM FUNCTION| m2/s]'},
{'abbr': 250, 'code': 250, 'title': 'psmt TIME MEAN SURFACE PRESSURE [hPa]'},
{'abbr': 251, 'code': 251, 'title': 'tmmt TIME MEAN SURFACE TEMPERATURE [K]'},
{'abbr': 252,
'code': 252,
'title': 'pvmt TIME MEAN VELOCITY POTENTIAL [m2/s]'},
{'abbr': 253, 'code': 253, 'title': 'tvmt TIME MEAN VIRTUAL TEMPERATURE [K]'},
{'abbr': 254, 'code': 254, 'title': 'vtmt TIME MEAN VORTICITY [1/s]'},
{'abbr': None, 'code': 255, 'title': 'uvmt TIME MEAN ZONAL WIND (U) [m/s]'})
| def load(h):
return ({'abbr': 1, 'code': 1, 'title': 'PRES Pressure [hPa]'}, {'abbr': 2, 'code': 2, 'title': 'psnm Pressure reduced to MSL [hPa]'}, {'abbr': 3, 'code': 3, 'title': 'tsps Pressure tendency [Pa/s]'}, {'abbr': 4, 'code': 4, 'title': 'var4 undefined'}, {'abbr': 5, 'code': 5, 'title': 'var5 undefined'}, {'abbr': 6, 'code': 6, 'title': 'geop Geopotential [dam]'}, {'abbr': 7, 'code': 7, 'title': 'zgeo Geopotential height [gpm]'}, {'abbr': 8, 'code': 8, 'title': 'gzge Geometric height [m]'}, {'abbr': 9, 'code': 9, 'title': 'var9 undefined'}, {'abbr': 10, 'code': 10, 'title': 'var10 undefined'}, {'abbr': 11, 'code': 11, 'title': 'temp ABSOLUTE TEMPERATURE [K]'}, {'abbr': 12, 'code': 12, 'title': 'vtmp VIRTUAL TEMPERATURE [K]'}, {'abbr': 13, 'code': 13, 'title': 'ptmp POTENTIAL TEMPERATURE [K]'}, {'abbr': 14, 'code': 14, 'title': 'psat PSEUDO-ADIABATIC POTENTIAL TEMPERATURE [K]'}, {'abbr': 15, 'code': 15, 'title': 'mxtp MAXIMUM TEMPERATURE [K]'}, {'abbr': 16, 'code': 16, 'title': 'mntp MINIMUM TEMPERATURE [K]'}, {'abbr': 17, 'code': 17, 'title': 'tpor DEW POINT TEMPERATURE [K]'}, {'abbr': 18, 'code': 18, 'title': 'dptd DEW POINT DEPRESSION [K]'}, {'abbr': 19, 'code': 19, 'title': 'lpsr LAPSE RATE [K/m]'}, {'abbr': 20, 'code': 20, 'title': 'var20 undefined'}, {'abbr': 21, 'code': 21, 'title': 'rds1 RADAR SPECTRA(1) [non-dim]'}, {'abbr': 22, 'code': 22, 'title': 'rds2 RADAR SPECTRA(2) [non-dim]'}, {'abbr': 23, 'code': 23, 'title': 'rds3 RADAR SPECTRA(3) [non-dim]'}, {'abbr': 24, 'code': 24, 'title': 'var24 undefined'}, {'abbr': 25, 'code': 25, 'title': 'tpan TEMPERATURE ANOMALY [K]'}, {'abbr': 26, 'code': 26, 'title': 'psan PRESSURE ANOMALY [Pa hPa]'}, {'abbr': 27, 'code': 27, 'title': 'zgan GEOPOT HEIGHT ANOMALY [m]'}, {'abbr': 28, 'code': 28, 'title': 'wvs1 WAVE SPECTRA(1) [non-dim]'}, {'abbr': 29, 'code': 29, 'title': 'wvs2 WAVE SPECTRA(2) [non-dim]'}, {'abbr': 30, 'code': 30, 'title': 'wvs3 WAVE SPECTRA(3) [non-dim]'}, {'abbr': 31, 'code': 31, 'title': 'wind WIND DIRECTION [deg]'}, {'abbr': 32, 'code': 32, 'title': 'wins WIND SPEED [m/s]'}, {'abbr': 33, 'code': 33, 'title': 'uvel ZONAL WIND (U) [m/s]'}, {'abbr': 34, 'code': 34, 'title': 'vvel MERIDIONAL WIND (V) [m/s]'}, {'abbr': 35, 'code': 35, 'title': 'fcor STREAM FUNCTION [m2/s]'}, {'abbr': 36, 'code': 36, 'title': 'potv VELOCITY POTENTIAL [m2/s]'}, {'abbr': 37, 'code': 37, 'title': 'var37 undefined'}, {'abbr': 38, 'code': 38, 'title': 'sgvv SIGMA COORD VERT VEL [sec/sec]'}, {'abbr': 39, 'code': 39, 'title': 'omeg OMEGA [Pa/s]'}, {'abbr': 40, 'code': 40, 'title': 'omg2 VERTICAL VELOCITY [m/s]'}, {'abbr': 41, 'code': 41, 'title': 'abvo ABSOLUTE VORTICITY [10**5/sec]'}, {'abbr': 42, 'code': 42, 'title': 'abdv ABSOLUTE DIVERGENCE [10**5/sec]'}, {'abbr': 43, 'code': 43, 'title': 'vort VORTICITY [1/s]'}, {'abbr': 44, 'code': 44, 'title': 'divg DIVERGENCE [1/s]'}, {'abbr': 45, 'code': 45, 'title': 'vucs VERTICAL U-COMP SHEAR [1/sec]'}, {'abbr': 46, 'code': 46, 'title': 'vvcs VERT V-COMP SHEAR [1/sec]'}, {'abbr': 47, 'code': 47, 'title': 'dirc DIRECTION OF CURRENT [deg]'}, {'abbr': 48, 'code': 48, 'title': 'spdc SPEED OF CURRENT [m/s]'}, {'abbr': 49, 'code': 49, 'title': 'ucpc U-COMPONENT OF CURRENT [m/s]'}, {'abbr': 50, 'code': 50, 'title': 'vcpc V-COMPONENT OF CURRENT [m/s]'}, {'abbr': 51, 'code': 51, 'title': 'umes SPECIFIC HUMIDITY [kg/kg]'}, {'abbr': 52, 'code': 52, 'title': 'umrl RELATIVE HUMIDITY [no Dim]'}, {'abbr': 53, 'code': 53, 'title': 'hmxr HUMIDITY MIXING RATIO [kg/kg]'}, {'abbr': 54, 'code': 54, 'title': 'agpl INST. PRECIPITABLE WATER [Kg/m2]'}, {'abbr': 55, 'code': 55, 'title': 'vapp VAPOUR PRESSURE [Pa hpa]'}, {'abbr': 56, 'code': 56, 'title': 'sadf SATURATION DEFICIT [Pa hPa]'}, {'abbr': 57, 'code': 57, 'title': 'evap EVAPORATION [Kg/m2/day]'}, {'abbr': 58, 'code': 58, 'title': 'var58 undefined'}, {'abbr': 59, 'code': 59, 'title': 'prcr PRECIPITATION RATE [kg/m2/day]'}, {'abbr': 60, 'code': 60, 'title': 'thpb THUNDER PROBABILITY [%]'}, {'abbr': 61, 'code': 61, 'title': 'prec TOTAL PRECIPITATION [Kg/m2/day]'}, {'abbr': 62, 'code': 62, 'title': 'prge LARGE SCALE PRECIPITATION [Kg/m2/day]'}, {'abbr': 63, 'code': 63, 'title': 'prcv CONVECTIVE PRECIPITATION [Kg/m2/day]'}, {'abbr': 64, 'code': 64, 'title': 'neve SNOWFALL [Kg/m2/day]'}, {'abbr': 65, 'code': 65, 'title': 'wenv WAT EQUIV ACC SNOW DEPTH [kg/m2]'}, {'abbr': 66, 'code': 66, 'title': 'nvde SNOW DEPTH [cm]'}, {'abbr': 67, 'code': 67, 'title': 'mxld MIXED LAYER DEPTH [m cm]'}, {'abbr': 68, 'code': 68, 'title': 'tthd TRANS THERMOCLINE DEPTH [m cm]'}, {'abbr': 69, 'code': 69, 'title': 'mthd MAIN THERMOCLINE DEPTH [m cm]'}, {'abbr': 70, 'code': 70, 'title': 'mtha MAIN THERMOCLINE ANOM [m cm]'}, {'abbr': 71, 'code': 71, 'title': 'cbnv CLOUD COVER [0-1]'}, {'abbr': 72, 'code': 72, 'title': 'cvnv CONVECTIVE CLOUD COVER [0-1]'}, {'abbr': 73, 'code': 73, 'title': 'lwnv LOW CLOUD COVER [0-1]'}, {'abbr': 74, 'code': 74, 'title': 'mdnv MEDIUM CLOUD COVER [0-1]'}, {'abbr': 75, 'code': 75, 'title': 'hinv HIGH CLOUD COVER [0-1]'}, {'abbr': 76, 'code': 76, 'title': 'wtnv CLOUD WATER [kg/m2]'}, {'abbr': 77, 'code': 77, 'title': 'bli BEST LIFTED INDEX (TO 500 HPA) [K]'}, {'abbr': 78, 'code': 78, 'title': 'var78 undefined'}, {'abbr': 79, 'code': 79, 'title': 'var79 undefined'}, {'abbr': 80, 'code': 80, 'title': 'var80 undefined'}, {'abbr': 81, 'code': 81, 'title': 'lsmk LAND SEA MASK [0,1]'}, {'abbr': 82, 'code': 82, 'title': 'dslm DEV SEA_LEV FROM MEAN [m]'}, {'abbr': 83, 'code': 83, 'title': 'zorl ROUGHNESS LENGTH [m]'}, {'abbr': 84, 'code': 84, 'title': 'albe ALBEDO [%]'}, {'abbr': 85, 'code': 85, 'title': 'dstp DEEP SOIL TEMPERATURE [K]'}, {'abbr': 86, 'code': 86, 'title': 'soic SOIL MOISTURE CONTENT [Kg/m2]'}, {'abbr': 87, 'code': 87, 'title': 'vege VEGETATION [%]'}, {'abbr': 88, 'code': 88, 'title': 'var88 undefined'}, {'abbr': 89, 'code': 89, 'title': 'dens DENSITY [kg/m3]'}, {'abbr': 90, 'code': 90, 'title': 'var90 Undefined'}, {'abbr': 91, 'code': 91, 'title': 'icec ICE CONCENTRATION [fraction]'}, {'abbr': 92, 'code': 92, 'title': 'icet ICE THICKNESS [m]'}, {'abbr': 93, 'code': 93, 'title': 'iced DIRECTION OF ICE DRIFT [deg]'}, {'abbr': 94, 'code': 94, 'title': 'ices SPEED OF ICE DRIFT [m/s]'}, {'abbr': 95, 'code': 95, 'title': 'iceu U-COMP OF ICE DRIFT [m/s]'}, {'abbr': 96, 'code': 96, 'title': 'icev V-COMP OF ICE DRIFT [m/s]'}, {'abbr': 97, 'code': 97, 'title': 'iceg ICE GROWTH [m]'}, {'abbr': 98, 'code': 98, 'title': 'icdv ICE DIVERGENCE [sec/sec]'}, {'abbr': 99, 'code': 99, 'title': 'var99 undefined'}, {'abbr': 100, 'code': 100, 'title': 'shcw SIG HGT COM WAVE/SWELL [m]'}, {'abbr': 101, 'code': 101, 'title': 'wwdi DIRECTION OF WIND WAVE [deg]'}, {'abbr': 102, 'code': 102, 'title': 'wwsh SIG HGHT OF WIND WAVES [m]'}, {'abbr': 103, 'code': 103, 'title': 'wwmp MEAN PERIOD WIND WAVES [sec]'}, {'abbr': 104, 'code': 104, 'title': 'swdi DIRECTION OF SWELL WAVE [deg]'}, {'abbr': 105, 'code': 105, 'title': 'swsh SIG HEIGHT SWELL WAVES [m]'}, {'abbr': 106, 'code': 106, 'title': 'swmp MEAN PERIOD SWELL WAVES [sec]'}, {'abbr': 107, 'code': 107, 'title': 'prwd PRIMARY WAVE DIRECTION [deg]'}, {'abbr': 108, 'code': 108, 'title': 'prmp PRIM WAVE MEAN PERIOD [s]'}, {'abbr': 109, 'code': 109, 'title': 'swdi SECOND WAVE DIRECTION [deg]'}, {'abbr': 110, 'code': 110, 'title': 'swmp SECOND WAVE MEAN PERIOD [s]'}, {'abbr': 111, 'code': 111, 'title': 'ocas SHORT WAVE ABSORBED AT GROUND [W/m2]'}, {'abbr': 112, 'code': 112, 'title': 'slds NET LONG WAVE AT BOTTOM [W/m2]'}, {'abbr': 113, 'code': 113, 'title': 'nswr NET SHORT-WAV RAD(TOP) [W/m2]'}, {'abbr': 114, 'code': 114, 'title': 'role OUTGOING LONG WAVE AT TOP [W/m2]'}, {'abbr': 115, 'code': 115, 'title': 'lwrd LONG-WAV RAD [W/m2]'}, {'abbr': 116, 'code': 116, 'title': 'swea SHORT WAVE ABSORBED BY EARTH/ATMOSPHERE [W/m2]'}, {'abbr': 117, 'code': 117, 'title': 'glbr GLOBAL RADIATION [W/m2 ]'}, {'abbr': 118, 'code': 118, 'title': 'var118 undefined'}, {'abbr': 119, 'code': 119, 'title': 'var119 undefined'}, {'abbr': 120, 'code': 120, 'title': 'var120 undefined'}, {'abbr': 121, 'code': 121, 'title': 'clsf LATENT HEAT FLUX FROM SURFACE [W/m2]'}, {'abbr': 122, 'code': 122, 'title': 'cssf SENSIBLE HEAT FLUX FROM SURFACE [W/m2]'}, {'abbr': 123, 'code': 123, 'title': 'blds BOUND LAYER DISSIPATION [W/m2]'}, {'abbr': 124, 'code': 124, 'title': 'var124 undefined'}, {'abbr': 125, 'code': 125, 'title': 'var125 undefined'}, {'abbr': 126, 'code': 126, 'title': 'var126 undefined'}, {'abbr': 127, 'code': 127, 'title': 'imag IMAGE [image^data]'}, {'abbr': 128, 'code': 128, 'title': 'tp2m 2 METRE TEMPERATURE [K]'}, {'abbr': 129, 'code': 129, 'title': 'dp2m 2 METRE DEWPOINT TEMPERATURE [K]'}, {'abbr': 130, 'code': 130, 'title': 'u10m 10 METRE U-WIND COMPONENT [m/s]'}, {'abbr': 131, 'code': 131, 'title': 'v10m 10 METRE V-WIND COMPONENT [m/s]'}, {'abbr': 132, 'code': 132, 'title': 'topo TOPOGRAPHY [m]'}, {'abbr': 133, 'code': 133, 'title': 'gsfp GEOMETRIC MEAN SURFACE PRESSURE [hPa]'}, {'abbr': 134, 'code': 134, 'title': 'lnsp LN SURFACE PRESSURE [hPa]'}, {'abbr': 135, 'code': 135, 'title': 'pslc SURFACE PRESSURE [hPa]'}, {'abbr': 136, 'code': 136, 'title': 'pslm M S L PRESSURE (MESINGER METHOD) [hPa]'}, {'abbr': 137, 'code': 137, 'title': 'mask MASK [-/+]'}, {'abbr': 138, 'code': 138, 'title': 'mxwu MAXIMUM U-WIND [m/s]'}, {'abbr': 139, 'code': 139, 'title': 'mxwv MAXIMUM V-WIND [m/s]'}, {'abbr': 140, 'code': 140, 'title': 'cape CONVECTIVE AVAIL. POT.ENERGY [m2/s2]'}, {'abbr': 141, 'code': 141, 'title': 'cine CONVECTIVE INHIB. ENERGY [m2/s2]'}, {'abbr': 142, 'code': 142, 'title': 'lhcv CONVECTIVE LATENT HEATING [K/s]'}, {'abbr': 143, 'code': 143, 'title': 'mscv CONVECTIVE MOISTURE SOURCE [1/s]'}, {'abbr': 144, 'code': 144, 'title': 'scvm SHALLOW CONV. MOISTURE SOURCE [1/s]'}, {'abbr': 145, 'code': 145, 'title': 'scvh SHALLOW CONVECTIVE HEATING [K/s]'}, {'abbr': 146, 'code': 146, 'title': 'mxwp MAXIMUM WIND PRESS. LVL [hPa]'}, {'abbr': 147, 'code': 147, 'title': 'ustr STORM MOTION U-COMPONENT [m/s]'}, {'abbr': 148, 'code': 148, 'title': 'vstr STORM MOTION V-COMPONENT [m/s]'}, {'abbr': 149, 'code': 149, 'title': 'cbnt MEAN CLOUD COVER [0-1]'}, {'abbr': 150, 'code': 150, 'title': 'pcbs PRESSURE AT CLOUD BASE [hPa]'}, {'abbr': 151, 'code': 151, 'title': 'pctp PRESSURE AT CLOUD TOP [hPa]'}, {'abbr': 152, 'code': 152, 'title': 'fzht FREEZING LEVEL HEIGHT [m]'}, {'abbr': 153, 'code': 153, 'title': 'fzrh FREEZING LEVEL RELATIVE HUMIDITY [%]'}, {'abbr': 154, 'code': 154, 'title': 'fdlt FLIGHT LEVELS TEMPERATURE [K]'}, {'abbr': 155, 'code': 155, 'title': 'fdlu FLIGHT LEVELS U-WIND [m/s]'}, {'abbr': 156, 'code': 156, 'title': 'fdlv FLIGHT LEVELS V-WIND [m/s]'}, {'abbr': 157, 'code': 157, 'title': 'tppp TROPOPAUSE PRESSURE [hPa]'}, {'abbr': 158, 'code': 158, 'title': 'tppt TROPOPAUSE TEMPERATURE [K]'}, {'abbr': 159, 'code': 159, 'title': 'tppu TROPOPAUSE U-WIND COMPONENT [m/s]'}, {'abbr': 160, 'code': 160, 'title': 'tppv TROPOPAUSE v-WIND COMPONENT [m/s]'}, {'abbr': 161, 'code': 161, 'title': 'var161 undefined'}, {'abbr': 162, 'code': 162, 'title': 'gvdu GRAVITY WAVE DRAG DU/DT [m/s2]'}, {'abbr': 163, 'code': 163, 'title': 'gvdv GRAVITY WAVE DRAG DV/DT [m/s2]'}, {'abbr': 164, 'code': 164, 'title': 'gvus GRAVITY WAVE DRAG SFC ZONAL STRESS [Pa]'}, {'abbr': 165, 'code': 165, 'title': 'gvvs GRAVITY WAVE DRAG SFC MERIDIONAL STRESS [Pa]'}, {'abbr': 166, 'code': 166, 'title': 'var166 undefined'}, {'abbr': 167, 'code': 167, 'title': 'dvsh DIVERGENCE OF SPECIFIC HUMIDITY [1/s]'}, {'abbr': 168, 'code': 168, 'title': 'hmfc HORIZ. MOISTURE FLUX CONV. [1/s]'}, {'abbr': 169, 'code': 169, 'title': 'vmfl VERT. INTEGRATED MOISTURE FLUX CONV. [kg/(m2*s)]'}, {'abbr': 170, 'code': 170, 'title': 'vadv VERTICAL MOISTURE ADVECTION [kg/(kg*s)]'}, {'abbr': 171, 'code': 171, 'title': 'nhcm NEG. HUM. CORR. MOISTURE SOURCE [kg/(kg*s)]'}, {'abbr': 172, 'code': 172, 'title': 'lglh LARGE SCALE LATENT HEATING [K/s]'}, {'abbr': 173, 'code': 173, 'title': 'lgms LARGE SCALE MOISTURE SOURCE [1/s]'}, {'abbr': 174, 'code': 174, 'title': 'smav SOIL MOISTURE AVAILABILITY [0-1]'}, {'abbr': 175, 'code': 175, 'title': 'tgrz SOIL TEMPERATURE OF ROOT ZONE [K]'}, {'abbr': 176, 'code': 176, 'title': 'bslh BARE SOIL LATENT HEAT [Ws/m2]'}, {'abbr': 177, 'code': 177, 'title': 'evpp POTENTIAL SFC EVAPORATION [m]'}, {'abbr': 178, 'code': 178, 'title': 'rnof RUNOFF [kg/m2/s)]'}, {'abbr': 179, 'code': 179, 'title': 'pitp INTERCEPTION LOSS [W/m2]'}, {'abbr': 180, 'code': 180, 'title': 'vpca VAPOR PRESSURE OF CANOPY AIR SPACE [mb]'}, {'abbr': 181, 'code': 181, 'title': 'qsfc SURFACE SPEC HUMIDITY [kg/kg]'}, {'abbr': 182, 'code': 182, 'title': 'ussl SOIL WETNESS OF SURFACE [0-1]'}, {'abbr': 183, 'code': 183, 'title': 'uzrs SOIL WETNESS OF ROOT ZONE [0-1]'}, {'abbr': 184, 'code': 184, 'title': 'uzds SOIL WETNESS OF DRAINAGE ZONE [0-1]'}, {'abbr': 185, 'code': 185, 'title': 'amdl STORAGE ON CANOPY [m]'}, {'abbr': 186, 'code': 186, 'title': 'amsl STORAGE ON GROUND [m]'}, {'abbr': 187, 'code': 187, 'title': 'tsfc SURFACE TEMPERATURE [K]'}, {'abbr': 188, 'code': 188, 'title': 'tems SURFACE ABSOLUTE TEMPERATURE [K]'}, {'abbr': 189, 'code': 189, 'title': 'tcas TEMPERATURE OF CANOPY AIR SPACE [K]'}, {'abbr': 190, 'code': 190, 'title': 'ctmp TEMPERATURE AT CANOPY [K]'}, {'abbr': 191, 'code': 191, 'title': 'tgsc GROUND/SURFACE COVER TEMPERATURE [K]'}, {'abbr': 192, 'code': 192, 'title': 'uves SURFACE ZONAL WIND (U) [m/s]'}, {'abbr': 193, 'code': 193, 'title': 'usst SURFACE ZONAL WIND STRESS [Pa]'}, {'abbr': 194, 'code': 194, 'title': 'vves SURFACE MERIDIONAL WIND (V) [m/s]'}, {'abbr': 195, 'code': 195, 'title': 'vsst SURFACE MERIDIONAL WIND STRESS [Pa]'}, {'abbr': 196, 'code': 196, 'title': 'suvf SURFACE MOMENTUM FLUX [W/m2]'}, {'abbr': 197, 'code': 197, 'title': 'iswf INCIDENT SHORT WAVE FLUX [W/m2]'}, {'abbr': 198, 'code': 198, 'title': 'ghfl TIME AVE GROUND HT FLX [W/m2]'}, {'abbr': 199, 'code': 199, 'title': 'var199 undefined'}, {'abbr': 200, 'code': 200, 'title': 'lwbc NET LONG WAVE AT BOTTOM (CLEAR) [W/m2]'}, {'abbr': 201, 'code': 201, 'title': 'lwtc OUTGOING LONG WAVE AT TOP (CLEAR) [W/m2]'}, {'abbr': 202, 'code': 202, 'title': 'swec SHORT WV ABSRBD BY EARTH/ATMOS (CLEAR) [W/m2]'}, {'abbr': 203, 'code': 203, 'title': 'ocac SHORT WAVE ABSORBED AT GROUND (CLEAR) [W/m2]'}, {'abbr': 204, 'code': 204, 'title': 'var204 undefined'}, {'abbr': 205, 'code': 205, 'title': 'lwrh LONG WAVE RADIATIVE HEATING [K/s]'}, {'abbr': 206, 'code': 206, 'title': 'swrh SHORT WAVE RADIATIVE HEATING [K/s]'}, {'abbr': 207, 'code': 207, 'title': 'olis DOWNWARD LONG WAVE AT BOTTOM [W/m2]'}, {'abbr': 208, 'code': 208, 'title': 'olic DOWNWARD LONG WAVE AT BOTTOM (CLEAR) [W/m2]'}, {'abbr': 209, 'code': 209, 'title': 'ocis DOWNWARD SHORT WAVE AT GROUND [W/m2]'}, {'abbr': 210, 'code': 210, 'title': 'ocic DOWNWARD SHORT WAVE AT GROUND (CLEAR) [W/m2]'}, {'abbr': 211, 'code': 211, 'title': 'oles UPWARD LONG WAVE AT BOTTOM [W/m2]'}, {'abbr': 212, 'code': 212, 'title': 'oces UPWARD SHORT WAVE AT GROUND [W/m2]'}, {'abbr': 213, 'code': 213, 'title': 'swgc UPWARD SHORT WAVE AT GROUND (CLEAR) [W/m2]'}, {'abbr': 214, 'code': 214, 'title': 'roce UPWARD SHORT WAVE AT TOP [W/m2]'}, {'abbr': 215, 'code': 215, 'title': 'swtc UPWARD SHORT WAVE AT TOP (CLEAR) [W/m2]'}, {'abbr': 216, 'code': 216, 'title': 'var216 undefined'}, {'abbr': 217, 'code': 217, 'title': 'var217 undefined'}, {'abbr': 218, 'code': 218, 'title': 'hhdf HORIZONTAL HEATING DIFFUSION [K/s]'}, {'abbr': 219, 'code': 219, 'title': 'hmdf HORIZONTAL MOISTURE DIFFUSION [1/s]'}, {'abbr': 220, 'code': 220, 'title': 'hddf HORIZONTAL DIVERGENCE DIFFUSION [1/s2]'}, {'abbr': 221, 'code': 221, 'title': 'hvdf HORIZONTAL VORTICITY DIFFUSION [1/s2]'}, {'abbr': 222, 'code': 222, 'title': 'vdms VERTICAL DIFF. MOISTURE SOURCE [1/s]'}, {'abbr': 223, 'code': 223, 'title': 'vdfu VERTICAL DIFFUSION DU/DT [m/s2]'}, {'abbr': 224, 'code': 224, 'title': 'vdfv VERTICAL DIFFUSION DV/DT [m/s2]'}, {'abbr': 225, 'code': 225, 'title': 'vdfh VERTICAL DIFFUSION HEATING [K/s]'}, {'abbr': 226, 'code': 226, 'title': 'umrs SURFACE RELATIVE HUMIDITY [no Dim]'}, {'abbr': 227, 'code': 227, 'title': 'vdcc VERTICAL DIST TOTAL CLOUD COVER [no Dim]'}, {'abbr': 228, 'code': 228, 'title': 'var228 undefined'}, {'abbr': 229, 'code': 229, 'title': 'var229 undefined'}, {'abbr': 230, 'code': 230, 'title': 'usmt TIME MEAN SURFACE ZONAL WIND (U) [m/s]'}, {'abbr': 231, 'code': 231, 'title': 'vsmt TIME MEAN SURFACE MERIDIONAL WIND (V) [m/s]'}, {'abbr': 232, 'code': 232, 'title': 'tsmt TIME MEAN SURFACE ABSOLUTE TEMPERATURE [K]'}, {'abbr': 233, 'code': 233, 'title': 'rsmt TIME MEAN SURFACE RELATIVE HUMIDITY [no Dim]'}, {'abbr': 234, 'code': 234, 'title': 'atmt TIME MEAN ABSOLUTE TEMPERATURE [K]'}, {'abbr': 235, 'code': 235, 'title': 'stmt TIME MEAN DEEP SOIL TEMPERATURE [K]'}, {'abbr': 236, 'code': 236, 'title': 'ommt TIME MEAN DERIVED OMEGA [Pa/s]'}, {'abbr': 237, 'code': 237, 'title': 'dvmt TIME MEAN DIVERGENCE [1/s]'}, {'abbr': 238, 'code': 238, 'title': 'zhmt TIME MEAN GEOPOTENTIAL HEIGHT [m]'}, {'abbr': 239, 'code': 239, 'title': 'lnmt TIME MEAN LOG SURFACE PRESSURE [ln(cbar)]'}, {'abbr': 240, 'code': 240, 'title': 'mkmt TIME MEAN MASK [-/+]'}, {'abbr': 241, 'code': 241, 'title': 'vvmt TIME MEAN MERIDIONAL WIND (V) [m/s]'}, {'abbr': 242, 'code': 242, 'title': 'omtm TIME MEAN OMEGA [cbar/s]'}, {'abbr': 243, 'code': 243, 'title': 'ptmt TIME MEAN POTENTIAL TEMPERATURE [K]'}, {'abbr': 244, 'code': 244, 'title': 'pcmt TIME MEAN PRECIP. WATER [kg/m2]'}, {'abbr': 245, 'code': 245, 'title': 'rhmt TIME MEAN RELATIVE HUMIDITY [%]'}, {'abbr': 246, 'code': 246, 'title': 'mpmt TIME MEAN SEA LEVEL PRESSURE [hPa]'}, {'abbr': 247, 'code': 247, 'title': 'simt TIME MEAN SIGMADOT [1/s]'}, {'abbr': 248, 'code': 248, 'title': 'uemt TIME MEAN SPECIFIC HUMIDITY [kg/kg]'}, {'abbr': 249, 'code': 249, 'title': 'fcmt TIME MEAN STREAM FUNCTION| m2/s]'}, {'abbr': 250, 'code': 250, 'title': 'psmt TIME MEAN SURFACE PRESSURE [hPa]'}, {'abbr': 251, 'code': 251, 'title': 'tmmt TIME MEAN SURFACE TEMPERATURE [K]'}, {'abbr': 252, 'code': 252, 'title': 'pvmt TIME MEAN VELOCITY POTENTIAL [m2/s]'}, {'abbr': 253, 'code': 253, 'title': 'tvmt TIME MEAN VIRTUAL TEMPERATURE [K]'}, {'abbr': 254, 'code': 254, 'title': 'vtmt TIME MEAN VORTICITY [1/s]'}, {'abbr': None, 'code': 255, 'title': 'uvmt TIME MEAN ZONAL WIND (U) [m/s]'}) |
class Settings:
params = ()
def __init__(self, params):
self.params = params
| class Settings:
params = ()
def __init__(self, params):
self.params = params |
urlChatAdd = '/chat/add'
urlUserAdd = '/chat/adduser'
urlGetUsers = '/chat/getusers/'
urlGetChats = '/chat/chats'
urlPost = '/chat/post'
urlHist = '/chat/hist'
urlAuth = '/chat/auth'
| url_chat_add = '/chat/add'
url_user_add = '/chat/adduser'
url_get_users = '/chat/getusers/'
url_get_chats = '/chat/chats'
url_post = '/chat/post'
url_hist = '/chat/hist'
url_auth = '/chat/auth' |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
names = {
'Cisco': 'cwom'
}
mappings = {
'cwom': {
'1.0': '5.8.3.1',
'1.1': '5.9.1',
'1.1.3': '5.9.3',
'1.2.0': '6.0.3',
'1.2.1': '6.0.6',
'1.2.2': '6.0.9',
'1.2.3': '6.0.11.1',
'2.0.0': '6.1.1',
'2.0.1': '6.1.6',
'2.0.2': '6.1.8',
'2.0.3': '6.1.12',
'2.1.0': '6.2.2',
'2.1.1': '6.2.7.1',
'2.1.2': '6.2.10',
'2.2': '6.3.2',
'2.2.1': '6.3.5.0.1',
'2.2.2': '6.3.7',
'2.2.3': '6.3.7.1',
'2.2.4': '6.3.10',
'2.2.5': '6.3.13',
'2.3.0': '6.4.2',
'2.3.1': '6.4.5',
'2.3.2': '6.4.6',
'2.3.3': '6.4.7',
'2.3.4': '6.4.8',
'2.3.5': '6.4.9',
'2.3.6': '6.4.10',
'2.3.7': '6.4.11',
'2.3.8': '6.4.12',
'2.3.9': '6.4.13',
'2.3.10': '6.4.14',
'2.3.11': '6.4.15',
'2.3.12': '6.4.16',
'2.3.13': '6.4.17',
'2.3.14': '6.4.18',
'2.3.15': '6.4.19',
'2.3.16': '6.4.20',
'2.3.17': '6.4.21',
'2.3.18': '6.4.22',
'2.3.19': '6.4.23',
'2.3.20': '6.4.24',
'2.3.21': '6.4.25',
'2.3.22': '6.4.26',
'2.3.23': '6.4.27',
'2.3.24': '6.4.28',
'2.3.25': '6.4.29',
'2.3.26': '6.4.30',
'2.3.27': '6.4.31',
'2.3.28': '6.4.32',
'2.3.29': '6.4.33',
'2.3.30': '6.4.34',
'2.3.31': '6.4.35',
'2.3.32': '6.4.36',
'2.3.33': '6.4.37',
'2.3.34': '6.4.38',
'3.0.1': '8.2.1'
}
}
| names = {'Cisco': 'cwom'}
mappings = {'cwom': {'1.0': '5.8.3.1', '1.1': '5.9.1', '1.1.3': '5.9.3', '1.2.0': '6.0.3', '1.2.1': '6.0.6', '1.2.2': '6.0.9', '1.2.3': '6.0.11.1', '2.0.0': '6.1.1', '2.0.1': '6.1.6', '2.0.2': '6.1.8', '2.0.3': '6.1.12', '2.1.0': '6.2.2', '2.1.1': '6.2.7.1', '2.1.2': '6.2.10', '2.2': '6.3.2', '2.2.1': '6.3.5.0.1', '2.2.2': '6.3.7', '2.2.3': '6.3.7.1', '2.2.4': '6.3.10', '2.2.5': '6.3.13', '2.3.0': '6.4.2', '2.3.1': '6.4.5', '2.3.2': '6.4.6', '2.3.3': '6.4.7', '2.3.4': '6.4.8', '2.3.5': '6.4.9', '2.3.6': '6.4.10', '2.3.7': '6.4.11', '2.3.8': '6.4.12', '2.3.9': '6.4.13', '2.3.10': '6.4.14', '2.3.11': '6.4.15', '2.3.12': '6.4.16', '2.3.13': '6.4.17', '2.3.14': '6.4.18', '2.3.15': '6.4.19', '2.3.16': '6.4.20', '2.3.17': '6.4.21', '2.3.18': '6.4.22', '2.3.19': '6.4.23', '2.3.20': '6.4.24', '2.3.21': '6.4.25', '2.3.22': '6.4.26', '2.3.23': '6.4.27', '2.3.24': '6.4.28', '2.3.25': '6.4.29', '2.3.26': '6.4.30', '2.3.27': '6.4.31', '2.3.28': '6.4.32', '2.3.29': '6.4.33', '2.3.30': '6.4.34', '2.3.31': '6.4.35', '2.3.32': '6.4.36', '2.3.33': '6.4.37', '2.3.34': '6.4.38', '3.0.1': '8.2.1'}} |
try:
with open('../../../assets/img_cogwheel_argb.bin','rb') as f:
cogwheel_img_data = f.read()
except:
try:
with open('images/img_cogwheel_rgb565.bin','rb') as f:
cogwheel_img_data = f.read()
except:
print("Could not find binary img_cogwheel file")
# create the cogwheel image data
cogwheel_img_dsc = lv.img_dsc_t(
{
"header": {"always_zero": 0, "w": 100, "h": 100, "cf": lv.img.CF.TRUE_COLOR_ALPHA},
"data": cogwheel_img_data,
"data_size": len(cogwheel_img_data),
}
)
# Create an image using the decoder
img1 = lv.img(lv.scr_act(),None)
lv.img.cache_set_size(2)
img1.align(lv.scr_act(), lv.ALIGN.CENTER, 0, -50)
img1.set_src(cogwheel_img_dsc)
img2 = lv.img(lv.scr_act(), None)
img2.set_src(lv.SYMBOL.OK+"Accept")
img2.align(img1, lv.ALIGN.OUT_BOTTOM_MID, 0, 20)
| try:
with open('../../../assets/img_cogwheel_argb.bin', 'rb') as f:
cogwheel_img_data = f.read()
except:
try:
with open('images/img_cogwheel_rgb565.bin', 'rb') as f:
cogwheel_img_data = f.read()
except:
print('Could not find binary img_cogwheel file')
cogwheel_img_dsc = lv.img_dsc_t({'header': {'always_zero': 0, 'w': 100, 'h': 100, 'cf': lv.img.CF.TRUE_COLOR_ALPHA}, 'data': cogwheel_img_data, 'data_size': len(cogwheel_img_data)})
img1 = lv.img(lv.scr_act(), None)
lv.img.cache_set_size(2)
img1.align(lv.scr_act(), lv.ALIGN.CENTER, 0, -50)
img1.set_src(cogwheel_img_dsc)
img2 = lv.img(lv.scr_act(), None)
img2.set_src(lv.SYMBOL.OK + 'Accept')
img2.align(img1, lv.ALIGN.OUT_BOTTOM_MID, 0, 20) |
int1 = int(input('informe o inteiro 1 '))
int2 = int(input('informe o inteiro 2 '))
real = float(input('informe o real '))
print('a %2.f' %((int1*2)*(int2/2)))
print('b %2.f' %((int1*3)+(real)))
print('c %2.f' %(real**3))
| int1 = int(input('informe o inteiro 1 '))
int2 = int(input('informe o inteiro 2 '))
real = float(input('informe o real '))
print('a %2.f' % (int1 * 2 * (int2 / 2)))
print('b %2.f' % (int1 * 3 + real))
print('c %2.f' % real ** 3) |
data = (
'ruk', # 0x00
'rut', # 0x01
'rup', # 0x02
'ruh', # 0x03
'rweo', # 0x04
'rweog', # 0x05
'rweogg', # 0x06
'rweogs', # 0x07
'rweon', # 0x08
'rweonj', # 0x09
'rweonh', # 0x0a
'rweod', # 0x0b
'rweol', # 0x0c
'rweolg', # 0x0d
'rweolm', # 0x0e
'rweolb', # 0x0f
'rweols', # 0x10
'rweolt', # 0x11
'rweolp', # 0x12
'rweolh', # 0x13
'rweom', # 0x14
'rweob', # 0x15
'rweobs', # 0x16
'rweos', # 0x17
'rweoss', # 0x18
'rweong', # 0x19
'rweoj', # 0x1a
'rweoc', # 0x1b
'rweok', # 0x1c
'rweot', # 0x1d
'rweop', # 0x1e
'rweoh', # 0x1f
'rwe', # 0x20
'rweg', # 0x21
'rwegg', # 0x22
'rwegs', # 0x23
'rwen', # 0x24
'rwenj', # 0x25
'rwenh', # 0x26
'rwed', # 0x27
'rwel', # 0x28
'rwelg', # 0x29
'rwelm', # 0x2a
'rwelb', # 0x2b
'rwels', # 0x2c
'rwelt', # 0x2d
'rwelp', # 0x2e
'rwelh', # 0x2f
'rwem', # 0x30
'rweb', # 0x31
'rwebs', # 0x32
'rwes', # 0x33
'rwess', # 0x34
'rweng', # 0x35
'rwej', # 0x36
'rwec', # 0x37
'rwek', # 0x38
'rwet', # 0x39
'rwep', # 0x3a
'rweh', # 0x3b
'rwi', # 0x3c
'rwig', # 0x3d
'rwigg', # 0x3e
'rwigs', # 0x3f
'rwin', # 0x40
'rwinj', # 0x41
'rwinh', # 0x42
'rwid', # 0x43
'rwil', # 0x44
'rwilg', # 0x45
'rwilm', # 0x46
'rwilb', # 0x47
'rwils', # 0x48
'rwilt', # 0x49
'rwilp', # 0x4a
'rwilh', # 0x4b
'rwim', # 0x4c
'rwib', # 0x4d
'rwibs', # 0x4e
'rwis', # 0x4f
'rwiss', # 0x50
'rwing', # 0x51
'rwij', # 0x52
'rwic', # 0x53
'rwik', # 0x54
'rwit', # 0x55
'rwip', # 0x56
'rwih', # 0x57
'ryu', # 0x58
'ryug', # 0x59
'ryugg', # 0x5a
'ryugs', # 0x5b
'ryun', # 0x5c
'ryunj', # 0x5d
'ryunh', # 0x5e
'ryud', # 0x5f
'ryul', # 0x60
'ryulg', # 0x61
'ryulm', # 0x62
'ryulb', # 0x63
'ryuls', # 0x64
'ryult', # 0x65
'ryulp', # 0x66
'ryulh', # 0x67
'ryum', # 0x68
'ryub', # 0x69
'ryubs', # 0x6a
'ryus', # 0x6b
'ryuss', # 0x6c
'ryung', # 0x6d
'ryuj', # 0x6e
'ryuc', # 0x6f
'ryuk', # 0x70
'ryut', # 0x71
'ryup', # 0x72
'ryuh', # 0x73
'reu', # 0x74
'reug', # 0x75
'reugg', # 0x76
'reugs', # 0x77
'reun', # 0x78
'reunj', # 0x79
'reunh', # 0x7a
'reud', # 0x7b
'reul', # 0x7c
'reulg', # 0x7d
'reulm', # 0x7e
'reulb', # 0x7f
'reuls', # 0x80
'reult', # 0x81
'reulp', # 0x82
'reulh', # 0x83
'reum', # 0x84
'reub', # 0x85
'reubs', # 0x86
'reus', # 0x87
'reuss', # 0x88
'reung', # 0x89
'reuj', # 0x8a
'reuc', # 0x8b
'reuk', # 0x8c
'reut', # 0x8d
'reup', # 0x8e
'reuh', # 0x8f
'ryi', # 0x90
'ryig', # 0x91
'ryigg', # 0x92
'ryigs', # 0x93
'ryin', # 0x94
'ryinj', # 0x95
'ryinh', # 0x96
'ryid', # 0x97
'ryil', # 0x98
'ryilg', # 0x99
'ryilm', # 0x9a
'ryilb', # 0x9b
'ryils', # 0x9c
'ryilt', # 0x9d
'ryilp', # 0x9e
'ryilh', # 0x9f
'ryim', # 0xa0
'ryib', # 0xa1
'ryibs', # 0xa2
'ryis', # 0xa3
'ryiss', # 0xa4
'rying', # 0xa5
'ryij', # 0xa6
'ryic', # 0xa7
'ryik', # 0xa8
'ryit', # 0xa9
'ryip', # 0xaa
'ryih', # 0xab
'ri', # 0xac
'rig', # 0xad
'rigg', # 0xae
'rigs', # 0xaf
'rin', # 0xb0
'rinj', # 0xb1
'rinh', # 0xb2
'rid', # 0xb3
'ril', # 0xb4
'rilg', # 0xb5
'rilm', # 0xb6
'rilb', # 0xb7
'rils', # 0xb8
'rilt', # 0xb9
'rilp', # 0xba
'rilh', # 0xbb
'rim', # 0xbc
'rib', # 0xbd
'ribs', # 0xbe
'ris', # 0xbf
'riss', # 0xc0
'ring', # 0xc1
'rij', # 0xc2
'ric', # 0xc3
'rik', # 0xc4
'rit', # 0xc5
'rip', # 0xc6
'rih', # 0xc7
'ma', # 0xc8
'mag', # 0xc9
'magg', # 0xca
'mags', # 0xcb
'man', # 0xcc
'manj', # 0xcd
'manh', # 0xce
'mad', # 0xcf
'mal', # 0xd0
'malg', # 0xd1
'malm', # 0xd2
'malb', # 0xd3
'mals', # 0xd4
'malt', # 0xd5
'malp', # 0xd6
'malh', # 0xd7
'mam', # 0xd8
'mab', # 0xd9
'mabs', # 0xda
'mas', # 0xdb
'mass', # 0xdc
'mang', # 0xdd
'maj', # 0xde
'mac', # 0xdf
'mak', # 0xe0
'mat', # 0xe1
'map', # 0xe2
'mah', # 0xe3
'mae', # 0xe4
'maeg', # 0xe5
'maegg', # 0xe6
'maegs', # 0xe7
'maen', # 0xe8
'maenj', # 0xe9
'maenh', # 0xea
'maed', # 0xeb
'mael', # 0xec
'maelg', # 0xed
'maelm', # 0xee
'maelb', # 0xef
'maels', # 0xf0
'maelt', # 0xf1
'maelp', # 0xf2
'maelh', # 0xf3
'maem', # 0xf4
'maeb', # 0xf5
'maebs', # 0xf6
'maes', # 0xf7
'maess', # 0xf8
'maeng', # 0xf9
'maej', # 0xfa
'maec', # 0xfb
'maek', # 0xfc
'maet', # 0xfd
'maep', # 0xfe
'maeh', # 0xff
)
| data = ('ruk', 'rut', 'rup', 'ruh', 'rweo', 'rweog', 'rweogg', 'rweogs', 'rweon', 'rweonj', 'rweonh', 'rweod', 'rweol', 'rweolg', 'rweolm', 'rweolb', 'rweols', 'rweolt', 'rweolp', 'rweolh', 'rweom', 'rweob', 'rweobs', 'rweos', 'rweoss', 'rweong', 'rweoj', 'rweoc', 'rweok', 'rweot', 'rweop', 'rweoh', 'rwe', 'rweg', 'rwegg', 'rwegs', 'rwen', 'rwenj', 'rwenh', 'rwed', 'rwel', 'rwelg', 'rwelm', 'rwelb', 'rwels', 'rwelt', 'rwelp', 'rwelh', 'rwem', 'rweb', 'rwebs', 'rwes', 'rwess', 'rweng', 'rwej', 'rwec', 'rwek', 'rwet', 'rwep', 'rweh', 'rwi', 'rwig', 'rwigg', 'rwigs', 'rwin', 'rwinj', 'rwinh', 'rwid', 'rwil', 'rwilg', 'rwilm', 'rwilb', 'rwils', 'rwilt', 'rwilp', 'rwilh', 'rwim', 'rwib', 'rwibs', 'rwis', 'rwiss', 'rwing', 'rwij', 'rwic', 'rwik', 'rwit', 'rwip', 'rwih', 'ryu', 'ryug', 'ryugg', 'ryugs', 'ryun', 'ryunj', 'ryunh', 'ryud', 'ryul', 'ryulg', 'ryulm', 'ryulb', 'ryuls', 'ryult', 'ryulp', 'ryulh', 'ryum', 'ryub', 'ryubs', 'ryus', 'ryuss', 'ryung', 'ryuj', 'ryuc', 'ryuk', 'ryut', 'ryup', 'ryuh', 'reu', 'reug', 'reugg', 'reugs', 'reun', 'reunj', 'reunh', 'reud', 'reul', 'reulg', 'reulm', 'reulb', 'reuls', 'reult', 'reulp', 'reulh', 'reum', 'reub', 'reubs', 'reus', 'reuss', 'reung', 'reuj', 'reuc', 'reuk', 'reut', 'reup', 'reuh', 'ryi', 'ryig', 'ryigg', 'ryigs', 'ryin', 'ryinj', 'ryinh', 'ryid', 'ryil', 'ryilg', 'ryilm', 'ryilb', 'ryils', 'ryilt', 'ryilp', 'ryilh', 'ryim', 'ryib', 'ryibs', 'ryis', 'ryiss', 'rying', 'ryij', 'ryic', 'ryik', 'ryit', 'ryip', 'ryih', 'ri', 'rig', 'rigg', 'rigs', 'rin', 'rinj', 'rinh', 'rid', 'ril', 'rilg', 'rilm', 'rilb', 'rils', 'rilt', 'rilp', 'rilh', 'rim', 'rib', 'ribs', 'ris', 'riss', 'ring', 'rij', 'ric', 'rik', 'rit', 'rip', 'rih', 'ma', 'mag', 'magg', 'mags', 'man', 'manj', 'manh', 'mad', 'mal', 'malg', 'malm', 'malb', 'mals', 'malt', 'malp', 'malh', 'mam', 'mab', 'mabs', 'mas', 'mass', 'mang', 'maj', 'mac', 'mak', 'mat', 'map', 'mah', 'mae', 'maeg', 'maegg', 'maegs', 'maen', 'maenj', 'maenh', 'maed', 'mael', 'maelg', 'maelm', 'maelb', 'maels', 'maelt', 'maelp', 'maelh', 'maem', 'maeb', 'maebs', 'maes', 'maess', 'maeng', 'maej', 'maec', 'maek', 'maet', 'maep', 'maeh') |
class PHPWriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write("<?php\n")
out.write("/* This file was generated by generate_constants. */\n\n")
for enum in self.constants.enum_values.values():
out.write("\n")
for name, value in enum.items():
out.write("define('{}', {});\n".format(name, value))
for name, value in self.constants.constant_values.items():
out.write("define('{}', {});\n".format(name, value))
| class Phpwriter:
def __init__(self, constants):
self.constants = constants
def write(self, out):
out.write('<?php\n')
out.write('/* This file was generated by generate_constants. */\n\n')
for enum in self.constants.enum_values.values():
out.write('\n')
for (name, value) in enum.items():
out.write("define('{}', {});\n".format(name, value))
for (name, value) in self.constants.constant_values.items():
out.write("define('{}', {});\n".format(name, value)) |
Amount = float
BenefitName = str
Email = str
Donor = Email
PaymentId = str
def isnotemptyinstance(value, type):
if not isinstance(value, type):
return False # None returns false
if isinstance(value, str):
return (len(value.strip()) != 0)
elif isinstance(value, int):
return (value != 0)
elif isinstance(value, float):
return (value != 0.0)
else:
raise NotImplementedError
| amount = float
benefit_name = str
email = str
donor = Email
payment_id = str
def isnotemptyinstance(value, type):
if not isinstance(value, type):
return False
if isinstance(value, str):
return len(value.strip()) != 0
elif isinstance(value, int):
return value != 0
elif isinstance(value, float):
return value != 0.0
else:
raise NotImplementedError |
# model settings
model = dict(
type='ImageClassifier',
backbone=dict(
type='SVT',
arch='base',
in_channels=3,
out_indices=(3, ),
qkv_bias=True,
norm_cfg=dict(type='LN'),
norm_after_stage=[False, False, False, True],
drop_rate=0.0,
attn_drop_rate=0.,
drop_path_rate=0.3),
neck=dict(type='GlobalAveragePooling'),
head=dict(
type='LinearClsHead',
num_classes=1000,
in_channels=768,
loss=dict(
type='LabelSmoothLoss', label_smooth_val=0.1, mode='original'),
cal_acc=False),
init_cfg=[
dict(type='TruncNormal', layer='Linear', std=0.02, bias=0.),
dict(type='Constant', layer='LayerNorm', val=1., bias=0.)
],
train_cfg=dict(augments=[
dict(type='BatchMixup', alpha=0.8, num_classes=1000, prob=0.5),
dict(type='BatchCutMix', alpha=1.0, num_classes=1000, prob=0.5)
]))
| model = dict(type='ImageClassifier', backbone=dict(type='SVT', arch='base', in_channels=3, out_indices=(3,), qkv_bias=True, norm_cfg=dict(type='LN'), norm_after_stage=[False, False, False, True], drop_rate=0.0, attn_drop_rate=0.0, drop_path_rate=0.3), neck=dict(type='GlobalAveragePooling'), head=dict(type='LinearClsHead', num_classes=1000, in_channels=768, loss=dict(type='LabelSmoothLoss', label_smooth_val=0.1, mode='original'), cal_acc=False), init_cfg=[dict(type='TruncNormal', layer='Linear', std=0.02, bias=0.0), dict(type='Constant', layer='LayerNorm', val=1.0, bias=0.0)], train_cfg=dict(augments=[dict(type='BatchMixup', alpha=0.8, num_classes=1000, prob=0.5), dict(type='BatchCutMix', alpha=1.0, num_classes=1000, prob=0.5)])) |
nome = input('Digite seu nome: ')
def saudar(x):
print(f'Bem-vindo, {x}!')
saudar(nome) | nome = input('Digite seu nome: ')
def saudar(x):
print(f'Bem-vindo, {x}!')
saudar(nome) |
class Solution:
def sortedSquares(self, nums: List[int]) -> List[int]:
sq_nums = []
for num in nums:
sq_nums.append(num ** 2)
sq_nums.sort()
return sq_nums
| class Solution:
def sorted_squares(self, nums: List[int]) -> List[int]:
sq_nums = []
for num in nums:
sq_nums.append(num ** 2)
sq_nums.sort()
return sq_nums |
class Stack(object):
def __init__(self):
self.items = []
self.min_value = None
def push(self, item):
if not self.min_value or self.min_value > item:
self.min_value = item
self.items.append(item)
def pop(self):
self.items.pop()
def get_min_value(self):
return self.min_value
stack = Stack()
stack.push(4)
stack.push(6)
stack.push(2)
print(stack.get_min_value())
stack.push(1)
print(stack.get_min_value())
| class Stack(object):
def __init__(self):
self.items = []
self.min_value = None
def push(self, item):
if not self.min_value or self.min_value > item:
self.min_value = item
self.items.append(item)
def pop(self):
self.items.pop()
def get_min_value(self):
return self.min_value
stack = stack()
stack.push(4)
stack.push(6)
stack.push(2)
print(stack.get_min_value())
stack.push(1)
print(stack.get_min_value()) |
_base_ = [
'../../_base_/models/resnet50.py',
'../../_base_/datasets/imagenet.py',
'../../_base_/schedules/sgd_steplr-100e.py',
'../../_base_/default_runtime.py',
]
# model settings
model = dict(backbone=dict(norm_cfg=dict(type='SyncBN')))
# dataset settings
data = dict(
imgs_per_gpu=64, # total 64x4=256
train=dict(
data_source=dict(ann_file='data/imagenet/meta/train_1percent.txt')))
# optimizer
optimizer = dict(
type='SGD',
lr=0.1,
momentum=0.9,
weight_decay=5e-4,
paramwise_options={'\\Ahead.': dict(lr_mult=1)})
# learning policy
lr_config = dict(policy='step', step=[12, 16], gamma=0.2)
# runtime settings
runner = dict(type='EpochBasedRunner', max_epochs=20)
checkpoint_config = dict(interval=10)
log_config = dict(
interval=10,
hooks=[dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook')])
| _base_ = ['../../_base_/models/resnet50.py', '../../_base_/datasets/imagenet.py', '../../_base_/schedules/sgd_steplr-100e.py', '../../_base_/default_runtime.py']
model = dict(backbone=dict(norm_cfg=dict(type='SyncBN')))
data = dict(imgs_per_gpu=64, train=dict(data_source=dict(ann_file='data/imagenet/meta/train_1percent.txt')))
optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0005, paramwise_options={'\\Ahead.': dict(lr_mult=1)})
lr_config = dict(policy='step', step=[12, 16], gamma=0.2)
runner = dict(type='EpochBasedRunner', max_epochs=20)
checkpoint_config = dict(interval=10)
log_config = dict(interval=10, hooks=[dict(type='TextLoggerHook'), dict(type='TensorboardLoggerHook')]) |
def test_add_to_basket(browser):
link = 'http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/'
browser.get(link)
assert browser.find_element_by_class_name('btn-add-to-basket').is_displayed(), f'Basket button not found'
| def test_add_to_basket(browser):
link = 'http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/'
browser.get(link)
assert browser.find_element_by_class_name('btn-add-to-basket').is_displayed(), f'Basket button not found' |
class TaskAnswer:
# list of tuple: (vertice_source, vertice_destination, moved_value)
_steps = []
def get_steps(self) -> list:
return self._steps
def add_step(self, source: int, destination: int, value: float):
step = (source, destination, value)
self._steps.append(step)
def print(self):
for step in self._steps:
(source, destination, value) = step
print("from", source, "to", destination, "move", value)
| class Taskanswer:
_steps = []
def get_steps(self) -> list:
return self._steps
def add_step(self, source: int, destination: int, value: float):
step = (source, destination, value)
self._steps.append(step)
def print(self):
for step in self._steps:
(source, destination, value) = step
print('from', source, 'to', destination, 'move', value) |
class Solution:
def getDescentPeriods(self, prices: List[int]) -> int:
curr = result = 1
for i in range(1, len(prices)):
if prices[i] + 1 == prices[i-1]:
curr += 1
else:
curr = 1
result += curr
return result | class Solution:
def get_descent_periods(self, prices: List[int]) -> int:
curr = result = 1
for i in range(1, len(prices)):
if prices[i] + 1 == prices[i - 1]:
curr += 1
else:
curr = 1
result += curr
return result |
print("####################################################")
print("#FILENAME:\t\ta1p1.py\t\t\t #")
print("#ASSIGNMENT:\t\tHomework Assignment 1 Pt. 1#")
print("#COURSE/SECTION:\tCIS 3389.251\t\t #")
print("#DUE DATE:\t\tWednesday, 12.February 2020#")
print("####################################################\n\n\n")
cont = 'y'
while cont.lower() == 'y' or cont.lower() == 'yes':
total = 0
avg = 0
number1 = float(input("First number:\t"))
number2 = float(input("Second number:\t"))
number3 = float(input("Third number:\t"))
total = number1 + number2 + number3
avg = total/3
if number1 >= number2 and number1 >= number3:
first_largest = number1
if number2 >= number3:
second_largest = number2
third_largest = number3
else:
second_largest = number3
third_largest = number2
elif number1 >= number2 and number1 < number3:
first_largest = number3
second_largest = number1
third_largest = number2
elif number1 < number2 and number1 < number3:
third_largest = number1;
if number2 >= number3:
first_largest = number2
second_largest = number3
else:
first_largest = number3
second_largest = number2
elif number1 < number2 and number1 >= number3:
first_largest = number2
second_largest = number1
third_largest = number3
print("\n\nSecond largest number entered:\t", second_largest)
print("Average:\t\t\t", avg, "\n\n")
cont = input("Would you like to continue? ")
| print('####################################################')
print('#FILENAME:\t\ta1p1.py\t\t\t #')
print('#ASSIGNMENT:\t\tHomework Assignment 1 Pt. 1#')
print('#COURSE/SECTION:\tCIS 3389.251\t\t #')
print('#DUE DATE:\t\tWednesday, 12.February 2020#')
print('####################################################\n\n\n')
cont = 'y'
while cont.lower() == 'y' or cont.lower() == 'yes':
total = 0
avg = 0
number1 = float(input('First number:\t'))
number2 = float(input('Second number:\t'))
number3 = float(input('Third number:\t'))
total = number1 + number2 + number3
avg = total / 3
if number1 >= number2 and number1 >= number3:
first_largest = number1
if number2 >= number3:
second_largest = number2
third_largest = number3
else:
second_largest = number3
third_largest = number2
elif number1 >= number2 and number1 < number3:
first_largest = number3
second_largest = number1
third_largest = number2
elif number1 < number2 and number1 < number3:
third_largest = number1
if number2 >= number3:
first_largest = number2
second_largest = number3
else:
first_largest = number3
second_largest = number2
elif number1 < number2 and number1 >= number3:
first_largest = number2
second_largest = number1
third_largest = number3
print('\n\nSecond largest number entered:\t', second_largest)
print('Average:\t\t\t', avg, '\n\n')
cont = input('Would you like to continue? ') |
#!/usr/local/bin/python3
# Python Challenge - 1
# http://www.pythonchallenge.com/pc/def/map.html
# Keyword: ocr
def main():
'''
Hint:
K -> M
O -> Q
E -> G
Everybody thinks twice before solving this.
'''
cipher_text = ('g fmnc wms bgblr rpylqjyrc gr zw fylb. rfyrq ufyr amknsrcp'
'q ypc dmp. bmgle gr gl zw fylb gq glcddgagclr ylb rfyr\'q '
'ufw rfgq rcvr gq qm jmle. sqgle qrpgle.kyicrpylq() gq '
'pcamkkclbcb. lmu ynnjw ml rfc spj.')
plain_text = caesar_cipher(cipher_text, 2)
print('PLAIN TEXT: {}'.format(plain_text))
# Apply cipher to /map.html (get /ocr.html)
print('NEW ADDRESS PATH')
print(caesar_cipher('/map', 2))
# Challenge 23
# chall_23 = 'va gur snpr bs jung?'
# print(caesar_cipher(chall_23, 13))
# for i in range(26):
# plain_23 = caesar_cipher(chall_23, i)
# print('i: {}'.format(i))
# print('PLAIN TEXT: {}'.format(plain_23))
# Rot13: in the face of what?
def caesar_cipher(cipher_text, n):
'''
Input: string of cipher_text, n is int for alphabet rotation
Output: string of plain text, applying simple n rotation
'''
# Convert cipher_text to lowercase
cipher_lower = cipher_text.lower()
# Create cipher key dictionary
codex = {}
base = ord('a')
for i in range(26):
# Assumes a is 0, z is 25
letter = chr(base + i)
rotated_letter = chr(((i + n) % 26) + base)
codex[letter] = rotated_letter
# Build plain_text string using the codex mapping
plain_text = ''
for c in cipher_lower:
plain_text += codex.get(c, c)
return plain_text
if __name__ == '__main__':
main()
| def main():
"""
Hint:
K -> M
O -> Q
E -> G
Everybody thinks twice before solving this.
"""
cipher_text = "g fmnc wms bgblr rpylqjyrc gr zw fylb. rfyrq ufyr amknsrcpq ypc dmp. bmgle gr gl zw fylb gq glcddgagclr ylb rfyr'q ufw rfgq rcvr gq qm jmle. sqgle qrpgle.kyicrpylq() gq pcamkkclbcb. lmu ynnjw ml rfc spj."
plain_text = caesar_cipher(cipher_text, 2)
print('PLAIN TEXT: {}'.format(plain_text))
print('NEW ADDRESS PATH')
print(caesar_cipher('/map', 2))
def caesar_cipher(cipher_text, n):
"""
Input: string of cipher_text, n is int for alphabet rotation
Output: string of plain text, applying simple n rotation
"""
cipher_lower = cipher_text.lower()
codex = {}
base = ord('a')
for i in range(26):
letter = chr(base + i)
rotated_letter = chr((i + n) % 26 + base)
codex[letter] = rotated_letter
plain_text = ''
for c in cipher_lower:
plain_text += codex.get(c, c)
return plain_text
if __name__ == '__main__':
main() |
def rotate(str, d, mag):
if (d=="L"):
return str[mag:] + str[0:mag]
elif (d=="R"):
return str[len(str)-mag:] + str[0: len(str)-mag]
def checkAnagram(str1, str2):
if(sorted(str1)==sorted(str2)):
return True
else:
return False
def subString(s, n, ans):
for i in range(n):
for leng in range(i+1,n+1):
sub = s[i:leng]
if(len(sub)==len(ans)):
if(checkAnagram(sub,ans)):
return True
return False
str = input().split(" ")
str1 = str[0]
str2 = str1
q = int(str[1])
ans = ""
d = list()
mag = list()
str3 = str[2:]
for i in range(len(str3)):
if (i%2==0):
d.append(str3[i])
else:
mag.append(int(str3[i]))
#str1 = input()
#str2 = str1
#q = int(input())
#d = list()
#mag = list()
#ans = ""
#for i in range(q):
# d.append(input())
# mag.append(int(input()))
for i in range(q):
str2 = rotate(str2,d[i],mag[i])
ans = ans + str2[0]
if(subString(str1,len(str1),ans)):
print("YES")
else:
print("NO")
| def rotate(str, d, mag):
if d == 'L':
return str[mag:] + str[0:mag]
elif d == 'R':
return str[len(str) - mag:] + str[0:len(str) - mag]
def check_anagram(str1, str2):
if sorted(str1) == sorted(str2):
return True
else:
return False
def sub_string(s, n, ans):
for i in range(n):
for leng in range(i + 1, n + 1):
sub = s[i:leng]
if len(sub) == len(ans):
if check_anagram(sub, ans):
return True
return False
str = input().split(' ')
str1 = str[0]
str2 = str1
q = int(str[1])
ans = ''
d = list()
mag = list()
str3 = str[2:]
for i in range(len(str3)):
if i % 2 == 0:
d.append(str3[i])
else:
mag.append(int(str3[i]))
for i in range(q):
str2 = rotate(str2, d[i], mag[i])
ans = ans + str2[0]
if sub_string(str1, len(str1), ans):
print('YES')
else:
print('NO') |
number = int(input("Pick a number? "))
for i in range(5):
number = number + number
print(number)
| number = int(input('Pick a number? '))
for i in range(5):
number = number + number
print(number) |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
MAXIMUM_SECRET_LENGTH = 20
MAXIMUM_CONTAINER_APP_NAME_LENGTH = 40
SHORT_POLLING_INTERVAL_SECS = 3
LONG_POLLING_INTERVAL_SECS = 10
LOG_ANALYTICS_RP = "Microsoft.OperationalInsights"
CONTAINER_APPS_RP = "Microsoft.App"
MAX_ENV_PER_LOCATION = 2
MICROSOFT_SECRET_SETTING_NAME = "microsoft-provider-authentication-secret"
FACEBOOK_SECRET_SETTING_NAME = "facebook-provider-authentication-secret"
GITHUB_SECRET_SETTING_NAME = "github-provider-authentication-secret"
GOOGLE_SECRET_SETTING_NAME = "google-provider-authentication-secret"
MSA_SECRET_SETTING_NAME = "msa-provider-authentication-secret"
TWITTER_SECRET_SETTING_NAME = "twitter-provider-authentication-secret"
APPLE_SECRET_SETTING_NAME = "apple-provider-authentication-secret"
UNAUTHENTICATED_CLIENT_ACTION = ['RedirectToLoginPage', 'AllowAnonymous', 'RejectWith401', 'RejectWith404']
FORWARD_PROXY_CONVENTION = ['NoProxy', 'Standard', 'Custom']
CHECK_CERTIFICATE_NAME_AVAILABILITY_TYPE = "Microsoft.App/managedEnvironments/certificates"
| maximum_secret_length = 20
maximum_container_app_name_length = 40
short_polling_interval_secs = 3
long_polling_interval_secs = 10
log_analytics_rp = 'Microsoft.OperationalInsights'
container_apps_rp = 'Microsoft.App'
max_env_per_location = 2
microsoft_secret_setting_name = 'microsoft-provider-authentication-secret'
facebook_secret_setting_name = 'facebook-provider-authentication-secret'
github_secret_setting_name = 'github-provider-authentication-secret'
google_secret_setting_name = 'google-provider-authentication-secret'
msa_secret_setting_name = 'msa-provider-authentication-secret'
twitter_secret_setting_name = 'twitter-provider-authentication-secret'
apple_secret_setting_name = 'apple-provider-authentication-secret'
unauthenticated_client_action = ['RedirectToLoginPage', 'AllowAnonymous', 'RejectWith401', 'RejectWith404']
forward_proxy_convention = ['NoProxy', 'Standard', 'Custom']
check_certificate_name_availability_type = 'Microsoft.App/managedEnvironments/certificates' |
# Author=====>>>Nipun Garg<<<=====
# Problem Statement - Given number of jobs and number of applicants
# And for each applicant given that wether each applicant is
# eligible to get the job or not in the form of matrix
# Return 1 if a person can get the job
def dfs(graph, applicant, visited, result,nApplicants,nJobs):
for i in range(0,nJobs):
if(graph[applicant][i]==1 and (not visited[i])):
visited[i]=1
if( result[i]<0 or dfs(graph, result[i], visited, result, nApplicants, nJobs)):
result[i]=applicant
return 1
return 0
#Return maximum people that can get the job
def bipartite(graph,nApplicants,nJobs):
result = []
for i in range(0,nJobs):
result.append(-1)
retval=0
for i in range(nApplicants):
visited = []
for j in range(nApplicants):
visited.append(0)
if(dfs(graph, i, visited, result, nApplicants, nJobs)):
retval+=1
return retval
#Main function
if __name__ == '__main__':
# Total number of applicant and total number of jobs
nApplicants = input("Enter the number of applicants : ")
nJobs = input("Enter the number of jobs : ")
graph = []
#Taking input if a user can have a job then its value for job is 1
for i in range(nApplicants):
print("Enter the status(1/0) for applicant - "+str(i+1)+" for "+str(nJobs)+" Jobs!")
temp=[]
for j in range(nJobs):
temp.append(input("For job - "+str(j+1)+" : "))
graph.append(temp)
# print(graph)
print("Maximum applicants that can have job is : "+str(bipartite(graph, nApplicants, nJobs))) | def dfs(graph, applicant, visited, result, nApplicants, nJobs):
for i in range(0, nJobs):
if graph[applicant][i] == 1 and (not visited[i]):
visited[i] = 1
if result[i] < 0 or dfs(graph, result[i], visited, result, nApplicants, nJobs):
result[i] = applicant
return 1
return 0
def bipartite(graph, nApplicants, nJobs):
result = []
for i in range(0, nJobs):
result.append(-1)
retval = 0
for i in range(nApplicants):
visited = []
for j in range(nApplicants):
visited.append(0)
if dfs(graph, i, visited, result, nApplicants, nJobs):
retval += 1
return retval
if __name__ == '__main__':
n_applicants = input('Enter the number of applicants : ')
n_jobs = input('Enter the number of jobs : ')
graph = []
for i in range(nApplicants):
print('Enter the status(1/0) for applicant - ' + str(i + 1) + ' for ' + str(nJobs) + ' Jobs!')
temp = []
for j in range(nJobs):
temp.append(input('For job - ' + str(j + 1) + ' : '))
graph.append(temp)
print('Maximum applicants that can have job is : ' + str(bipartite(graph, nApplicants, nJobs))) |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def tree2str(self, t: TreeNode) -> str:
if t is None:
return ""
if t.left is None and t.right is None:
return str(t.val)+""
if t.right is None:
return str(t.val)+"("+str(self.tree2str(t.left))+")"
return str(t.val)+"("+str(self.tree2str(t.left)
)+")"+"("+str(self.tree2str(t.right))+")"
| class Solution:
def tree2str(self, t: TreeNode) -> str:
if t is None:
return ''
if t.left is None and t.right is None:
return str(t.val) + ''
if t.right is None:
return str(t.val) + '(' + str(self.tree2str(t.left)) + ')'
return str(t.val) + '(' + str(self.tree2str(t.left)) + ')' + '(' + str(self.tree2str(t.right)) + ')' |
# get distinct characters and their count in a String
string = input("Enter String: ")
c = 0
for i in range(65, 91):
c = 0
for j in range(0, len(string)):
if(string[j] == chr(i)):
c += 1
if c > 0:
print("", chr(i), " is ", c, " times.")
c = 0
for i in range(97, 123):
c = 0
for j in range(0, len(string)):
if(string[j] == chr(i)):
c += 1
if c > 0:
print("", chr(i), " is ", c, " times.")
| string = input('Enter String: ')
c = 0
for i in range(65, 91):
c = 0
for j in range(0, len(string)):
if string[j] == chr(i):
c += 1
if c > 0:
print('', chr(i), ' is ', c, ' times.')
c = 0
for i in range(97, 123):
c = 0
for j in range(0, len(string)):
if string[j] == chr(i):
c += 1
if c > 0:
print('', chr(i), ' is ', c, ' times.') |
class SearchPath:
def __init__(self, path=None):
if path is None:
self._path = []
else:
self._path = path
def branch_off(self, label, p):
path = self._path + [(label, p)]
return SearchPath(path)
@property
def labels(self):
return [label for label, p in self._path]
@property
def likelihood(self):
if self._path:
probs = [p for label, p in self._path]
res = 1
for p in probs:
res *= p
return res
return 0
class PathBuilder:
def __init__(self, roots):
self._paths = []
for label, p in roots:
search_path = SearchPath()
search_path = search_path.branch_off(label, p)
self._paths.append(search_path)
def make_step(self, pmfs):
if len(pmfs) != len(self._paths):
raise WrongNumberOfPMFsException()
candidates = []
for i in range(len(self._paths)):
search_path = self._paths[i]
pmf = pmfs[i]
for label, p in enumerate(pmf):
candidates.append(search_path.branch_off(label, p))
self._paths = self._best_paths(candidates, limit=len(pmfs))
def _best_paths(self, paths, limit):
return sorted(paths, key=lambda c: c.likelihood, reverse=True)[:limit]
@property
def best_path(self):
best_path = self._best_paths(self._paths, limit=1)[0]
return best_path.labels
@property
def paths(self):
res = []
for search_path in self._paths:
res.append(search_path.labels)
return res
class WrongNumberOfPMFsException(Exception):
pass
class StatesKeeper:
def __init__(self, initial_state):
self._paths = {}
self._initial_state = initial_state
def store(self, path, state):
self._paths[tuple(path)] = state
def retrieve(self, path):
if path:
return self._paths[tuple(path)]
else:
return self._initial_state
class BaseBeamSearch:
def __init__(self, start_of_seq, end_of_seq, beam_size=3, max_len=150):
self._sos = start_of_seq
self._eos = end_of_seq
self._beam_size = beam_size
self._max_len = max_len
def _without_last(self, path):
return path[:-1]
def _remove_special(self, path):
path = path[1:]
if path[-1] == self._eos:
return self._without_last(path)
return path
def _split_path(self, path):
prefix = self._without_last(path)
last_one = path[-1]
return prefix, last_one
def generate_sequence(self):
y0 = self._sos
decoder_state = self.get_initial_state()
keeper = StatesKeeper(decoder_state)
builder = PathBuilder([(y0, 1.0)])
for _ in range(self._max_len):
pmfs = []
for path in builder.paths:
prefix, label = self._split_path(path)
state = keeper.retrieve(prefix)
next_pmf, next_state = self.decode_next(label, state)
keeper.store(path, next_state)
pmfs.append(next_pmf)
builder.make_step(pmfs)
if builder.best_path[-1] == self._eos:
break
return self._remove_special(builder.best_path)
def get_initial_state(self):
raise NotImplementedError
def decode_next(self, prev_y, prev_state):
raise NotImplementedError
class BeamCandidate:
def __init__(self, full_sequence, character, likelihood, state):
self.full_sequence = full_sequence
self.character = character
self.likelihood = likelihood
self.state = state
def branch_off(self, character, likelihood, state):
seq = self.full_sequence + character
return BeamCandidate(seq, character, likelihood, state)
# todo: consider better implementation for StatesKeeper
| class Searchpath:
def __init__(self, path=None):
if path is None:
self._path = []
else:
self._path = path
def branch_off(self, label, p):
path = self._path + [(label, p)]
return search_path(path)
@property
def labels(self):
return [label for (label, p) in self._path]
@property
def likelihood(self):
if self._path:
probs = [p for (label, p) in self._path]
res = 1
for p in probs:
res *= p
return res
return 0
class Pathbuilder:
def __init__(self, roots):
self._paths = []
for (label, p) in roots:
search_path = search_path()
search_path = search_path.branch_off(label, p)
self._paths.append(search_path)
def make_step(self, pmfs):
if len(pmfs) != len(self._paths):
raise wrong_number_of_pm_fs_exception()
candidates = []
for i in range(len(self._paths)):
search_path = self._paths[i]
pmf = pmfs[i]
for (label, p) in enumerate(pmf):
candidates.append(search_path.branch_off(label, p))
self._paths = self._best_paths(candidates, limit=len(pmfs))
def _best_paths(self, paths, limit):
return sorted(paths, key=lambda c: c.likelihood, reverse=True)[:limit]
@property
def best_path(self):
best_path = self._best_paths(self._paths, limit=1)[0]
return best_path.labels
@property
def paths(self):
res = []
for search_path in self._paths:
res.append(search_path.labels)
return res
class Wrongnumberofpmfsexception(Exception):
pass
class Stateskeeper:
def __init__(self, initial_state):
self._paths = {}
self._initial_state = initial_state
def store(self, path, state):
self._paths[tuple(path)] = state
def retrieve(self, path):
if path:
return self._paths[tuple(path)]
else:
return self._initial_state
class Basebeamsearch:
def __init__(self, start_of_seq, end_of_seq, beam_size=3, max_len=150):
self._sos = start_of_seq
self._eos = end_of_seq
self._beam_size = beam_size
self._max_len = max_len
def _without_last(self, path):
return path[:-1]
def _remove_special(self, path):
path = path[1:]
if path[-1] == self._eos:
return self._without_last(path)
return path
def _split_path(self, path):
prefix = self._without_last(path)
last_one = path[-1]
return (prefix, last_one)
def generate_sequence(self):
y0 = self._sos
decoder_state = self.get_initial_state()
keeper = states_keeper(decoder_state)
builder = path_builder([(y0, 1.0)])
for _ in range(self._max_len):
pmfs = []
for path in builder.paths:
(prefix, label) = self._split_path(path)
state = keeper.retrieve(prefix)
(next_pmf, next_state) = self.decode_next(label, state)
keeper.store(path, next_state)
pmfs.append(next_pmf)
builder.make_step(pmfs)
if builder.best_path[-1] == self._eos:
break
return self._remove_special(builder.best_path)
def get_initial_state(self):
raise NotImplementedError
def decode_next(self, prev_y, prev_state):
raise NotImplementedError
class Beamcandidate:
def __init__(self, full_sequence, character, likelihood, state):
self.full_sequence = full_sequence
self.character = character
self.likelihood = likelihood
self.state = state
def branch_off(self, character, likelihood, state):
seq = self.full_sequence + character
return beam_candidate(seq, character, likelihood, state) |
class Bot:
'''
state - state of the game
returns a move
'''
def move(self, state, symbol):
raise NotImplementedError('Abstractaaa')
def get_name(self):
raise NotImplementedError('Abstractaaa')
| class Bot:
"""
state - state of the game
returns a move
"""
def move(self, state, symbol):
raise not_implemented_error('Abstractaaa')
def get_name(self):
raise not_implemented_error('Abstractaaa') |
__author__ = 'shukkkur'
'''
https://codeforces.com/problemset/problem/581/A
A. Vasya the Hipster
'''
red, blue = map(int, input().split())
total = red + blue
a = min(red, blue)
total -= 2 * a
b = total // 2
print(a, b)
| __author__ = 'shukkkur'
'\nhttps://codeforces.com/problemset/problem/581/A\nA. Vasya the Hipster\n'
(red, blue) = map(int, input().split())
total = red + blue
a = min(red, blue)
total -= 2 * a
b = total // 2
print(a, b) |
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def dnsmasq_dependencies():
http_archive(
name = "dnsmasq",
urls = ["http://www.thekelleys.org.uk/dnsmasq/dnsmasq-2.78.tar.xz"],
sha256 = "89949f438c74b0c7543f06689c319484bd126cc4b1f8c745c742ab397681252b",
build_file = "//dnsmasq:BUILD.import",
)
| load('@bazel_tools//tools/build_defs/repo:http.bzl', 'http_archive')
def dnsmasq_dependencies():
http_archive(name='dnsmasq', urls=['http://www.thekelleys.org.uk/dnsmasq/dnsmasq-2.78.tar.xz'], sha256='89949f438c74b0c7543f06689c319484bd126cc4b1f8c745c742ab397681252b', build_file='//dnsmasq:BUILD.import') |
file = open("input")
lines = file.readlines()
pattern_len = len(lines[0])
def part1(lines, right, down):
count = 0
pattern_len = len(lines[0])
x = 0
y = 0
while y < len(lines) - down:
x += right
y += down
if lines[y][x % (pattern_len - 1)] == "#":
count += 1
return count
def part2(lines):
return part1(lines, 1, 1) * part1(lines, 3, 1) * part1(lines, 5, 1) * part1(lines, 7, 1) * part1(lines, 1, 2)
print("Part 1: " + str(part1(lines, 3, 1)))
print("Part 2: " + str(part2(lines)))
| file = open('input')
lines = file.readlines()
pattern_len = len(lines[0])
def part1(lines, right, down):
count = 0
pattern_len = len(lines[0])
x = 0
y = 0
while y < len(lines) - down:
x += right
y += down
if lines[y][x % (pattern_len - 1)] == '#':
count += 1
return count
def part2(lines):
return part1(lines, 1, 1) * part1(lines, 3, 1) * part1(lines, 5, 1) * part1(lines, 7, 1) * part1(lines, 1, 2)
print('Part 1: ' + str(part1(lines, 3, 1)))
print('Part 2: ' + str(part2(lines))) |
# input the length of array
n = int(input())
# input the elements of array
ar = [int(x) for x in input().strip().split(' ')]
c = [0]*100
for a in ar :
c[a] += 1
s = ''
# print the sorted list as a single line of space-separated elements
for x in range(0,100) :
for i in range(0,c[x]) :
s += ' ' + str(x)
print(s[1:])
| n = int(input())
ar = [int(x) for x in input().strip().split(' ')]
c = [0] * 100
for a in ar:
c[a] += 1
s = ''
for x in range(0, 100):
for i in range(0, c[x]):
s += ' ' + str(x)
print(s[1:]) |
full_dict = {
'daterecieved': 'entry daterecieved',
'poploadslip' : 'entry poploadslip',
'count' : 'entry 1' ,
'tm9_ticket' : 'entry tm9_ticket',
'disposition_fmanum' : 'entry disposition_fmanum',
'owner' : 'entry ownerName',
'haulingcontractor' : 'entry hauled by',
'numpcsreceived' : 'entry num of pieces',
'blocknum' : 'entry Block Number'
}
DB_list = ['daterecieved',
'poploadslip',
'count',
'sampleloads' ,
'tm9_ticket',
'owner' ,
'disposition_fmanum' ,
'blocknum',
'haulingcontractor',
]
indxSample = [0,1,2,4,6]
keys = [DB_list[i] for i in indxSample]
A ={x:full_dict[x] for x in keys}
print(A)
| full_dict = {'daterecieved': 'entry daterecieved', 'poploadslip': 'entry poploadslip', 'count': 'entry 1', 'tm9_ticket': 'entry tm9_ticket', 'disposition_fmanum': 'entry disposition_fmanum', 'owner': 'entry ownerName', 'haulingcontractor': 'entry hauled by', 'numpcsreceived': 'entry num of pieces', 'blocknum': 'entry Block Number'}
db_list = ['daterecieved', 'poploadslip', 'count', 'sampleloads', 'tm9_ticket', 'owner', 'disposition_fmanum', 'blocknum', 'haulingcontractor']
indx_sample = [0, 1, 2, 4, 6]
keys = [DB_list[i] for i in indxSample]
a = {x: full_dict[x] for x in keys}
print(A) |
# HEAD
# Python Functions - *args
# DESCRIPTION
# Describes
# capturing all arguments as *args (tuple)
#
# RESOURCES
#
# Arguments (any number during invocation) can also be
# caught as a sequence of arguments - tuple using *args
# Order does matter for unnamed arguments list and makes for
# index of argument in list even with *args
# # # Note the * above when passing as argument
# sequence to function
# Can be named args or any name; it does not matter
def printUnnamedArgs(*args):
# Note the missing * during access
print("3. printUnnamedArgs", args)
for x in enumerate(args):
print(x)
# Can pass any number of arguments below now
# Follows order of arguments
# Argument's index is the order of arguments passed
printUnnamedArgs([1, 2, 3], [4, 5, 6])
| def print_unnamed_args(*args):
print('3. printUnnamedArgs', args)
for x in enumerate(args):
print(x)
print_unnamed_args([1, 2, 3], [4, 5, 6]) |
class Luhn:
def __init__(self, card_num: str):
self._reversed_card_num = card_num.replace(' ', '')[::-1]
self._even_digits = self._reversed_card_num[1::2]
self._odd_digits = self._reversed_card_num[::2]
def valid(self) -> bool:
if str.isnumeric(self._reversed_card_num) and len(self._reversed_card_num) > 1:
return self._sum_card() % 10 == 0
else:
return False
def _sum_card(self) -> int:
even_digits_sum = 0
for digit in self._even_digits:
x = int(digit) * 2
even_digits_sum += x if x <= 9 else x - 9
return even_digits_sum + sum([int(x) for x in self._odd_digits])
| class Luhn:
def __init__(self, card_num: str):
self._reversed_card_num = card_num.replace(' ', '')[::-1]
self._even_digits = self._reversed_card_num[1::2]
self._odd_digits = self._reversed_card_num[::2]
def valid(self) -> bool:
if str.isnumeric(self._reversed_card_num) and len(self._reversed_card_num) > 1:
return self._sum_card() % 10 == 0
else:
return False
def _sum_card(self) -> int:
even_digits_sum = 0
for digit in self._even_digits:
x = int(digit) * 2
even_digits_sum += x if x <= 9 else x - 9
return even_digits_sum + sum([int(x) for x in self._odd_digits]) |
# Copyright 2019-present, GraphQL Foundation
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
def title(s):
'''Capitalize the first character of s.'''
return s[0].capitalize() + s[1:]
def camel(s):
'''Lowercase the first character of s.'''
return s[0].lower() + s[1:]
def snake(s):
'''Convert from title or camelCase to snake_case.'''
if len(s) < 2:
return s.lower()
out = s[0].lower()
for c in s[1:]:
if c.isupper():
out += '_'
c = c.lower()
out += c
return out
changes = {
'OperationDefinition': 'Operation',
'IntValue': 'Int',
'FloatValue': 'Float',
'StringValue': 'String',
'BooleanValue': 'Boolean',
'VariableValue': 'Variable',
'TypeCondition': 'NamedType',
'EnumValue': 'Enum',
'ListValue': 'List',
'ObjectValue': 'InputObject'
}
def short(s):
'''Make some substitution to get work default Tarantool cartridge graphQL query executor.'''
for k, v in list(changes.items()):
if s == k:
s = v
return s[0].lower() + s[1:]
| def title(s):
"""Capitalize the first character of s."""
return s[0].capitalize() + s[1:]
def camel(s):
"""Lowercase the first character of s."""
return s[0].lower() + s[1:]
def snake(s):
"""Convert from title or camelCase to snake_case."""
if len(s) < 2:
return s.lower()
out = s[0].lower()
for c in s[1:]:
if c.isupper():
out += '_'
c = c.lower()
out += c
return out
changes = {'OperationDefinition': 'Operation', 'IntValue': 'Int', 'FloatValue': 'Float', 'StringValue': 'String', 'BooleanValue': 'Boolean', 'VariableValue': 'Variable', 'TypeCondition': 'NamedType', 'EnumValue': 'Enum', 'ListValue': 'List', 'ObjectValue': 'InputObject'}
def short(s):
"""Make some substitution to get work default Tarantool cartridge graphQL query executor."""
for (k, v) in list(changes.items()):
if s == k:
s = v
return s[0].lower() + s[1:] |
class Solution:
def binary_to_decimal(self, n):
return int(n, 2)
def grayCode(self, A):
num_till_now = [0, 1]
if A == 1:
return num_till_now
results = []
for i in range(1, A):
rev = num_till_now.copy()
rev.reverse()
num_till_now = num_till_now + rev
lent = len(num_till_now)
for j in range(len(num_till_now)):
if j >= lent//2:
num_till_now[j] = "1" + str(num_till_now[j])
else:
num_till_now[j] = "0" + str(num_till_now[j])
for i in num_till_now:
results.append(self.binary_to_decimal(i))
return results
number = 16
s = Solution()
ss = s.grayCode(number)
print(ss)
| class Solution:
def binary_to_decimal(self, n):
return int(n, 2)
def gray_code(self, A):
num_till_now = [0, 1]
if A == 1:
return num_till_now
results = []
for i in range(1, A):
rev = num_till_now.copy()
rev.reverse()
num_till_now = num_till_now + rev
lent = len(num_till_now)
for j in range(len(num_till_now)):
if j >= lent // 2:
num_till_now[j] = '1' + str(num_till_now[j])
else:
num_till_now[j] = '0' + str(num_till_now[j])
for i in num_till_now:
results.append(self.binary_to_decimal(i))
return results
number = 16
s = solution()
ss = s.grayCode(number)
print(ss) |
def include_in_html(content_to_include, input_includename, html_filepath):
with open(html_filepath, "r") as f:
line_list = f.readlines()
res = []
includename = None
initial_spaces = 0
for line in line_list:
line = line.strip("\n")
if line.strip(" ")[:14] == "<!-- #include " or line.strip(" ")[:13] == "<!--#include " :
if includename != None:
print("Error, includename != None in new '<!-- #include ' section.")
res.append(line)
initial_spaces = line.split("<!-- #include")[0].count(" ")
includename = line.split("#include ")[-1]
includename = includename.split("-->")[0].strip(" ")
if includename != input_includename:
includename = None
continue
elif line.strip(" ")[:9] == "<!-- #end":
if includename == input_includename:
lines_to_append = content_to_include.split("\n")
for el in lines_to_append:
if el == "":
continue
res.append(" "*(2+initial_spaces) + el)
#res.append(content_to_include)
includename = None
if includename == None:
res.append(line)
with open(html_filepath, "w") as f:
print("\n".join(res), file=f, end="") | def include_in_html(content_to_include, input_includename, html_filepath):
with open(html_filepath, 'r') as f:
line_list = f.readlines()
res = []
includename = None
initial_spaces = 0
for line in line_list:
line = line.strip('\n')
if line.strip(' ')[:14] == '<!-- #include ' or line.strip(' ')[:13] == '<!--#include ':
if includename != None:
print("Error, includename != None in new '<!-- #include ' section.")
res.append(line)
initial_spaces = line.split('<!-- #include')[0].count(' ')
includename = line.split('#include ')[-1]
includename = includename.split('-->')[0].strip(' ')
if includename != input_includename:
includename = None
continue
elif line.strip(' ')[:9] == '<!-- #end':
if includename == input_includename:
lines_to_append = content_to_include.split('\n')
for el in lines_to_append:
if el == '':
continue
res.append(' ' * (2 + initial_spaces) + el)
includename = None
if includename == None:
res.append(line)
with open(html_filepath, 'w') as f:
print('\n'.join(res), file=f, end='') |
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Defaults for rules_typescript repository not meant to be used downstream"
load(
"@build_bazel_rules_typescript//:defs.bzl",
_karma_web_test = "karma_web_test",
_karma_web_test_suite = "karma_web_test_suite",
_ts_library = "ts_library",
_ts_web_test = "ts_web_test",
_ts_web_test_suite = "ts_web_test_suite",
)
# We can't use the defaults for ts_library compiler and ts_web_test_suite karma
# internally because the defaults are .js dependencies on the npm packages that are
# published and internally we are building the things themselves to publish to npm
INTERNAL_TS_LIBRARY_COMPILER = "@build_bazel_rules_typescript//internal:tsc_wrapped_bin"
INTERNAL_KARMA_BIN = "@build_bazel_rules_typescript//internal/karma:karma_bin"
def karma_web_test(karma = INTERNAL_KARMA_BIN, **kwargs):
_karma_web_test(karma = karma, **kwargs)
def karma_web_test_suite(karma = INTERNAL_KARMA_BIN, **kwargs):
_karma_web_test_suite(karma = karma, **kwargs)
def ts_library(compiler = INTERNAL_TS_LIBRARY_COMPILER, **kwargs):
_ts_library(compiler = compiler, **kwargs)
def ts_web_test(karma = INTERNAL_KARMA_BIN, **kwargs):
_ts_web_test(karma = karma, **kwargs)
def ts_web_test_suite(karma = INTERNAL_KARMA_BIN, **kwargs):
_ts_web_test_suite(karma = karma, **kwargs)
| """Defaults for rules_typescript repository not meant to be used downstream"""
load('@build_bazel_rules_typescript//:defs.bzl', _karma_web_test='karma_web_test', _karma_web_test_suite='karma_web_test_suite', _ts_library='ts_library', _ts_web_test='ts_web_test', _ts_web_test_suite='ts_web_test_suite')
internal_ts_library_compiler = '@build_bazel_rules_typescript//internal:tsc_wrapped_bin'
internal_karma_bin = '@build_bazel_rules_typescript//internal/karma:karma_bin'
def karma_web_test(karma=INTERNAL_KARMA_BIN, **kwargs):
_karma_web_test(karma=karma, **kwargs)
def karma_web_test_suite(karma=INTERNAL_KARMA_BIN, **kwargs):
_karma_web_test_suite(karma=karma, **kwargs)
def ts_library(compiler=INTERNAL_TS_LIBRARY_COMPILER, **kwargs):
_ts_library(compiler=compiler, **kwargs)
def ts_web_test(karma=INTERNAL_KARMA_BIN, **kwargs):
_ts_web_test(karma=karma, **kwargs)
def ts_web_test_suite(karma=INTERNAL_KARMA_BIN, **kwargs):
_ts_web_test_suite(karma=karma, **kwargs) |
class Simple(object):
def __init__(self, x):
self.x = x
self.y = 6
def get_x(self):
return self.x
class WithCollection(object):
def __init__(self):
self.l = list()
self.d = dict()
def get_l(self):
return self.l | class Simple(object):
def __init__(self, x):
self.x = x
self.y = 6
def get_x(self):
return self.x
class Withcollection(object):
def __init__(self):
self.l = list()
self.d = dict()
def get_l(self):
return self.l |
s = open('input.txt','r').read()
s = [k for k in s.split("\n")]
aller = {}
count = {}
for line in s:
allergens = line.split("contains ")[1].split(", ")
allergens[-1] = allergens[-1][:-1]
ing = line.split(" (")[0].split(" ")
for i in ing:
count[i] = 1 if i not in count else count[i] + 1
for allergen in allergens:
if allergen not in aller:
aller[allergen] = set(ing)
else:
aller[allergen] = aller[allergen].intersection(set(ing))
used = set()
while True:
found = False
for allergen in aller:
aller[allergen] = aller[allergen].difference(used)
if len(aller[allergen]) == 1:
used.add(list(aller[allergen])[0])
found = True
break
if not found:break
ans = 0
for x in count:
if x not in used:
ans += count[x]
#print(x,count[x])
print(ans)
| s = open('input.txt', 'r').read()
s = [k for k in s.split('\n')]
aller = {}
count = {}
for line in s:
allergens = line.split('contains ')[1].split(', ')
allergens[-1] = allergens[-1][:-1]
ing = line.split(' (')[0].split(' ')
for i in ing:
count[i] = 1 if i not in count else count[i] + 1
for allergen in allergens:
if allergen not in aller:
aller[allergen] = set(ing)
else:
aller[allergen] = aller[allergen].intersection(set(ing))
used = set()
while True:
found = False
for allergen in aller:
aller[allergen] = aller[allergen].difference(used)
if len(aller[allergen]) == 1:
used.add(list(aller[allergen])[0])
found = True
break
if not found:
break
ans = 0
for x in count:
if x not in used:
ans += count[x]
print(ans) |
t = int(input())
for i in range(t):
n = input()
rev_n = int(n[::-1])
print(rev_n) | t = int(input())
for i in range(t):
n = input()
rev_n = int(n[::-1])
print(rev_n) |
courses={}
while True:
command=input()
if command!="end":
command=command.split(" : ")
doesCourseExist=False
for j in courses:
if j==command[0]:
doesCourseExist=True
break
if doesCourseExist==False:
courses[command[0]]=[command[1]]
else:
courses[command[0]].append(command[1])
else:
for j in courses:
print(f"{j}: {len(courses[j])}")
for k in range(0,len(courses[j])):
print(f"-- {courses[j][k]}")
break
| courses = {}
while True:
command = input()
if command != 'end':
command = command.split(' : ')
does_course_exist = False
for j in courses:
if j == command[0]:
does_course_exist = True
break
if doesCourseExist == False:
courses[command[0]] = [command[1]]
else:
courses[command[0]].append(command[1])
else:
for j in courses:
print(f'{j}: {len(courses[j])}')
for k in range(0, len(courses[j])):
print(f'-- {courses[j][k]}')
break |
def main():
object_a_mass = float(input("Object A mass: "))
object_b_mass = float(input("Object B mass: "))
distance = float(input("Distance between both: "))
G = 6.67408 * (10**11)
print(G*(object_a_mass*object_b_mass)/ (distance ** 2))
if __name__ == '__main__':
main()
| def main():
object_a_mass = float(input('Object A mass: '))
object_b_mass = float(input('Object B mass: '))
distance = float(input('Distance between both: '))
g = 6.67408 * 10 ** 11
print(G * (object_a_mass * object_b_mass) / distance ** 2)
if __name__ == '__main__':
main() |
PB_PACKAGE = __package__
NODE_TAG = 'p_baker_node'
MATERIAL_TAG = 'p_baker_material'
MATERIAL_TAG_VERTEX = 'p_baker_material_vertex'
NODE_INPUTS = [
'Color',
'Subsurface',
'Subsurface Color',
'Metallic',
'Specular',
'Specular Tint',
'Roughness',
'Anisotropic',
'Anisotropic Rotation',
'Sheen',
'Sheen Tint',
'Clearcoat',
'Clearcoat Roughness',
'IOR',
'Transmission',
'Transmission Roughness',
'Emission',
'Alpha',
'Normal',
'Clearcoat Normal',
'Tangent'
]
# for new material to have images nicely sorted
NODE_INPUTS_SORTED = [
'Color',
'Ambient Occlusion',
'Subsurface',
'Subsurface Radius',
'Subsurface Color',
'Metallic',
'Specular',
'Specular Tint',
'Roughness',
'Glossiness',
'Anisotropic',
'Anisotropic Rotation',
'Sheen',
'Sheen Tint',
'Clearcoat',
'Clearcoat Roughness',
'IOR',
'Transmission',
'Transmission Roughness',
'Emission',
'Alpha',
'Normal',
'Clearcoat Normal',
'Tangent',
'Bump',
'Displacement',
'Diffuse',
'Wireframe',
'Material ID'
]
NORMAL_INPUTS = {'Normal', 'Clearcoat Normal', 'Tangent'}
ALPHA_NODES = {
# "Alpha":'BSDF_TRANSPARENT',
"Translucent_Alpha": 'BSDF_TRANSLUCENT',
"Glass_Alpha": 'BSDF_GLASS'
}
BSDF_NODES = {
'BSDF_PRINCIPLED',
'BSDF_DIFFUSE',
'BSDF_TOON',
'BSDF_VELVET',
'BSDF_GLOSSY',
'BSDF_TRANSPARENT',
'BSDF_TRANSLUCENT',
'BSDF_GLASS'
}
IMAGE_FILE_FORMAT_ENDINGS = {
"BMP": "bmp",
"PNG": "png",
"JPEG": "jpg",
"TIFF": "tif",
"TARGA": "tga",
"OPEN_EXR": "exr",
}
# signs not allowed in file names or paths
NOT_ALLOWED_SIGNS = ['\\', '/', ':', '*', '?', '"', '<', '>', '|']
| pb_package = __package__
node_tag = 'p_baker_node'
material_tag = 'p_baker_material'
material_tag_vertex = 'p_baker_material_vertex'
node_inputs = ['Color', 'Subsurface', 'Subsurface Color', 'Metallic', 'Specular', 'Specular Tint', 'Roughness', 'Anisotropic', 'Anisotropic Rotation', 'Sheen', 'Sheen Tint', 'Clearcoat', 'Clearcoat Roughness', 'IOR', 'Transmission', 'Transmission Roughness', 'Emission', 'Alpha', 'Normal', 'Clearcoat Normal', 'Tangent']
node_inputs_sorted = ['Color', 'Ambient Occlusion', 'Subsurface', 'Subsurface Radius', 'Subsurface Color', 'Metallic', 'Specular', 'Specular Tint', 'Roughness', 'Glossiness', 'Anisotropic', 'Anisotropic Rotation', 'Sheen', 'Sheen Tint', 'Clearcoat', 'Clearcoat Roughness', 'IOR', 'Transmission', 'Transmission Roughness', 'Emission', 'Alpha', 'Normal', 'Clearcoat Normal', 'Tangent', 'Bump', 'Displacement', 'Diffuse', 'Wireframe', 'Material ID']
normal_inputs = {'Normal', 'Clearcoat Normal', 'Tangent'}
alpha_nodes = {'Translucent_Alpha': 'BSDF_TRANSLUCENT', 'Glass_Alpha': 'BSDF_GLASS'}
bsdf_nodes = {'BSDF_PRINCIPLED', 'BSDF_DIFFUSE', 'BSDF_TOON', 'BSDF_VELVET', 'BSDF_GLOSSY', 'BSDF_TRANSPARENT', 'BSDF_TRANSLUCENT', 'BSDF_GLASS'}
image_file_format_endings = {'BMP': 'bmp', 'PNG': 'png', 'JPEG': 'jpg', 'TIFF': 'tif', 'TARGA': 'tga', 'OPEN_EXR': 'exr'}
not_allowed_signs = ['\\', '/', ':', '*', '?', '"', '<', '>', '|'] |
def nrange(start, stop, step=1):
while start < stop:
yield start
start += step
@profile
def ncall():
for i in nrange(1,1000000):
pass
if __name__ == "__main__":
ncall()
| def nrange(start, stop, step=1):
while start < stop:
yield start
start += step
@profile
def ncall():
for i in nrange(1, 1000000):
pass
if __name__ == '__main__':
ncall() |
string = "abcdefgabc"
string_list = []
for letter in string:
string_list.append(letter)
print(string_list)
string_list_no_duplicate = set(string_list)
string_list_no_duplicate = list(string_list_no_duplicate)
string_list_no_duplicate.sort()
print(string_list_no_duplicate)
for letters in string_list_no_duplicate:
string_count = string_list.count(letters)
print(f'{letters}, {string_count}')
# Suggested Solution
# dict = {}
# for s in string:
# dict[s] = dict.get(s,0)+1
#
# print('\n'.join.[f'{k}, {v}' for k, v in dict.items()])
#
| string = 'abcdefgabc'
string_list = []
for letter in string:
string_list.append(letter)
print(string_list)
string_list_no_duplicate = set(string_list)
string_list_no_duplicate = list(string_list_no_duplicate)
string_list_no_duplicate.sort()
print(string_list_no_duplicate)
for letters in string_list_no_duplicate:
string_count = string_list.count(letters)
print(f'{letters}, {string_count}') |
def load(task_id, file_id, cmds):
global responses
code = reverse_upload(task_id, file_id)
name = cmds
if agent.get_Encryption_key() == "":
dynfs[name] = code
else:
dynfs[name] = encrypt_code(code)
response = {
'task_id': task_id,
"user_output": "Module successfully added",
'commands': [
{
"action": "add",
"cmd": name
}
],
'completed': True
}
responses.append(response)
print("\t- Load Done")
return | def load(task_id, file_id, cmds):
global responses
code = reverse_upload(task_id, file_id)
name = cmds
if agent.get_Encryption_key() == '':
dynfs[name] = code
else:
dynfs[name] = encrypt_code(code)
response = {'task_id': task_id, 'user_output': 'Module successfully added', 'commands': [{'action': 'add', 'cmd': name}], 'completed': True}
responses.append(response)
print('\t- Load Done')
return |
for _ in range(int(input())):
n = int(input())
r = [int(i) for i in input().split()]
o = max(r)
if r.count(o)==len(r):
print(-1)
continue
kq = -1
for i in range(1,len(r)):
if r[i]==o and (r[i]>r[i-1]):
kq = i+1
for i in range(len(r)-1):
if r[i]==o and(r[i]>r[i+1]):
kq = i+1
print(kq)
| for _ in range(int(input())):
n = int(input())
r = [int(i) for i in input().split()]
o = max(r)
if r.count(o) == len(r):
print(-1)
continue
kq = -1
for i in range(1, len(r)):
if r[i] == o and r[i] > r[i - 1]:
kq = i + 1
for i in range(len(r) - 1):
if r[i] == o and r[i] > r[i + 1]:
kq = i + 1
print(kq) |
WIDTH = 128
HEIGHT = 128
# Must be more than ALIEN_SIZE, used to pad alien rows and columns
ALIEN_BLOCK_SIZE = 8
# Alien constants are global as their spacing is used to separate them
ALIENS_PER_ROW = int(WIDTH / ALIEN_BLOCK_SIZE) - 6
ALIEN_ROWS = int(HEIGHT / (2 * ALIEN_BLOCK_SIZE))
# How often to move the aliens intially, how much to step the alien time down with each shift
ALIEN_START_TIME = 4
ALIEN_TIME_STEP = 0.2
ALIEN_MINIMUM_TIME = 1
# How likely an alien is to fire in a time step
ALIEN_START_FIRE_PROBABILITY = 0.01
ALIEN_FIRE_PROBABILITY_STEP = 0.005
ALIEN_MAXIMUM_FIRE_PROBABILITY = 0.03
# Bunker constants
NUMBER_OF_BUNKERS = 4
BUNKER_WIDTH = 8
BUNKER_HEIGHT = 8
BUNKER_MAP = [
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 0, 0, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 1],
] | width = 128
height = 128
alien_block_size = 8
aliens_per_row = int(WIDTH / ALIEN_BLOCK_SIZE) - 6
alien_rows = int(HEIGHT / (2 * ALIEN_BLOCK_SIZE))
alien_start_time = 4
alien_time_step = 0.2
alien_minimum_time = 1
alien_start_fire_probability = 0.01
alien_fire_probability_step = 0.005
alien_maximum_fire_probability = 0.03
number_of_bunkers = 4
bunker_width = 8
bunker_height = 8
bunker_map = [[1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 0, 0, 1, 1, 1], [1, 1, 0, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 0, 1, 1]] |
fp = open('abcd.txt', 'r')
line_offset = []
offset = 0
for line in fp:
line_offset.append(offset)
offset += len(line)
print(line_offset)
for each in line_offset:
fp.seek(each)
print(fp.readline()[:-1])
| fp = open('abcd.txt', 'r')
line_offset = []
offset = 0
for line in fp:
line_offset.append(offset)
offset += len(line)
print(line_offset)
for each in line_offset:
fp.seek(each)
print(fp.readline()[:-1]) |
# ----------------------------------------------------------------------------
# Copyright 2019-2022 Diligent Graphics LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# In no event and under no legal theory, whether in tort (including negligence),
# contract, or otherwise, unless required by applicable law (such as deliberate
# and grossly negligent acts) or agreed to in writing, shall any Contributor be
# liable for any damages, including any direct, indirect, special, incidental,
# or consequential damages of any character arising as a result of this License or
# out of the use or inability to use the software (including but not limited to damages
# for loss of goodwill, work stoppage, computer failure or malfunction, or any and
# all other commercial damages or losses), even if such Contributor has been advised
# of the possibility of such damages.
# ----------------------------------------------------------------------------
CXX_REGISTERED_STRUCT = {
"Version",
"RenderTargetBlendDesc",
"BlendStateDesc",
"StencilOpDesc",
"DepthStencilStateDesc",
"RasterizerStateDesc",
"InputLayoutDesc",
"LayoutElement",
"SampleDesc",
"ShaderResourceVariableDesc",
"PipelineResourceDesc",
"PipelineResourceSignatureDesc",
"SamplerDesc",
"ImmutableSamplerDesc",
"PipelineResourceLayoutDesc",
"PipelineStateDesc",
"GraphicsPipelineDesc",
"RayTracingPipelineDesc",
"TilePipelineDesc",
"RenderPassAttachmentDesc",
"AttachmentReference",
"ShadingRateAttachment",
"SubpassDesc",
"SubpassDependencyDesc",
"RenderPassDesc",
"ShaderDesc",
"ShaderMacro",
"ShaderResourceDesc",
"ShaderCreateInfo",
"RenderDeviceInfo",
"GraphicsAdapterInfo",
"DeviceFeatures",
"AdapterMemoryInfo",
"RayTracingProperties",
"WaveOpProperties",
"BufferProperties",
"TextureProperties",
"SamplerProperties",
"MeshShaderProperties",
"ShadingRateProperties",
"ComputeShaderProperties",
"DrawCommandProperties",
"SparseResourceProperties",
"ShadingRateMode",
"CommandQueueInfo",
"NDCAttribs",
"SerializationDeviceD3D11Info",
"SerializationDeviceD3D12Info",
"SerializationDeviceVkInfo",
"SerializationDeviceMtlInfo",
"SerializationDeviceCreateInfo",
}
CXX_REGISTERD_BASE_STRUCT = {
"DeviceObjectAttribs" : {"name": "Name", 'type': "const char *", "meta": "string"}
}
CXX_REGISTERED_ENUM = {
"BLEND_FACTOR",
"BLEND_OPERATION",
"COLOR_MASK",
"LOGIC_OPERATION",
"COLOR_MASK",
"STENCIL_OP",
"COMPARISON_FUNCTION",
"FILL_MODE",
"CULL_MODE",
"INPUT_ELEMENT_FREQUENCY",
"VALUE_TYPE",
"TEXTURE_FORMAT",
"PRIMITIVE_TOPOLOGY",
"RESOURCE_STATE",
"ACCESS_FLAGS",
"ATTACHMENT_LOAD_OP",
"ATTACHMENT_STORE_OP",
"PIPELINE_TYPE",
"PIPELINE_STAGE_FLAGS",
"PIPELINE_SHADING_RATE_FLAGS",
"PIPELINE_RESOURCE_FLAGS",
"PSO_CREATE_FLAGS",
"SAMPLER_FLAGS",
"FILTER_TYPE",
"TEXTURE_ADDRESS_MODE",
"SHADER_TYPE",
"SHADER_SOURCE_LANGUAGE",
"SHADER_COMPILER",
"SHADER_RESOURCE_TYPE",
"SHADER_RESOURCE_VARIABLE_TYPE",
"SHADER_RESOURCE_VARIABLE_TYPE_FLAGS",
"SHADER_VARIABLE_FLAGS",
"ADAPTER_TYPE",
"ADAPTER_VENDOR",
"BIND_FLAGS",
"CPU_ACCESS_FLAGS",
"WAVE_FEATURE",
"RAY_TRACING_CAP_FLAGS",
"COMMAND_QUEUE_TYPE",
"SPARSE_RESOURCE_CAP_FLAGS",
"DRAW_COMMAND_CAP_FLAGS",
"SHADING_RATE_CAP_FLAGS",
"SHADING_RATE_COMBINER",
"SHADING_RATE_TEXTURE_ACCESS",
"SHADING_RATE_FORMAT",
"RENDER_DEVICE_TYPE",
"DEVICE_FEATURE_STATE",
"SHADING_RATE",
"SAMPLE_COUNT"
}
CXX_SUFFIX_FILE = "Parser"
CXX_EXTENSION_FILE = "hpp"
CXX_LICENCE = '''/*
* Copyright 2019-2022 Diligent Graphics LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* In no event and under no legal theory, whether in tort (including negligence),
* contract, or otherwise, unless required by applicable law (such as deliberate
* and grossly negligent acts) or agreed to in writing, shall any Contributor be
* liable for any damages, including any direct, indirect, special, incidental,
* or consequential damages of any character arising as a result of this License or
* out of the use or inability to use the software (including but not limited to damages
* for loss of goodwill, work stoppage, computer failure or malfunction, or any and
* all other commercial damages or losses), even if such Contributor has been advised
* of the possibility of such damages.
*/
'''
| cxx_registered_struct = {'Version', 'RenderTargetBlendDesc', 'BlendStateDesc', 'StencilOpDesc', 'DepthStencilStateDesc', 'RasterizerStateDesc', 'InputLayoutDesc', 'LayoutElement', 'SampleDesc', 'ShaderResourceVariableDesc', 'PipelineResourceDesc', 'PipelineResourceSignatureDesc', 'SamplerDesc', 'ImmutableSamplerDesc', 'PipelineResourceLayoutDesc', 'PipelineStateDesc', 'GraphicsPipelineDesc', 'RayTracingPipelineDesc', 'TilePipelineDesc', 'RenderPassAttachmentDesc', 'AttachmentReference', 'ShadingRateAttachment', 'SubpassDesc', 'SubpassDependencyDesc', 'RenderPassDesc', 'ShaderDesc', 'ShaderMacro', 'ShaderResourceDesc', 'ShaderCreateInfo', 'RenderDeviceInfo', 'GraphicsAdapterInfo', 'DeviceFeatures', 'AdapterMemoryInfo', 'RayTracingProperties', 'WaveOpProperties', 'BufferProperties', 'TextureProperties', 'SamplerProperties', 'MeshShaderProperties', 'ShadingRateProperties', 'ComputeShaderProperties', 'DrawCommandProperties', 'SparseResourceProperties', 'ShadingRateMode', 'CommandQueueInfo', 'NDCAttribs', 'SerializationDeviceD3D11Info', 'SerializationDeviceD3D12Info', 'SerializationDeviceVkInfo', 'SerializationDeviceMtlInfo', 'SerializationDeviceCreateInfo'}
cxx_registerd_base_struct = {'DeviceObjectAttribs': {'name': 'Name', 'type': 'const char *', 'meta': 'string'}}
cxx_registered_enum = {'BLEND_FACTOR', 'BLEND_OPERATION', 'COLOR_MASK', 'LOGIC_OPERATION', 'COLOR_MASK', 'STENCIL_OP', 'COMPARISON_FUNCTION', 'FILL_MODE', 'CULL_MODE', 'INPUT_ELEMENT_FREQUENCY', 'VALUE_TYPE', 'TEXTURE_FORMAT', 'PRIMITIVE_TOPOLOGY', 'RESOURCE_STATE', 'ACCESS_FLAGS', 'ATTACHMENT_LOAD_OP', 'ATTACHMENT_STORE_OP', 'PIPELINE_TYPE', 'PIPELINE_STAGE_FLAGS', 'PIPELINE_SHADING_RATE_FLAGS', 'PIPELINE_RESOURCE_FLAGS', 'PSO_CREATE_FLAGS', 'SAMPLER_FLAGS', 'FILTER_TYPE', 'TEXTURE_ADDRESS_MODE', 'SHADER_TYPE', 'SHADER_SOURCE_LANGUAGE', 'SHADER_COMPILER', 'SHADER_RESOURCE_TYPE', 'SHADER_RESOURCE_VARIABLE_TYPE', 'SHADER_RESOURCE_VARIABLE_TYPE_FLAGS', 'SHADER_VARIABLE_FLAGS', 'ADAPTER_TYPE', 'ADAPTER_VENDOR', 'BIND_FLAGS', 'CPU_ACCESS_FLAGS', 'WAVE_FEATURE', 'RAY_TRACING_CAP_FLAGS', 'COMMAND_QUEUE_TYPE', 'SPARSE_RESOURCE_CAP_FLAGS', 'DRAW_COMMAND_CAP_FLAGS', 'SHADING_RATE_CAP_FLAGS', 'SHADING_RATE_COMBINER', 'SHADING_RATE_TEXTURE_ACCESS', 'SHADING_RATE_FORMAT', 'RENDER_DEVICE_TYPE', 'DEVICE_FEATURE_STATE', 'SHADING_RATE', 'SAMPLE_COUNT'}
cxx_suffix_file = 'Parser'
cxx_extension_file = 'hpp'
cxx_licence = '/*\n * Copyright 2019-2022 Diligent Graphics LLC\n * \n * Licensed under the Apache License, Version 2.0 (the "License");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n * http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an "AS IS" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * In no event and under no legal theory, whether in tort (including negligence), \n * contract, or otherwise, unless required by applicable law (such as deliberate \n * and grossly negligent acts) or agreed to in writing, shall any Contributor be\n * liable for any damages, including any direct, indirect, special, incidental, \n * or consequential damages of any character arising as a result of this License or \n * out of the use or inability to use the software (including but not limited to damages \n * for loss of goodwill, work stoppage, computer failure or malfunction, or any and \n * all other commercial damages or losses), even if such Contributor has been advised \n * of the possibility of such damages.\n */\n' |
#
# PySNMP MIB module Cajun-ROOT (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Cajun-ROOT
# Produced by pysmi-0.3.4 at Mon Apr 29 17:08:17 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, Bits, Counter32, NotificationType, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ObjectIdentity, Unsigned32, iso, Counter64, enterprises, IpAddress, Integer32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Bits", "Counter32", "NotificationType", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ObjectIdentity", "Unsigned32", "iso", "Counter64", "enterprises", "IpAddress", "Integer32", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
lucent = MibIdentifier((1, 3, 6, 1, 4, 1, 1751))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1))
mibs = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2))
cajunRtrProduct = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 1, 43))
cajunRtr = ModuleIdentity((1, 3, 6, 1, 4, 1, 1751, 2, 43))
if mibBuilder.loadTexts: cajunRtr.setLastUpdated('9904220000Z')
if mibBuilder.loadTexts: cajunRtr.setOrganization("Lucent's Concord Technology Center (CTC) ")
cjnSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1))
cjnProtocol = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2))
cjnMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3))
cjnCli = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1, 1))
cjnDload = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1, 2))
cjnIpv4 = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 1))
cjnIpv6 = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 2))
cjnIpx = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 3))
cjnAtalk = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 4))
cjnIpv4Serv = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 5))
cjnIpv6Serv = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 6))
cjnIpxServ = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 7))
cjnAtalkServ = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 8))
cjnOspf = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 9))
cjnRip = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 10))
cjnIgmp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 11))
cjnRtm = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 12))
cjnDvmrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 13))
cjnPimSm = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 14))
cjnPimDm = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 15))
cjnRsvp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 16))
cjnSnmp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 17))
cjnBgp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 18))
cjnLrrp = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 19))
cjnIpxRip = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 20))
cjnIpxSap = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 21))
cjnIpIfMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 1))
cjnIpxIfMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 2))
cjnAtalkIfMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 3))
cjnResourceMgr = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 4))
cjnIpAListMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 5))
cjnIpForwardCtlMgt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 6))
cjnIpFwdMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 7))
mibBuilder.exportSymbols("Cajun-ROOT", cjnIpForwardCtlMgt=cjnIpForwardCtlMgt, cjnSnmp=cjnSnmp, cjnIpv6=cjnIpv6, cjnAtalkServ=cjnAtalkServ, mibs=mibs, cjnIpIfMgmt=cjnIpIfMgmt, cjnDvmrp=cjnDvmrp, PYSNMP_MODULE_ID=cajunRtr, products=products, cjnRsvp=cjnRsvp, cjnIpv6Serv=cjnIpv6Serv, cjnResourceMgr=cjnResourceMgr, cjnIgmp=cjnIgmp, cjnOspf=cjnOspf, cjnBgp=cjnBgp, cjnIpxIfMgmt=cjnIpxIfMgmt, cjnAtalkIfMgmt=cjnAtalkIfMgmt, cjnMgmt=cjnMgmt, cjnRtm=cjnRtm, cajunRtr=cajunRtr, cjnPimSm=cjnPimSm, cjnIpFwdMgmt=cjnIpFwdMgmt, cjnLrrp=cjnLrrp, cjnIpxRip=cjnIpxRip, cjnAtalk=cjnAtalk, cjnIpAListMgmt=cjnIpAListMgmt, cajunRtrProduct=cajunRtrProduct, cjnCli=cjnCli, cjnIpv4Serv=cjnIpv4Serv, cjnPimDm=cjnPimDm, cjnIpxServ=cjnIpxServ, cjnRip=cjnRip, cjnDload=cjnDload, cjnIpx=cjnIpx, cjnProtocol=cjnProtocol, lucent=lucent, cjnIpv4=cjnIpv4, cjnSystem=cjnSystem, cjnIpxSap=cjnIpxSap)
| (integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, constraints_intersection, single_value_constraint, constraints_union, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ConstraintsIntersection', 'SingleValueConstraint', 'ConstraintsUnion', 'ValueSizeConstraint')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(module_identity, bits, counter32, notification_type, time_ticks, mib_scalar, mib_table, mib_table_row, mib_table_column, mib_identifier, object_identity, unsigned32, iso, counter64, enterprises, ip_address, integer32, gauge32) = mibBuilder.importSymbols('SNMPv2-SMI', 'ModuleIdentity', 'Bits', 'Counter32', 'NotificationType', 'TimeTicks', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'MibIdentifier', 'ObjectIdentity', 'Unsigned32', 'iso', 'Counter64', 'enterprises', 'IpAddress', 'Integer32', 'Gauge32')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
lucent = mib_identifier((1, 3, 6, 1, 4, 1, 1751))
products = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 1))
mibs = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2))
cajun_rtr_product = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 1, 43))
cajun_rtr = module_identity((1, 3, 6, 1, 4, 1, 1751, 2, 43))
if mibBuilder.loadTexts:
cajunRtr.setLastUpdated('9904220000Z')
if mibBuilder.loadTexts:
cajunRtr.setOrganization("Lucent's Concord Technology Center (CTC) ")
cjn_system = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1))
cjn_protocol = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2))
cjn_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3))
cjn_cli = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1, 1))
cjn_dload = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 1, 2))
cjn_ipv4 = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 1))
cjn_ipv6 = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 2))
cjn_ipx = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 3))
cjn_atalk = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 4))
cjn_ipv4_serv = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 5))
cjn_ipv6_serv = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 6))
cjn_ipx_serv = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 7))
cjn_atalk_serv = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 8))
cjn_ospf = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 9))
cjn_rip = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 10))
cjn_igmp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 11))
cjn_rtm = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 12))
cjn_dvmrp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 13))
cjn_pim_sm = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 14))
cjn_pim_dm = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 15))
cjn_rsvp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 16))
cjn_snmp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 17))
cjn_bgp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 18))
cjn_lrrp = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 19))
cjn_ipx_rip = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 20))
cjn_ipx_sap = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 2, 21))
cjn_ip_if_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 1))
cjn_ipx_if_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 2))
cjn_atalk_if_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 3))
cjn_resource_mgr = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 4))
cjn_ip_a_list_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 5))
cjn_ip_forward_ctl_mgt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 6))
cjn_ip_fwd_mgmt = mib_identifier((1, 3, 6, 1, 4, 1, 1751, 2, 43, 3, 7))
mibBuilder.exportSymbols('Cajun-ROOT', cjnIpForwardCtlMgt=cjnIpForwardCtlMgt, cjnSnmp=cjnSnmp, cjnIpv6=cjnIpv6, cjnAtalkServ=cjnAtalkServ, mibs=mibs, cjnIpIfMgmt=cjnIpIfMgmt, cjnDvmrp=cjnDvmrp, PYSNMP_MODULE_ID=cajunRtr, products=products, cjnRsvp=cjnRsvp, cjnIpv6Serv=cjnIpv6Serv, cjnResourceMgr=cjnResourceMgr, cjnIgmp=cjnIgmp, cjnOspf=cjnOspf, cjnBgp=cjnBgp, cjnIpxIfMgmt=cjnIpxIfMgmt, cjnAtalkIfMgmt=cjnAtalkIfMgmt, cjnMgmt=cjnMgmt, cjnRtm=cjnRtm, cajunRtr=cajunRtr, cjnPimSm=cjnPimSm, cjnIpFwdMgmt=cjnIpFwdMgmt, cjnLrrp=cjnLrrp, cjnIpxRip=cjnIpxRip, cjnAtalk=cjnAtalk, cjnIpAListMgmt=cjnIpAListMgmt, cajunRtrProduct=cajunRtrProduct, cjnCli=cjnCli, cjnIpv4Serv=cjnIpv4Serv, cjnPimDm=cjnPimDm, cjnIpxServ=cjnIpxServ, cjnRip=cjnRip, cjnDload=cjnDload, cjnIpx=cjnIpx, cjnProtocol=cjnProtocol, lucent=lucent, cjnIpv4=cjnIpv4, cjnSystem=cjnSystem, cjnIpxSap=cjnIpxSap) |
# Add 5 to number
add5 = lambda n : n + 5
print(add5(2))
print(add5(7))
print()
# Square number
sqr = lambda n : n * n
print(sqr(2))
print(sqr(7))
print()
# Next integer
nextInt = lambda n : int(n) + 1
print(nextInt(2.7))
print(nextInt(7.2))
print()
# Previous integer of half
prevInt = lambda n : int(n // 2)
print(prevInt(2.7))
print(prevInt(7.2))
print()
# Division lambda
div = lambda dvsr : lambda dvdn : dvdn / dvsr
print(div(5)(10))
print(div(3)(27)) | add5 = lambda n: n + 5
print(add5(2))
print(add5(7))
print()
sqr = lambda n: n * n
print(sqr(2))
print(sqr(7))
print()
next_int = lambda n: int(n) + 1
print(next_int(2.7))
print(next_int(7.2))
print()
prev_int = lambda n: int(n // 2)
print(prev_int(2.7))
print(prev_int(7.2))
print()
div = lambda dvsr: lambda dvdn: dvdn / dvsr
print(div(5)(10))
print(div(3)(27)) |
state = '10011111011011001'
disk_length = 35651584
def mutate(a):
b = ''.join(['1' if x == '0' else '0' for x in reversed(a)])
return a + '0' + b
def checksum(a):
result = ''
i = 0
while i < len(a) - 1:
if a[i] == a[i+1]:
result += '1'
else:
result += '0'
i += 2
if len(result) % 2 != 1:
result = checksum(result)
return result
while len(state) < disk_length:
state = mutate(state)
state = state[:disk_length]
print(checksum(state))
| state = '10011111011011001'
disk_length = 35651584
def mutate(a):
b = ''.join(['1' if x == '0' else '0' for x in reversed(a)])
return a + '0' + b
def checksum(a):
result = ''
i = 0
while i < len(a) - 1:
if a[i] == a[i + 1]:
result += '1'
else:
result += '0'
i += 2
if len(result) % 2 != 1:
result = checksum(result)
return result
while len(state) < disk_length:
state = mutate(state)
state = state[:disk_length]
print(checksum(state)) |
x = 1
while x < 10:
y = 1
while y < 10:
print("%4d" % (x*y), end="")
y += 1
print()
x += 1
| x = 1
while x < 10:
y = 1
while y < 10:
print('%4d' % (x * y), end='')
y += 1
print()
x += 1 |
def saisie_liste():
cest_un_nombre=True
premier_nombre = input("Entrer un nombre : ")
somme=int(premier_nombre)
min=int(premier_nombre)
max=int(premier_nombre)
nombre_int = 0
n=0
moyenne = 0
while cest_un_nombre==True:
n += 1
nombre=input("Entrer un nombre : ")
if nombre=="":
print("Ce n'est pas un nombre")
cest_un_nombre=False
else:
nombre_int = int(nombre)
somme += nombre_int
moyenne = int(somme / n)
if(min>nombre_int):
min = nombre_int
elif(max<nombre_int):
max = nombre_int
else:
pass
print("Moyenne actuelle : "+ str(moyenne))
print("Min : "+str(min))
print("Max : "+str(max))
saisie_liste()
| def saisie_liste():
cest_un_nombre = True
premier_nombre = input('Entrer un nombre : ')
somme = int(premier_nombre)
min = int(premier_nombre)
max = int(premier_nombre)
nombre_int = 0
n = 0
moyenne = 0
while cest_un_nombre == True:
n += 1
nombre = input('Entrer un nombre : ')
if nombre == '':
print("Ce n'est pas un nombre")
cest_un_nombre = False
else:
nombre_int = int(nombre)
somme += nombre_int
moyenne = int(somme / n)
if min > nombre_int:
min = nombre_int
elif max < nombre_int:
max = nombre_int
else:
pass
print('Moyenne actuelle : ' + str(moyenne))
print('Min : ' + str(min))
print('Max : ' + str(max))
saisie_liste() |
AF_INET = 2
AF_INET6 = 10
IPPROTO_IP = 0
IPPROTO_TCP = 6
IPPROTO_UDP = 17
IP_ADD_MEMBERSHIP = 3
SOCK_DGRAM = 2
SOCK_RAW = 3
SOCK_STREAM = 1
SOL_SOCKET = 4095
SO_REUSEADDR = 4
def getaddrinfo():
pass
def socket():
pass
| af_inet = 2
af_inet6 = 10
ipproto_ip = 0
ipproto_tcp = 6
ipproto_udp = 17
ip_add_membership = 3
sock_dgram = 2
sock_raw = 3
sock_stream = 1
sol_socket = 4095
so_reuseaddr = 4
def getaddrinfo():
pass
def socket():
pass |
fixed_rows = []
with open('runs/expert/baseline_pass_full_doc_rerank', 'r') as fi:
for line in fi:
line = line.strip().split()
if line:
fixed_score = -float(line[4])
line[4] = str(fixed_score)
fixed_rows.append('\t'.join(line))
with open('runs/expert/baseline_pass_full_doc_rerank', 'w') as fo:
for row in fixed_rows:
fo.write(row + '\n')
| fixed_rows = []
with open('runs/expert/baseline_pass_full_doc_rerank', 'r') as fi:
for line in fi:
line = line.strip().split()
if line:
fixed_score = -float(line[4])
line[4] = str(fixed_score)
fixed_rows.append('\t'.join(line))
with open('runs/expert/baseline_pass_full_doc_rerank', 'w') as fo:
for row in fixed_rows:
fo.write(row + '\n') |
def insertion_sort(l):
for i in range(1, len(l)):
j = i-1
key = l[i]
while (l[j] > key) and (j >= 0):
l[j+1] = l[j]
j -= 1
l[j+1] = key
numbers = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
insertion_sort(numbers)
print(numbers)
| def insertion_sort(l):
for i in range(1, len(l)):
j = i - 1
key = l[i]
while l[j] > key and j >= 0:
l[j + 1] = l[j]
j -= 1
l[j + 1] = key
numbers = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
insertion_sort(numbers)
print(numbers) |
# Question: https://projecteuler.net/problem=120
# The coefficients of a^(odd) cancel out, so there might be a pattern ...
# n | X_n = (a-1)^n + (a+1)^n | mod a^2
#-----|----------------------------|--------
# 1 | 2a | 2a
# 2 | 2a^2 + 2 | 2
# 3 | 2a^3 + 6a | 6a
# 4 | 2a^4 + 6a^2 + 2 | 2
# 5 | 2a^5 + 20a^3 + 10a | 10a
# 6 | 2a^6 + 30a^4 + 30a^2 + 2 | 2
# 7 | 2a^7 + 42a^5 + 70a^3 + 14a | 14a
# So, if n is even, X^n = 2 (mod a^2)
# if n is odd, X^n = 2na (mod a^2)
# For a given 'a', what is the maximum x such that 2na = x (mod a^2) where n is an abitrary positive integer?
# We know that 2na is even, so if a if odd, the highest possible value of x is a^2 - 1
# if a is even, the highest possible value of x is a^2 - 2
# If a is even, then there exists k such that a = 2k. pick n = k, we have 2na = 2ka = a^2 = 0 (mod a^2)
# n = k - 1, we have 2na = a^2 - 2a (mod a^2)
# n = k - 2, we have 2na = a^2 - 4a (mod a^2)
# ...
# n = k - k, we have 2na = a^2 - 2ka = a^2 - a^2 = 0 (mod a^2)
# so the modulo group is {0, a^2 - 2ka}
# If a is odd, then there exists k such that a = 2k + 1. Pick n = 2k+1, then 2na = 2(2k+1)a = 2a^2 = 0 (mod a^2)
# ...
# n = k+2, then 2na = 2(k+2)a = (2k+1)a + 3a = a^2 + 3a = 3a = a^2 - a^2 + 3a = a^2 - (2k-2)a (mod a^2)
# n = k+1, then 2na = 2(k+1)a = (2k+1)a + a = a^2 + a = a = a^2 - (2k)a (mod a^2)
# start here -> n = k, then 2na = 2ka = (2k+1)a - a = a^2 - a (mod a^2)
# n = k-1, then 2na = 2(k-1)a = (2k+1)a - 3a = a^2 - 3a (mod a^2)
# n = k-2, then 2na = 2(k-2)a = (2k+1)a - 5a = a^2 - 5a (mod a^2)
# ...
# n = k-k, then 2na = 0 (mod a^2)
# so the modulo group is {0, a^2 - ka}
# So, if 'a' is odd, r_max = max(2, a^2 - a). Since a >= 3, r_max = a^2 - a
# if 'a' is even, r_max = max(2, a^2 - 2a). Since a >= 3, r_max = a^2 - 2a
# So, sum_{3,n}(r_max) = [sum_{1,n}(a^2-a)] - [sum_{3<=a<=n, 'a' even} (a)] - {a=1}(a^2-a) - {a=2}(a^2-a)
# = [sum_{1,n}(a^2-a)] - (2*[sum_{1<=i<=floor(n/2)} (i)] - 2) - {a=1}(a^2-a) - {a=2}(a^2-a)
# = 1/6 * n * (n+1) * (2n+1) - 1/2 * n * (n+1) - (2*n/2*(n/2+1) - 2) - 0 - 2
# = 1/3 * (n-1) * n * (n+1) - 1/4*n*(n+2)
N = 1000
result = (N-1)*N*(N+1) // 3 - N * (N+2)//4
print(result)
| n = 1000
result = (N - 1) * N * (N + 1) // 3 - N * (N + 2) // 4
print(result) |
n = int(input().strip())
for i in range(0,n):
for y in range(0,n):
if(y<n-i-1):
print(' ', end='')
elif(y>=n-i-1 and y!=n-1):
print('#',end='')
else:
print('#')
| n = int(input().strip())
for i in range(0, n):
for y in range(0, n):
if y < n - i - 1:
print(' ', end='')
elif y >= n - i - 1 and y != n - 1:
print('#', end='')
else:
print('#') |
class Node:
def __init__(self, value):
self.value = value
self.next = None
class Stack:
def __init__(self):
self.top = None
def push(self, value):
node = Node(value)
if self.top:
node.next = self.top
self.top = node
else:
self.top = node
def pop(self):
try:
deleted_value = self.top.value
temp = self.top.next
self.top = temp
temp.next = None
return deleted_value
except:
return "This is empty stack"
def peek(self):
try:
return self.top.value
except:
return "This is empty stack"
def isEmpty(self):
if self.top == None:
return False
else:
return True
class Queue:
def __init__(self):
self.front = None
self.rear = None
def enqueue(self, value):
node = Node(value)
if self.front == None:
self.front = node
self.rear = node
else:
self.rear.next = node
self.rear = node
def dequeue(self):
try:
removed = self.front
self.front = self.front.next
self.size -= 1
return removed.value
except:
return "The Queue is empty"
def peek(self):
try:
return self.front.value
except:
return "This is Empty queue"
def isEmpty(self):
if self.front == None and self.rear == None:
return True
else:
return False
def length(self):
length = 0
while self.front:
length += 1
self.front = self.front.next
return length
# if __name__=="__main__" :
# pass
# q = Queue()
# q.enqueue(4)
# q.enqueue(4)
# print(q.dequeue())
| class Node:
def __init__(self, value):
self.value = value
self.next = None
class Stack:
def __init__(self):
self.top = None
def push(self, value):
node = node(value)
if self.top:
node.next = self.top
self.top = node
else:
self.top = node
def pop(self):
try:
deleted_value = self.top.value
temp = self.top.next
self.top = temp
temp.next = None
return deleted_value
except:
return 'This is empty stack'
def peek(self):
try:
return self.top.value
except:
return 'This is empty stack'
def is_empty(self):
if self.top == None:
return False
else:
return True
class Queue:
def __init__(self):
self.front = None
self.rear = None
def enqueue(self, value):
node = node(value)
if self.front == None:
self.front = node
self.rear = node
else:
self.rear.next = node
self.rear = node
def dequeue(self):
try:
removed = self.front
self.front = self.front.next
self.size -= 1
return removed.value
except:
return 'The Queue is empty'
def peek(self):
try:
return self.front.value
except:
return 'This is Empty queue'
def is_empty(self):
if self.front == None and self.rear == None:
return True
else:
return False
def length(self):
length = 0
while self.front:
length += 1
self.front = self.front.next
return length |
class Node:
def __init__(self, condition, body):
self.condition = condition
self.body = body
def visit(self, context):
rvalue = None
while self.condition.visit(context):
rvalue = self.body.visit(context)
return rvalue
| class Node:
def __init__(self, condition, body):
self.condition = condition
self.body = body
def visit(self, context):
rvalue = None
while self.condition.visit(context):
rvalue = self.body.visit(context)
return rvalue |
__all__ = [
'arch_blocks',
'get_mask',
'get_param_groups',
'logger',
'losses',
'lr_schedulers',
'optimizers_L1L2',
'tensorflow_logger',
] | __all__ = ['arch_blocks', 'get_mask', 'get_param_groups', 'logger', 'losses', 'lr_schedulers', 'optimizers_L1L2', 'tensorflow_logger'] |
class Solution:
def solve(self, courses):
n = len(courses)
def helper(start):
visited[start] = 1
for v in courses[start]:
if visited[v]==1:
return True
elif visited[v]==0:
if helper(v):
return True
visited[start] = 2
return False
visited = [0]*n
for i in range(n):
# print(visited)
if visited[i]==0 and helper(i):
# print(visited)
return False
return True | class Solution:
def solve(self, courses):
n = len(courses)
def helper(start):
visited[start] = 1
for v in courses[start]:
if visited[v] == 1:
return True
elif visited[v] == 0:
if helper(v):
return True
visited[start] = 2
return False
visited = [0] * n
for i in range(n):
if visited[i] == 0 and helper(i):
return False
return True |
chipper = input('Input Message: ')
plain = ''
for alphabet in chipper:
temp = ord(alphabet)-1
plain += chr(temp)
print(plain)
| chipper = input('Input Message: ')
plain = ''
for alphabet in chipper:
temp = ord(alphabet) - 1
plain += chr(temp)
print(plain) |
def dec1(def1):
def exec():
print("Executing now")
def1()
print("Executed")
return exec
@dec1
def who_is_sandy():
print("Sandy is good programmer")
#who_is_sandy = dec1(who_is_sandy) #Decorative function is dec1 another term is @dec1
who_is_sandy()
| def dec1(def1):
def exec():
print('Executing now')
def1()
print('Executed')
return exec
@dec1
def who_is_sandy():
print('Sandy is good programmer')
who_is_sandy() |
# container with most water
# https://leetcode.com/problems/container-with-most-water/
# the function maxArea -> take in a list of integers and return an integer
# 3 variables to keep track of the current max area, left and right pointers
# left pointer initialized to the first elements of the list
# right pointer initialized to the last elements of the list
# current max area initialized to 0
# height will be the lower of the two elements at the left and right pointers
# width will be the difference between the right pointer and left pointer
# compute the area between the 2 pointer and compare result with current max area, if result is greater than current max area, update current max area to result
# compare the height of the 2 pointer and shift the pointer that is shorter
# [to compensate for the reduction in width, we want to move the pointer that is shorter to a taller line]
# recompute current max area
class Solution:
def maxArea(self, height: list[int]) -> int:
current_max_area = 0
left = 0
right = len(height)-1
while (left < right):
area = (right - left) * min(height[left], height[right])
if area > current_max_area:
current_max_area = area
if height[left] < height[right]:
left += 1
else:
right -= 1
return current_max_area
a = [1,8,6,2,5,4,8,3,7]
sol = Solution()
print(sol.maxArea(a))
| class Solution:
def max_area(self, height: list[int]) -> int:
current_max_area = 0
left = 0
right = len(height) - 1
while left < right:
area = (right - left) * min(height[left], height[right])
if area > current_max_area:
current_max_area = area
if height[left] < height[right]:
left += 1
else:
right -= 1
return current_max_area
a = [1, 8, 6, 2, 5, 4, 8, 3, 7]
sol = solution()
print(sol.maxArea(a)) |
print("------------------------------------")
print("********* Woorden switchen *********")
print("------------------------------------")
# Input temperatuur in Celsius
woord1 = input("Woord 1: ")
woord2 = input("Woord 2: ")
# Output
print()
print("Woord 1: " + woord1.upper())
print("Woord 2: " + woord2.upper())
print()
# Switchen van woorden
woord1, woord2 = woord2, woord1
# Output
print()
print("Woord 1: " + woord1.upper())
print("Woord 2: " + woord2.upper())
print()
# Workaround wachten tot enter
input("Druk op Enter om door te gaan...")
| print('------------------------------------')
print('********* Woorden switchen *********')
print('------------------------------------')
woord1 = input('Woord 1: ')
woord2 = input('Woord 2: ')
print()
print('Woord 1: ' + woord1.upper())
print('Woord 2: ' + woord2.upper())
print()
(woord1, woord2) = (woord2, woord1)
print()
print('Woord 1: ' + woord1.upper())
print('Woord 2: ' + woord2.upper())
print()
input('Druk op Enter om door te gaan...') |
# * =======================
# *
# * Author: Matthew Moccaro
# * File: Network_Programming.py
# * Type: Python Source File
# *
# * Creation Date: 1/2/19
# *
# * Description: Python
# * source file for the
# * network programming
# * project.
# *
# * ======================
print("Network Programming For Python")
| print('Network Programming For Python') |
__author__ = 'mstipanov'
class ApiRequestErrorDetails(object):
messageId = ""
text = ""
variables = ""
additionalDescription = ""
def __init__(self, text=""):
self.text = text
def __str__(self):
return "ApiRequestErrorDetails: {" \
"messageId = \"" + str(self.messageId) + "\", " \
"text = \"" + str(self.text) + "\", " \
"variables = \"" + str(
self.variables) + "\", " \
"additionalDescription = \"" + str(self.additionalDescription) + "\"" \
"}"
class ApiRequestError(object):
clientCorrelator = ""
serviceException = ApiRequestErrorDetails()
def __init__(self, clientCorrelator="", serviceException=ApiRequestErrorDetails()):
self.clientCorrelator = clientCorrelator
self.serviceException = serviceException
def __str__(self):
return "ApiRequestError: {" \
"clientCorrelator = \"" + str(self.clientCorrelator) + "\", " \
"serviceException = " + str(
self.serviceException) + "" \
"}"
class ApiException(Exception):
requestError = ApiRequestError()
def __init__(self, requestError=ApiRequestError()):
self.requestError = requestError
def __str__(self):
return "ApiException: {" \
"requestError = " + str(self.requestError) + "" \
"}" | __author__ = 'mstipanov'
class Apirequesterrordetails(object):
message_id = ''
text = ''
variables = ''
additional_description = ''
def __init__(self, text=''):
self.text = text
def __str__(self):
return 'ApiRequestErrorDetails: {messageId = "' + str(self.messageId) + '", text = "' + str(self.text) + '", variables = "' + str(self.variables) + '", additionalDescription = "' + str(self.additionalDescription) + '"}'
class Apirequesterror(object):
client_correlator = ''
service_exception = api_request_error_details()
def __init__(self, clientCorrelator='', serviceException=api_request_error_details()):
self.clientCorrelator = clientCorrelator
self.serviceException = serviceException
def __str__(self):
return 'ApiRequestError: {clientCorrelator = "' + str(self.clientCorrelator) + '", serviceException = ' + str(self.serviceException) + '}'
class Apiexception(Exception):
request_error = api_request_error()
def __init__(self, requestError=api_request_error()):
self.requestError = requestError
def __str__(self):
return 'ApiException: {requestError = ' + str(self.requestError) + '}' |
# -*- coding: utf-8 -*-
# Jupyter Extension points
def _jupyter_nbextension_paths():
return [
dict(
section="notebook",
# the path is relative to the `my_fancy_module` directory
src="resources/nbextension",
# directory in the `nbextension/` namespace
dest="nbsafety",
# _also_ in the `nbextension/` namespace
require="nbsafety/index",
)
]
def load_jupyter_server_extension(nbapp):
pass
| def _jupyter_nbextension_paths():
return [dict(section='notebook', src='resources/nbextension', dest='nbsafety', require='nbsafety/index')]
def load_jupyter_server_extension(nbapp):
pass |
class BatteryAndInverter:
name = "battery and inverter"
params = [
{
"key": "capacity_dc_kwh",
"label": "",
"units": "kwh",
"private": False,
"value": 4000,
"confidence": 0,
"notes": "",
"source": "FAKE"
},
{
"key": "capacity_dc_kw",
"label": "",
"units": "kw",
"private": False,
"value": 4000,
"confidence": 0,
"notes": "",
"source": "FAKE"
},
{
"key": "roundtrip_efficiency",
"label": "",
"units": "decimal percent",
"private": False,
"value": 0.95,
"confidence": 0,
"notes": "",
"source": "FAKE"
},
{
"key": "wh_per_kg",
"label": "",
"units": "Wh/kg",
"private": False,
"value": 200,
"confidence": 0,
"notes": "",
"source": "FAKE"
},
{
"key": "m3_per_kwh",
"label": "",
"units": "m3/kWh",
"private": False,
"value": 0.0001,
"confidence": 0,
"notes": "",
"source": "FAKE"
}
]
states = [
{
"key": "available_dc_kwh",
"label": "",
"units": "kwh",
"private": False,
"value": 4000,
"confidence": 0,
"notes": "",
"source": ""
},
{
"key": "generated_dc_kwh",
"label": "",
"units": "kwh",
"private": False,
"value": 0,
"confidence": 0,
"notes": "The is the way that generators send kwh to battery",
"source": ""
},
{
"key": "mass",
"label": "",
"units": "kg",
"private": True,
"value": 0,
"confidence": 0,
"notes": "",
"source": ""
},
{
"key": "volume",
"label": "",
"units": "m3",
"private": True,
"value": 0,
"confidence": 0,
"notes": "",
"source": ""
}
]
@staticmethod
def run_step(states, params, utils):
if states.mass == 0:
inverter_mass = 0 # TODO: Incorporate inverter mass
states.mass = 1 / ( params.wh_per_kg / 1000) * params.capacity_dc_kwh + inverter_mass
states.volume = params.m3_per_kwh * params.capacity_dc_kwh
if states.available_dc_kwh < 0:
utils.terminate_sim_with_error("available_dc_kwh was negative")
if states.available_dc_kwh == 0:
utils.log_warning("Available AC kWh is zero!")
# Due to current limitations in modeling setup
# Apply the full round trip battery efficiency for
# energy added to the battery instead of part when added in
# and part when added out
states.available_dc_kwh += states.generated_dc_kwh * params.roundtrip_efficiency
# TODO: Check whether this shoudl be ac or dc
if states.available_dc_kwh > params.capacity_dc_kwh:
states.available_dc_kwh = params.capacity_dc_kwh
# Reset the input DC bus so PV etc can be added in next sim tick
states.generated_dc_kwh = 0
# Hack for clipping by max available power
states.available_dc_kwh = min(states.available_dc_kwh, params.capacity_dc_kw)
| class Batteryandinverter:
name = 'battery and inverter'
params = [{'key': 'capacity_dc_kwh', 'label': '', 'units': 'kwh', 'private': False, 'value': 4000, 'confidence': 0, 'notes': '', 'source': 'FAKE'}, {'key': 'capacity_dc_kw', 'label': '', 'units': 'kw', 'private': False, 'value': 4000, 'confidence': 0, 'notes': '', 'source': 'FAKE'}, {'key': 'roundtrip_efficiency', 'label': '', 'units': 'decimal percent', 'private': False, 'value': 0.95, 'confidence': 0, 'notes': '', 'source': 'FAKE'}, {'key': 'wh_per_kg', 'label': '', 'units': 'Wh/kg', 'private': False, 'value': 200, 'confidence': 0, 'notes': '', 'source': 'FAKE'}, {'key': 'm3_per_kwh', 'label': '', 'units': 'm3/kWh', 'private': False, 'value': 0.0001, 'confidence': 0, 'notes': '', 'source': 'FAKE'}]
states = [{'key': 'available_dc_kwh', 'label': '', 'units': 'kwh', 'private': False, 'value': 4000, 'confidence': 0, 'notes': '', 'source': ''}, {'key': 'generated_dc_kwh', 'label': '', 'units': 'kwh', 'private': False, 'value': 0, 'confidence': 0, 'notes': 'The is the way that generators send kwh to battery', 'source': ''}, {'key': 'mass', 'label': '', 'units': 'kg', 'private': True, 'value': 0, 'confidence': 0, 'notes': '', 'source': ''}, {'key': 'volume', 'label': '', 'units': 'm3', 'private': True, 'value': 0, 'confidence': 0, 'notes': '', 'source': ''}]
@staticmethod
def run_step(states, params, utils):
if states.mass == 0:
inverter_mass = 0
states.mass = 1 / (params.wh_per_kg / 1000) * params.capacity_dc_kwh + inverter_mass
states.volume = params.m3_per_kwh * params.capacity_dc_kwh
if states.available_dc_kwh < 0:
utils.terminate_sim_with_error('available_dc_kwh was negative')
if states.available_dc_kwh == 0:
utils.log_warning('Available AC kWh is zero!')
states.available_dc_kwh += states.generated_dc_kwh * params.roundtrip_efficiency
if states.available_dc_kwh > params.capacity_dc_kwh:
states.available_dc_kwh = params.capacity_dc_kwh
states.generated_dc_kwh = 0
states.available_dc_kwh = min(states.available_dc_kwh, params.capacity_dc_kw) |
# Generate a mask for the upper triangle
mask = np.zeros_like(corr, dtype=np.bool)
mask[np.triu_indices_from(mask)] = True
# Set up the matplotlib figure
f, ax = plt.subplots(figsize=(20, 18))
# Draw the heatmap with the mask and correct aspect ratio
sns.heatmap(corr, mask=mask, cmap="YlGnBu", vmax=.30, center=0,
square=True, linewidths=.5, cbar_kws={"shrink": .5}) | mask = np.zeros_like(corr, dtype=np.bool)
mask[np.triu_indices_from(mask)] = True
(f, ax) = plt.subplots(figsize=(20, 18))
sns.heatmap(corr, mask=mask, cmap='YlGnBu', vmax=0.3, center=0, square=True, linewidths=0.5, cbar_kws={'shrink': 0.5}) |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class LbaasToBigIP(object):
def __init__(self, benchmark, benchmark_filter):
self.benchmark_name = None
self.benchmark = None
self.benchmark_filter = None
self.benchmark_projects = None
self.subject_name = None
self.subject = None
self.subject_filter = None
self.subject_projects = None
self.validate_subject(benchmark)
self.init_benchmark(benchmark, benchmark_filter)
def compare_to(self, subject, subject_filter):
self.validate_subject(subject)
self.init_subject(subject, subject_filter)
def validate_subject(self, subject):
if not isinstance(subject, dict):
raise Exception("Comparator must be a dcit type")
if len(subject) != 1:
raise Exception("Only one Comparator should be "
"provided at a time")
def init_subject(self, subject, subject_filter):
self.subject_name = subject.keys()[0]
self.subject = subject.values()[0]
self.subject_filter = subject_filter
projects = self.subject.get_projects_on_device()
self.subject_projects = self.subject_filter.get_ids(
projects
)
def init_benchmark(self, benchmark, benchmark_filter):
self.benchmark_name = benchmark.keys()[0]
self.benchmark = benchmark.values()[0]
self.benchmark_filter = benchmark_filter
projects = \
self.benchmark.get_projects_on_device()
self.benchmark_projects = set(projects)
def get_common_resources_diff(self, bm_resources,
sub_method,
resource_type=None):
sub_resources = []
bm_res = self.benchmark_filter.get_resources(
bm_resources)
bm_ids = set(bm_res.keys())
for project in self.subject_projects:
sub_resources += sub_method(
project
)
sub_ids = self.subject_filter.get_ids(
sub_resources)
diff = bm_ids - sub_ids
result = self.benchmark_filter.convert_common_resources(
diff, bm_res, resource_type=resource_type
)
return result
def get_missing_projects(self):
res = self.benchmark_projects - self.subject_projects
diff = self.benchmark_filter.convert_projects(
res
)
return diff
def get_missing_loadbalancers(self):
lb_resources = []
sub_resources = []
missing = []
converted_lb = {}
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(
project
)
for project in self.subject_projects:
sub_resources += self.subject.get_project_loadbalancers(
project
)
bigip_lbs = self.subject_filter.filter_loadbalancers(sub_resources)
for lb in lb_resources:
if lb.id not in bigip_lbs:
converted_lb = self.benchmark_filter.convert_loadbalancers(
lb, ""
)
missing.append(converted_lb)
else:
bigip_ip = bigip_lbs[lb.id]
if lb.vip_address != bigip_ip:
converted_lb = self.benchmark_filter.convert_loadbalancers(
lb, bigip_ip
)
missing.append(converted_lb)
return missing
def get_missing_listeners(self):
lb_resources = []
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(
project
)
ls_resources = []
lb_ids = [lb.id for lb in lb_resources]
ls_resources += self.benchmark.get_listeners_by_lb_ids(lb_ids)
sub_method = self.subject.get_project_listeners
diff = self.get_common_resources_diff(
ls_resources, sub_method, "listener"
)
return diff
def get_missing_pools(self):
lb_resources = []
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(
project
)
pl_resources = []
lb_ids = [lb.id for lb in lb_resources]
pl_resources += self.benchmark.get_pools_by_lb_ids(lb_ids)
sub_method = self.subject.get_project_pools
diff = self.get_common_resources_diff(
pl_resources, sub_method, "pool"
)
return diff
def get_missing_members(self):
bm_lbs = []
bm_pools = []
sub_pools = []
missing_mb = []
for project in self.benchmark_projects:
bm_lbs += self.benchmark.get_agent_project_loadbalancers(
project
)
lb_ids = [lb.id for lb in bm_lbs]
bm_pools += self.benchmark.get_pools_by_lb_ids(lb_ids)
bm_mbs = self.benchmark_filter.filter_pool_members(bm_pools)
for project in self.subject_projects:
sub_pools += self.subject.get_project_pools(
project
)
sub_mbs = self.subject_filter.filter_pool_members(sub_pools)
for pool_id, members in bm_mbs.items():
if pool_id not in sub_mbs:
if members:
missing_mb += self.benchmark_filter.convert_members(
pool_id, members)
continue
for mb in members:
if not mb["address_port"] in sub_mbs[pool_id]:
mb['bigip_ips'] = sub_mbs[pool_id]
missing_mb += self.benchmark_filter.convert_members(
pool_id, [mb])
return missing_mb
| class Lbaastobigip(object):
def __init__(self, benchmark, benchmark_filter):
self.benchmark_name = None
self.benchmark = None
self.benchmark_filter = None
self.benchmark_projects = None
self.subject_name = None
self.subject = None
self.subject_filter = None
self.subject_projects = None
self.validate_subject(benchmark)
self.init_benchmark(benchmark, benchmark_filter)
def compare_to(self, subject, subject_filter):
self.validate_subject(subject)
self.init_subject(subject, subject_filter)
def validate_subject(self, subject):
if not isinstance(subject, dict):
raise exception('Comparator must be a dcit type')
if len(subject) != 1:
raise exception('Only one Comparator should be provided at a time')
def init_subject(self, subject, subject_filter):
self.subject_name = subject.keys()[0]
self.subject = subject.values()[0]
self.subject_filter = subject_filter
projects = self.subject.get_projects_on_device()
self.subject_projects = self.subject_filter.get_ids(projects)
def init_benchmark(self, benchmark, benchmark_filter):
self.benchmark_name = benchmark.keys()[0]
self.benchmark = benchmark.values()[0]
self.benchmark_filter = benchmark_filter
projects = self.benchmark.get_projects_on_device()
self.benchmark_projects = set(projects)
def get_common_resources_diff(self, bm_resources, sub_method, resource_type=None):
sub_resources = []
bm_res = self.benchmark_filter.get_resources(bm_resources)
bm_ids = set(bm_res.keys())
for project in self.subject_projects:
sub_resources += sub_method(project)
sub_ids = self.subject_filter.get_ids(sub_resources)
diff = bm_ids - sub_ids
result = self.benchmark_filter.convert_common_resources(diff, bm_res, resource_type=resource_type)
return result
def get_missing_projects(self):
res = self.benchmark_projects - self.subject_projects
diff = self.benchmark_filter.convert_projects(res)
return diff
def get_missing_loadbalancers(self):
lb_resources = []
sub_resources = []
missing = []
converted_lb = {}
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(project)
for project in self.subject_projects:
sub_resources += self.subject.get_project_loadbalancers(project)
bigip_lbs = self.subject_filter.filter_loadbalancers(sub_resources)
for lb in lb_resources:
if lb.id not in bigip_lbs:
converted_lb = self.benchmark_filter.convert_loadbalancers(lb, '')
missing.append(converted_lb)
else:
bigip_ip = bigip_lbs[lb.id]
if lb.vip_address != bigip_ip:
converted_lb = self.benchmark_filter.convert_loadbalancers(lb, bigip_ip)
missing.append(converted_lb)
return missing
def get_missing_listeners(self):
lb_resources = []
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(project)
ls_resources = []
lb_ids = [lb.id for lb in lb_resources]
ls_resources += self.benchmark.get_listeners_by_lb_ids(lb_ids)
sub_method = self.subject.get_project_listeners
diff = self.get_common_resources_diff(ls_resources, sub_method, 'listener')
return diff
def get_missing_pools(self):
lb_resources = []
for project in self.benchmark_projects:
lb_resources += self.benchmark.get_agent_project_loadbalancers(project)
pl_resources = []
lb_ids = [lb.id for lb in lb_resources]
pl_resources += self.benchmark.get_pools_by_lb_ids(lb_ids)
sub_method = self.subject.get_project_pools
diff = self.get_common_resources_diff(pl_resources, sub_method, 'pool')
return diff
def get_missing_members(self):
bm_lbs = []
bm_pools = []
sub_pools = []
missing_mb = []
for project in self.benchmark_projects:
bm_lbs += self.benchmark.get_agent_project_loadbalancers(project)
lb_ids = [lb.id for lb in bm_lbs]
bm_pools += self.benchmark.get_pools_by_lb_ids(lb_ids)
bm_mbs = self.benchmark_filter.filter_pool_members(bm_pools)
for project in self.subject_projects:
sub_pools += self.subject.get_project_pools(project)
sub_mbs = self.subject_filter.filter_pool_members(sub_pools)
for (pool_id, members) in bm_mbs.items():
if pool_id not in sub_mbs:
if members:
missing_mb += self.benchmark_filter.convert_members(pool_id, members)
continue
for mb in members:
if not mb['address_port'] in sub_mbs[pool_id]:
mb['bigip_ips'] = sub_mbs[pool_id]
missing_mb += self.benchmark_filter.convert_members(pool_id, [mb])
return missing_mb |
# Declaring the gotopt2 dependencies
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("@bazel_bats//:deps.bzl", "bazel_bats_dependencies")
# Include this into any dependencies that want to compile gotopt2 from source.
# This declaration must be updated every time the dependencies in the workspace
# change.
def gotopt2_dependencies():
go_repository(
name = "com_github_golang_glog",
commit = "23def4e6c14b",
importpath = "github.com/golang/glog",
)
go_repository(
name = "com_github_google_go_cmp",
importpath = "github.com/google/go-cmp",
tag = "v0.2.0",
)
go_repository(
name = "in_gopkg_check_v1",
commit = "20d25e280405",
importpath = "gopkg.in/check.v1",
)
go_repository(
name = "in_gopkg_yaml_v2",
importpath = "gopkg.in/yaml.v2",
tag = "v2.2.2",
)
git_repository(
name = "bazel_bats",
remote = "https://github.com/filmil/bazel-bats",
commit = "78da0822ea339bd0292b5cc0b5de6930d91b3254",
shallow_since = "1569564445 -0700",
)
bazel_bats_dependencies()
| load('@bazel_gazelle//:deps.bzl', 'gazelle_dependencies', 'go_repository')
load('@bazel_tools//tools/build_defs/repo:git.bzl', 'git_repository')
load('@bazel_bats//:deps.bzl', 'bazel_bats_dependencies')
def gotopt2_dependencies():
go_repository(name='com_github_golang_glog', commit='23def4e6c14b', importpath='github.com/golang/glog')
go_repository(name='com_github_google_go_cmp', importpath='github.com/google/go-cmp', tag='v0.2.0')
go_repository(name='in_gopkg_check_v1', commit='20d25e280405', importpath='gopkg.in/check.v1')
go_repository(name='in_gopkg_yaml_v2', importpath='gopkg.in/yaml.v2', tag='v2.2.2')
git_repository(name='bazel_bats', remote='https://github.com/filmil/bazel-bats', commit='78da0822ea339bd0292b5cc0b5de6930d91b3254', shallow_since='1569564445 -0700')
bazel_bats_dependencies() |
A = []
B = []
for i in range (10):
A.append(int(input()))
U = int(input())
for j in range (len(A)):
if A[j] == U:
B.append(j)
print(B)
| a = []
b = []
for i in range(10):
A.append(int(input()))
u = int(input())
for j in range(len(A)):
if A[j] == U:
B.append(j)
print(B) |
def differentSymbolsNaive(s):
diffArray = []
for i in range(len(list(s))):
if list(s)[i] not in diffArray:
diffArray.append(list(s)[i])
return len(diffArray) | def different_symbols_naive(s):
diff_array = []
for i in range(len(list(s))):
if list(s)[i] not in diffArray:
diffArray.append(list(s)[i])
return len(diffArray) |
s = input()
s = s.upper()
c = 0
for i in ("ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
for j in s:
if(i!=j):
c = 0
else:
c = 1
break
if(c==0):
break
if c==1:
print("Pangram exists")
else:
print("Pangram doesn't exists")
| s = input()
s = s.upper()
c = 0
for i in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ':
for j in s:
if i != j:
c = 0
else:
c = 1
break
if c == 0:
break
if c == 1:
print('Pangram exists')
else:
print("Pangram doesn't exists") |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.