class Configuracoes:
    """Stores the settings for the star game."""
    def __init__(self):
        """Initializes the game settings."""
        # Screen settings
        self.tela_largura = 1200
        self.tela_altura = 600
        self.cor_fundo = (46, 46, 46)
|
"""Project Euler problem 9"""
def calculate(perimeter):
"""Returns the product a*b*c of a Pythagorean triplet for which a + b + c == perimeter"""
for a in range(1, perimeter):
if a > perimeter:
break
for b in range(1, perimeter):
if a + b > perimeter:
break
for c in range(1, perimeter):
if a + b + c > perimeter:
break
if a + b + c == perimeter and a ** 2 + b ** 2 == c ** 2:
answer = a * b * c
return answer
if __name__ == "__main__":
print(calculate(1000))
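# Worked check: for perimeter = 1000 the Pythagorean triplet is a=200, b=375, c=425
# (200**2 + 375**2 == 425**2 and 200 + 375 + 425 == 1000), so this prints 31875000.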
|
__author__ = "Jeremy Lainé"
__email__ = "[email protected]"
__license__ = "BSD"
__summary__ = "Python wrapper for the ls-qpack QPACK library"
__title__ = "pylsqpack"
__uri__ = "https://github.com/aiortc/pylsqpack"
__version__ = "0.3.5"
|
## Copyright 2020 Google LLC
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## https://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""To be documented."""
def grpc_web_dependencies():
"""An utility method to load all dependencies of `gRPC-Web`."""
fail("Loading dependencies through grpc_web_dependencies() is not supported yet.")
def grpc_web_toolchains():
"""An utility method to load all gRPC-Web toolchains."""
native.register_toolchains(
"@com_github_grpc_grpc_web//bazel:closure_toolchain",
)
|
#=========================================================================================
class Job():
"""Represent job to-do in schedule"""
def __init__(self, aItineraryName, aItineraryColor, aTaskNumber, aItineraryNumber, aMachineName, aDuration):
self.itinerary = aItineraryName
self.machine = aMachineName
self.startTime = 0
self.duration = aDuration
self.endTime = 0
self.colorOfItinerary = aItineraryColor
self.idOperation = aTaskNumber
self.idItinerary = aItineraryNumber
self.completed = False
self.priority = 0
self.assignedMachine = ""
def __eq__(self, other):
return self.itinerary == other.itinerary and self.colorOfItinerary == other.colorOfItinerary and self.machine == other.machine and self.duration == other.duration and self.completed == other.completed and self.idOperation == other.idOperation
def __hash__(self):
return hash(str(self))
def __str__(self):
return "Job" + str(self.idItinerary) + "_" + str(self.idOperation) + " Machine:" + self.machine + "Duration: " + str(self.duration)
def getTupleStartAndDuration(self):
return (self.startTime, self.duration)
def getEndTime(self):
self.endTime = self.startTime + self.duration
return self.endTime
|
def palin(n,m):
if n<l//2:
if arr[n]==arr[m]:
return palin(n+1,m-1)
else:
return False
else:
return True
try:
arr= []
print(" Enter the array inputs and type 'stop' when you are done\n" )
while True:
arr.append(int(input()))
except (ValueError, EOFError):  # a non-integer input (e.g. 'stop') ends the loop
l= len(arr)
if palin(0,l-1):
print("PALINDROME")
else:
print("NOT PALINDROME")
|
""" What are Python Packages?
Packages are just folders (directories) that contain modules.
They contain a special Python file named __init__.py.
The __init__.py file can be empty. The file tells Python that the directory (folder) contains a Python package which can be imported like a module.
Packages are a convenient way to organize modules.
A package also can contain sub-packages.
"""
|
'''
Created on 1 dec. 2021
@author: laurentmichel
'''
class TableIterator(object):
'''
Simple wrapper iterating over table rows
'''
def __init__(self,
name,
data_table):
"""
Constructor
:param name: table name : not really used
:param data_table: Numpy table returned by astropy.votable
"""
self.name = name
self.data_table = data_table
self.last_row = None
self.iter = None
# not used yet
self.row_filter = None
def _get_next_row(self):
'''
Returns the next Numpy row or None.
The end of table exception usually returned by Numpy is trapped
'''
# The iterator is set at the first iteration
if self.iter is None:
self.iter = iter(self.data_table)
try:
while True:
row = next(self.iter)
if row is not None:
if (self.row_filter is None or
self.row_filter.row_match(row) == True):
self.last_row = row
return row
else:
return None
except StopIteration:
return None
def _rewind(self):
"""
Set the pointer on the table top, destroys the iterator actually
"""
self.iter = None
|
# Celery configuration file
# 3.1 Specify the broker (the message/task queue container); Redis is used here
broker_url = "redis://192.168.152.12/10"
|
__version__ = "v0.6.1-1"
__author__ = "Kanelis Elias"
__email__ = "[email protected]"
__license__ = "MIT"
|
## https://leetcode.com/problems/count-and-say/
## this problem seems hard at first, but that's mostly
## because it's incredibly poorly described. went to
## wikipedia and it makes sense. for ease, I went ahead
## and hard-coded in the first 5; after that, we generate
## from the previous one.
## generating the next one is actually pretty easy -- just
## loop over the string and keep track of how many times
## you hit the same number in a row, then when you hit a new
## number, update the output string with the number of times
## you saw the previous number (then make sure to catch the
## final run outside the loop).
## runtime comes in at ~94th percentile and memory comes in
## at ~86th percentile.
class Solution:
def generate_next(self, seq: str) -> str:
char = seq[0]
count = 1
out = ''
for c in seq[1:]:
if c == char:
count = count + 1
else:
out = out + str(count) + char
char = c
count = 1
## now the final string of numbers:
out = out + str(count) + char
return out
def countAndSay(self, n: int) -> str:
## ok so the description of the problem is terrible. from wikipedia:
## To generate a member of the sequence from the previous member,
## read off the digits of the previous member, counting the number
## of digits in groups of the same digit.
## so, 1st term is "one one", so the second term is "11".
## second term is then "two ones", so the third term is "21"
## then you read one 2, one 1 => 1211, etc.
if n == 1:
return '1'
if n == 2:
return '11'
if n == 3:
return '21'
if n == 4:
return '1211'
if n == 5:
return '111221'
myn = 5
seq = '111221'
while myn < n:
myn += 1
seq = self.generate_next(seq)
return seq
|
soma = 0
for n in range(1, 500, 2):
if n % 3 == 0:
soma = soma + n
print(soma)
|
# Times Tables
# Ask the user to input the number they would like the times tables for
tTable = int(input("What number would you like to see the times table for? "))
# Loop through 12 times
for number in range(12):
print("{0} times {1} equals {2}" .format(number+1, tTable, (number+1) * tTable))
input()
|
expected_output = {
"GigabitEthernet3/8/0/38": {
"auto_negotiate": True,
"counters": {
"normal": {
"in_broadcast_pkts": 1093,
"in_mac_pause_frames": 0,
"in_multicast_pkts": 18864,
"in_octets": 0,
"in_pkts": 7446905,
"in_unicast_pkts": 7426948,
"out_broadcast_pkts": 373635,
"out_mac_pause_frames": 0,
"out_multicast_pkts": 34367737,
"out_octets": 0,
"out_pkts": 40981139,
"out_unicast_pkts": 6239767
},
"in_abort": 0,
"in_broadcast_pkts": 1093,
"in_crc_errors": 0,
"in_errors": 0,
"in_frame": 0,
"in_giants": 0,
"in_ignored": 0,
"in_mac_pause_frames": 0,
"in_multicast_pkts": 18864,
"in_octets": 10280397282,
"in_overrun": 0,
"in_parity_errors": 0,
"in_pkts": 7446905,
"in_runts": 0,
"in_throttles": 0,
"in_unicast_pkts": 7426948,
"last_clear": "Never",
"out_abort": 0,
"out_broadcast_pkts": 373635,
"out_buffer_failure": 0,
"out_collision": 0,
"out_deferred": 0,
"out_errors": 0,
"out_late_collision": 0,
"out_lost_carrier": 0,
"out_mac_pause_frames": 0,
"out_multicast_pkts": 34367737,
"out_no_carrier": 0,
"out_octets": 44666966188,
"out_pkts": 40981139,
"out_underruns": 0,
"out_unicast_pkts": 6239767,
"rate": {
"in_rate_bytes": 0,
"in_rate_pkts": 0,
"load_interval": 300,
"out_rate_bytes": 0,
"out_rate_pkts": 0
}
},
"description": "GigabitEthernet3/8/0/38 Interface",
"duplex_mode": "unknown",
"enabled": True,
"frame_type": "PKTFMT_ETHNT_2",
"mac_address": "cc3e-5f69-5751",
"max_frame_length": 9216,
"media_type": "twisted pair",
"oper_status": "DOWN",
"port_speed": "unknown",
"port_type": "1000_BASE_T",
"priority": 0,
"pvid": 17,
"switchport": {
"mode": "access",
"untagged": 17
},
"type": "GigabitEthernet"
}
}
|
"""
Write an iterative implementation of a binary search function.
"""
def binary_search(haystack, needle):
first = 0
last = len(haystack) - 1
found = False
while first <= last and not found:
mid = (first + last) // 2
print('haystack is {}, mid is {}, first is {}, last is {}'.format(
haystack, mid, first, last))
if haystack[mid] == needle:
found = True
elif haystack[mid] < needle:
first = mid + 1
else:
last = mid - 1
return found
print(binary_search([2, 4, 6, 8, 9, 11, 15], 11))
|
"""These keypoint formats are taken from https://github.com/CMU-Perceptual-
Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp."""
OPENPOSE_135_KEYPOINTS = [
'nose',
'left_eye',
'right_eye',
'left_ear',
'right_ear',
'left_shoulder',
'right_shoulder',
'left_elbow',
'right_elbow',
'left_wrist',
'right_wrist',
'left_hip',
'right_hip',
'left_knee',
'right_knee',
'left_ankle',
'right_ankle',
'neck', # upper_neck
'head',
'left_bigtoe',
'left_smalltoe',
'left_heel',
'right_bigtoe',
'right_smalltoe',
'right_heel',
'left_thumb_1',
'left_thumb_2',
'left_thumb_3',
'left_thumb',
'left_index_1',
'left_index_2',
'left_index_3',
'left_index',
'left_middle_1',
'left_middle_2',
'left_middle_3',
'left_middle',
'left_ring_1',
'left_ring_2',
'left_ring_3',
'left_ring',
'left_pinky_1',
'left_pinky_2',
'left_pinky_3',
'left_pinky',
'right_thumb_1',
'right_thumb_2',
'right_thumb_3',
'right_thumb',
'right_index_1',
'right_index_2',
'right_index_3',
'right_index',
'right_middle_1',
'right_middle_2',
'right_middle_3',
'right_middle',
'right_ring_1',
'right_ring_2',
'right_ring_3',
'right_ring',
'right_pinky_1',
'right_pinky_2',
'right_pinky_3',
'right_pinky',
'face_contour_1',
'face_contour_2',
'face_contour_3',
'face_contour_4',
'face_contour_5',
'face_contour_6',
'face_contour_7',
'face_contour_8',
'face_contour_9',
'face_contour_10',
'face_contour_11',
'face_contour_12',
'face_contour_13',
'face_contour_14',
'face_contour_15',
'face_contour_16',
'face_contour_17',
'right_eyebrow_1',
'right_eyebrow_2',
'right_eyebrow_3',
'right_eyebrow_4',
'right_eyebrow_5',
'left_eyebrow_5',
'left_eyebrow_4',
'left_eyebrow_3',
'left_eyebrow_2',
'left_eyebrow_1',
'nosebridge_1',
'nosebridge_2',
'nosebridge_3',
'nosebridge_4',
'nose_1',
'nose_2',
'nose_3',
'nose_4',
'nose_5',
'right_eye_1',
'right_eye_2',
'right_eye_3',
'right_eye_4',
'right_eye_5',
'right_eye_6',
'left_eye_4',
'left_eye_3',
'left_eye_2',
'left_eye_1',
'left_eye_6',
'left_eye_5',
'mouth_1',
'mouth_2',
'mouth_3',
'mouth_4',
'mouth_5',
'mouth_6',
'mouth_7',
'mouth_8',
'mouth_9',
'mouth_10',
'mouth_11',
'mouth_12',
'lip_1',
'lip_2',
'lip_3',
'lip_4',
'lip_5',
'lip_6',
'lip_7',
'lip_8',
'right_eyeball',
'left_eyeball'
]
OPENPOSE_25_KEYPOINTS = [
'nose_openpose',
'neck_openpose', # 'upper_neck'
'right_shoulder_openpose',
'right_elbow_openpose',
'right_wrist_openpose',
'left_shoulder_openpose',
'left_elbow_openpose',
'left_wrist_openpose',
'pelvis_openpose', # 'mid_hip'
'right_hip_openpose',
'right_knee_openpose',
'right_ankle_openpose',
'left_hip_openpose',
'left_knee_openpose',
'left_ankle_openpose',
'right_eye_openpose',
'left_eye_openpose',
'right_ear_openpose',
'left_ear_openpose',
'left_bigtoe_openpose',
'left_smalltoe_openpose',
'left_heel_openpose',
'right_bigtoe_openpose',
'right_smalltoe_openpose',
'right_heel_openpose'
]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
__all__ = ['SE_Block',
]
'''see: Squeeze-and-Excitation Networks'''
def SE_Block(sym, data, num_out, name):
if type(num_out) is tuple:
num_mid = (int(sum(num_out)/16), 0)
else:
num_mid = int(num_out/16)
# global pooling
out = sym.Pooling(data=data, pool_type='avg', kernel=(1, 1), global_pool=True, stride=(1, 1), name=('%s_pool' % name))
# fc1
out = sym.FullyConnected(data=out, num_hidden=num_mid, no_bias=False, name=('%s_fc1' % name))
out = sym.Activation(data=out, act_type='relu', name=('%s_relu' % name))
# fc2
out = sym.FullyConnected(data=out, num_hidden=num_out, no_bias=False, name=('%s_fc2' % name))
out = sym.Activation(data=out, act_type='sigmoid', name=('%s_sigmoid' % name))
# rescale
out = sym.expand_dims(out, axis=2, name=('%s_expend1' % name))
out = sym.expand_dims(out, axis=3, name=('%s_expend2' % name)) # add spatial dims back
output = sym.broadcast_mul(data, out, name=('%s_mul' % name))
return output
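# A minimal usage sketch (assumes MXNet's symbol API is passed in as `sym`; the
# variable names and channel count below are illustrative, not from the original code):
#
#   import mxnet as mx
#   data = mx.sym.Variable('data')                       # e.g. a (N, 64, H, W) feature map
#   se_out = SE_Block(mx.sym, data, num_out=64, name='se1')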
# import mxnet as mx
#
# def SE_Block(sym, data, num_out, name):
# if type(num_out) is tuple:
# num_mid = (int(sum(num_out) / 16), 0)
# else:
# num_mid = int(num_out / 16)
#
# # global pooling
# out = sym.Pooling(data=data, pool_type='avg', kernel=(1, 1), global_pool=True, stride=(1, 1),
# name=('%s_pool' % name))
#
# # fc1
# out = mx.sym.Convolution(data=out, num_filter=num_mid, kernel=(1,1), stride=(1,1), pad=(0,0), name=('%s_fc1' % name))
# out = sym.Activation(data=out, act_type='relu', name=('%s_relu' % name))
#
# # fc2
# out = mx.sym.Convolution(data=out, num_filter=num_out, kernel=(1,1), stride=(1,1), pad=(0,0), name=('%s_fc2' % name))
# out = sym.Activation(data=out, act_type='sigmoid', name=('%s_sigmoid' % name))
#
# # rescale
# # out = sym.expand_dims(out, axis=2, name=('%s_expend1' % name))
# # out = sym.expand_dims(out, axis=3, name=('%s_expend2' % name)) # add spatial dims back
# output = sym.broadcast_mul(data, out, name=('%s_mul' % name))
#
# return output
|
__title__ = "access-client"
__version__ = "0.0.1"
__summary__ = "Client for accessai solutions"
__uri__ = "http://accessai.co"
__author__ = "ConvexHull Technology"
__email__ = "[email protected]"
__license__ = "Apache 2.0"
__release__ = True
|
'''
Write code to remove duplicates from an unsorted linked list.
FOLLOW UP
How would you solve this problem if a temporary buffer is not allowed?
'''
class Node(object):
def __init__(self, data=None, next_node=None):
self.data = data
self.next_node = next_node
def get_data(self):
return self.data
def set_data(self, data):
self.data = data
def get_next_node(self):
return self.next_node
def set_next_node(self, next_node):
self.next_node = next_node
dup_data = dict()
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def insert_node(self, data):
new_node = Node(data, self.head)
self.head = new_node
# print(data, " inserted!")
def traversal(self):
curr_node = self.head
while curr_node:
data_ = curr_node.get_data()
print(data_, end=" ")
curr_node = curr_node.get_next_node()
print("")
def build_dup_hashset(self):
global dup_data
curr_node = self.head
while curr_node:
data_ = curr_node.get_data()
if data_ in dup_data:
count = dup_data[data_]
dup_data[data_] = count + 1
else:
dup_data[data_] = 1
curr_node = curr_node.get_next_node()
def del_node(self, data):
curr_node = self.head
prev_node = None
while curr_node:
if curr_node.get_data() == data:
if prev_node:
prev_node.set_next_node(curr_node.get_next_node())
else:
self.head = curr_node.get_next_node()
print(data, " deleted!")
return
else:
prev_node = curr_node
curr_node = curr_node.get_next_node()
def del_dups(self):
global dup_data
self.build_dup_hashset()
for k, v in dup_data.items():
if v > 1:
for _ in range(v-1):
self.del_node(k)
dup_data[k] = v - 1
myLL = LinkedList()
print("Inserting nodes to linked list")
myLL.insert_node(10)
myLL.insert_node(20)
myLL.insert_node(50)
myLL.insert_node(30)
myLL.insert_node(20)
myLL.insert_node(50)
myLL.insert_node(50)
myLL.insert_node(20)
myLL.insert_node(10)
myLL.insert_node(60)
print("Traversing the linked list")
myLL.traversal()
print("Deleting duplicate data")
myLL.del_dups()
print("Traversing the de-duplicated linked list")
myLL.traversal()
|
# @lc app=leetcode id=2139 lang=python3
#
# [2139] Minimum Moves to Reach Target Score
#
# https://leetcode.com/problems/minimum-moves-to-reach-target-score/
# @lc code=start
class Solution:
def minMoves(self, target: int, maxDoubles: int) -> int:
steps = 0
while maxDoubles > 0 and target > 1:
if target % 2 == 0:
target //= 2
steps = steps + 1
maxDoubles -= 1
else:
target -= 1
steps = steps + 1
steps = steps + target - 1
return int(steps)
# @lc code=end
|
# Truncatable primes
def prime_test_list(numbers):
return all(prime_test(elem) for elem in numbers)
def prime_test(num):
try:
if num == 2:
return True
if num == 0 or num == 1 or num % 2 == 0:
return False
for i in range(3, int(num**(1/2))+1, 2):
if num % i == 0:
return False
else:
return True
except TypeError:
return all(prime_test(elem) for elem in num)
def make_truncated_list(integer):
len_str_int = len(str(integer))
return [int(integer/(10**i)) for i in range(len_str_int)] + [integer % (10**i) for i in range(1, len_str_int)]
counter = 11
truncatable_prime_numbers = []
while len(truncatable_prime_numbers) < 11:
if prime_test_list(make_truncated_list(counter)):
print("The number {} is prime_truncatable both ways.".format(counter))
truncatable_prime_numbers.append(counter)
counter += 2
print(truncatable_prime_numbers)
|
class RunnerMixin(object):
def add_artifacts(self, artifacts):
url = self._url('/runner/artifacts')
data = [{'filename': d} for d in artifacts]
return self._result(self.post(url, json={
'artifacts': data
}))
def add_logs(self, logs):
url = self._url('/runner/log')
return self._result(self.post(url, json={
'logs': logs
}))
def fetch_config(self):
url = self._url('/runner/config')
return self._result(self.get(url))
def get_runner_status(self):
url = self._url('/runner/status')
return self._result(self.get(url))
def set_runner_status(self, status, error=None):
url = self._url('/runner/status')
data = {
'status': status
}
if error:
data['error'] = error
return self._result(self.post(url, json=data))
|
# -*- encoding: utf-8 -*-
"""
@File : __init__.py.py
@Time : 2020/2/29 11:57 AM
@Author : zhengjiani
@Email : [email protected]
@Software: PyCharm
"""
|
def load(h):
return ({'abbr': 'a', 'code': 1, 'title': '70 332.5 40 10'},
{'abbr': 'b', 'code': 2, 'title': '72.5 0 50 45'},
{'abbr': 'c', 'code': 3, 'title': '57.5 345 32.5 17.5'},
{'abbr': 'd', 'code': 4, 'title': '57.5 2.5 32.5 42.5'},
{'abbr': 'e', 'code': 5, 'title': '75 340 30 45'},
{'abbr': 'f', 'code': 6, 'title': '60 310 40 0'},
{'abbr': 'm', 'code': 7, 'title': '66 354 30 42'},
{'abbr': 'm', 'code': 8, 'title': '66 -6 30 42'},
{'abbr': 'm', 'code': 9, 'title': '46 354 30 36'},
{'abbr': 'm', 'code': 10, 'title': '46 -6 30 36'},
{'abbr': 'm', 'code': 11, 'title': '46 -6 30 36.5'},
{'abbr': 'm', 'code': 12, 'title': '46 -6 35 17'},
{'abbr': 'm', 'code': 13, 'title': '46 12 40 20'},
{'abbr': 'm', 'code': 14, 'title': '81 262 9 42'},
{'abbr': 'm', 'code': 15, 'title': '81 -98 9 42'},
{'abbr': 'g', 'code': 16, 'title': '90 0 -90 359.5'},
{'abbr': 'g', 'code': 17, 'title': '81 0 -81 358.5'},
{'abbr': 'g', 'code': 18, 'title': '81 0 -81 359.5'},
{'abbr': 'g', 'code': 19, 'title': '90 0 -90 358.5'},
{'abbr': 'g', 'code': 20, 'title': '90 0 -90 357'},
{'abbr': 'g', 'code': 21, 'title': '90 0 -90 357.9'},
{'abbr': 'g', 'code': 22, 'title': '90 0 -90 359'},
{'abbr': 'g', 'code': 23, 'title': '81 0 -78 357'},
{'abbr': 'g', 'code': 24, 'title': '90 0 -90 357.5'},
{'abbr': 'g', 'code': 25, 'title': '90 0 -90 359.64'},
{'abbr': 's', 'code': 26, 'title': '-0.5 0 -81 359.5'},
{'abbr': 'n', 'code': 27, 'title': '81 0 0 359.5'},
{'abbr': 'b', 'code': 28, 'title': '66 9 40 42'},
{'abbr': 'm', 'code': 29, 'title': '44.5 -6 35 16'},
{'abbr': 'm', 'code': 30, 'title': '45 -6 35 14'},
{'abbr': 'm', 'code': 31, 'title': '45.976 12 40 19.956'},
{'abbr': 'g', 'code': 32, 'title': '89.731 0 -89.731 359.648'},
{'abbr': 'g', 'code': 33, 'title': '90 0 -90 358.5'},
{'abbr': 'n', 'code': 34, 'title': '90 0 30 359.9'},
{'abbr': 's', 'code': 35, 'title': '-30 0 -90 359.9'},
{'abbr': 'e', 'code': 36, 'title': '75 320 25 34.999'},
{'abbr': 't', 'code': 37, 'title': '30 260 0 359.9'},
{'abbr': 'u', 'code': 38, 'title': '30 100 0 220'},
{'abbr': 'g', 'code': 39, 'title': '0 0 0 0'})
|
def inv_lr_scheduler(param_lr, optimizer, iter_num, gamma=10,
power=0.75, init_lr=0.001,weight_decay=0.0005,
max_iter=10000):
#10000
"""Decay learning rate by a factor of 0.1 every lr_decay_epoch epochs."""
#max_iter = 10000
gamma = 10.0
lr = init_lr * (1 + gamma * min(1.0, iter_num / max_iter)) ** (-power)
i=0
for param_group in optimizer.param_groups:
param_group['lr'] = lr * param_lr[i]
i+=1
return lr
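# A usage sketch (assumes a PyTorch optimizer; the model and training loop are illustrative):
#
#   import torch
#   optimizer = torch.optim.SGD(model.parameters(), lr=0.001,
#                               momentum=0.9, weight_decay=0.0005)
#   param_lr = [group['lr'] for group in optimizer.param_groups]
#   for iter_num in range(10000):
#       lr = inv_lr_scheduler(param_lr, optimizer, iter_num)
#       ...  # forward pass, loss.backward(), optimizer.step()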
|
"""A module with errors used in the qtools3 package."""
class XlsformError(Exception):
pass
class ConvertError(Exception):
pass
class XformError(Exception):
pass
class QxmleditError(Exception):
pass
|
description = 'Devices for the first detector assembly'
pvpref = 'SQ:ZEBRA:mcu3:'
excludes = ['wagen2']
devices = dict(
nu = device('nicos_ess.devices.epics.motor.EpicsMotor',
description = 'Detector tilt',
motorpv = pvpref + 'A4T',
errormsgpv = pvpref + 'A4T-MsgTxt',
precision = 0.01,
),
detdist = device('nicos_ess.devices.epics.motor.EpicsMotor',
description = 'Detector distance',
motorpv = pvpref + 'W1DIST',
errormsgpv = pvpref + 'W1DIST-MsgTxt',
precision = 0.1,
),
ana = device('nicos.devices.generic.mono.Monochromator',
description = 'Dummy analyzer for TAS mode',
unit = 'meV'
),
)
|
class Person:
def __init__(self, fname, lname):
self.firstname = fname
self.lastname = lname
def printname(self):
print(self.firstname, self.lastname)
#Use the Person class to create an object, and then execute the printname method:
x = Person("John", "Doe")
x.printname()
|
name = "Maedeh Ashouri"
for i in name:
print(i)
|
# Copyright (c) 2017-2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
__all__ = ["ConfigError", "ConfigWarning"]
class ConfigError(ValueError):
pass
class ConfigWarning(Warning):
pass
|
# coding: utf-8
# In[3]:
class RuleClass:
attributes = []
attributes_value = []
attributes_cover = []
decision_cover =[]
false_cover =[]
Decision = ''
def __cmp__(self, other):
if self.strength > other.strength:
return 1
elif self.strength < other.strength:
return -1
else:
return 0
def __init__(self,attributes,attributes_value,attributes_cover,decision,decision_cover,false_cover):
self.strength = len(decision_cover)
self.specificity=len(attributes)
self.support = self.strength*self.specificity
self.attributes=attributes
self.attributes_value=attributes_value
self.attributes_cover = attributes_cover
self.Decision=decision
self.decision_cover = decision_cover
self.false_cover= false_cover
self.conditionalProbablity=0
if(len(attributes_cover)!=0):
self.conditionalProbablity= self.strength/len(attributes_cover)
def print_rule(self):
print('Rule ')
print(self.attributes, self.attributes_value, self.decision_cover)
print('Strength ',self.strength)
print('Specificity ',self.specificity)
print('Support ',self.support)
print('-----------------------------------------')
# In[2]:
|
'''
Module for parsing spatial and temporal premises.
Created on 16.07.2018
@author: Christian Breu <[email protected]>, Julia Mertesdorf <[email protected]>
'''
# Global variable enabling function-prints. Mainly used for debugging purposes.
PRINT_PARSING = False # global variable for whether to print parsing process or not
# GRAMMAR FOR SPATIAL PREMISES
GRAMMAR_SPATIAL = [[["NP-sing", "PRED"], "S 1", "s-prop"],
[["NP-sing", "NEGPRED"], "S 2", "sneg-sem"],
[["art-def", "N-sing"], "NP-sing 1", "npfun"],
[["rel"], "reln 1", "npfun"],
[["next-p", "to-p"], "reln 1", "npfun"],
[["in-p", "rel front-p", "of-p"], "reln 1", "npfun"],
[["in-p", "art-def", "front-p", "of-p"], "reln 1", "npfun"],
[["on-p", "art-def", "rel horiz", "of-p"], "reln 1", "npfun"],
[["on-p", "rel vert", "of-p"], "reln 1", "npfun"],
[["in-p", "art-def", "adj-same", "n-loc", "as-p"], "reln 1", "npfun"],
[["in-p", "art-indef", "adj-different", "n-loc", "to-p"], "reln 1", "npfun"],
[["in-p", "art-indef", "adj-different", "n-loc", "from-p"], "reln 1", "npfun"],
[["V-cop", "reln", "NP-sing"], "PRED 1", "pred"],
[["V-cop", "NEG", "reln", "NP-sing"], "NEGPRED 1", "neg-pred-sem"]]
# LEXICON FOR SPATIAL PREMISES
LEXICON_SPATIAL = [["a", ["art-indef", ["dummy"]]], ["the", ["art-def", ["dummy"]]],
["not", ["neg", ["neg-semantics"]]], ["of", ["of-p", ["dummy"]]],
["as", ["as-p", ["dummy"]]], ["is", ["V-cop", ["dummy"]]],
["in", ["in-p", ["dummy"]]],
["next", ["next-p", ["next-semantics"]]],
["to", ["to-p", ["dummy"]]], ["from", ["from-p", ["dummy"]]],
["on", ["on-p", ["dummy"]]], ["right", ["rel horiz", ["(1 0 0)"]]],
["left", ["rel horiz", ["(-1 0 0)"]]],
["front", ["rel front-p", ["(0 1 0)"]]],
["behind", ["rel", ["(0 -1 0)"]]], ["above", ["rel", ["(0 0 -1)"]]],
["top", ["rel vert", ["(0 0 -1)"]]], ["below", ["rel", ["(0 0 1)"]]],
["between", ["relat", ["between-semantics"]]],
["among", ["relat", ["among-semantics"]]],
["beside", ["relat", ["beside-semantics"]]],
["square", ["N-sing", ["[]"]]],
["triangle", ["N-sing", ["V"]]],
["circle", ["N-sing", ["O"]]], ["line", ["N-sing", ["I"]]],
["cross", ["N-sing", ["+"]]],
["ell", ["N-sing", ["L"]]], ["vee", ["N-sing", ["^"]]],
["star", ["N-sing", ["*"]]], ["ess", ["N-sing", ["S"]]]]
# GRAMMAR FOR TEMPORAL PREMISES
GRAMMAR_TEMPORAL = [[["NP-sing", "PRED"], "S 1", "s-prop"],
[["art-def", "N-sing"], "NP-sing 1", "npfun"],
[["N-sing"], "NP-sing 1", "npfun"],
[["rel"], "reln 1", "npfun"],
[["V-cop", "reln", "NP-sing"], "PRED 1", "pred"]]
# LEXICON FOR TEMPORAL PREMISES
LEXICON_TEMPORAL = [["the", ["art-def", ["dummy"]]],
["happens", ["V-cop", ["dummy"]]],
["is", ["V-cop", ["dummy"]]],
["after", ["rel", ["(1 0 0)"]]],
["before", ["rel", ["(-1 0 0)"]]],
["while", ["rel", ["(0 1 0)"]]],
["during", ["rel", ["(0 1 0)"]]]]
class Parser:
"""Class for parsing spatial or temporal premises. It can be set up for one
of the two premise types. Depending on this type, it uses a different grammar
and parsing algorithm.
"""
#global variables for parsing
sentence = [] # Currently parsed sentence (premise).
pstack = [] # Stack where all words and syntactic/semantic interpretations are put on.
rel_grammar = [] # the grammar that will be used for parsing
rel_lexicon = [] # the lexicon that will be used for parsing.
spatial = True # variable used to decide which reduces word should be used.
def __init__(self, spatial_parser=True):
"""constructor for the parser class. Takes a boolean argument which decides
what kind of premises the parser should be able to parse. If spatial is
set to False, the temporal grammar and lexicon will be used.
According to the boolean value, sets the correct grammar for the parsing.
"""
if spatial_parser:
self.rel_grammar = GRAMMAR_SPATIAL
self.rel_lexicon = LEXICON_SPATIAL
else:
self.rel_grammar = GRAMMAR_TEMPORAL
self.rel_lexicon = LEXICON_TEMPORAL
# keep the value as attribute within the class.
self.spatial = spatial_parser
def parse(self, premise):
"""
Function parses a given premise.
Works through the premise and replaces the words by their lexical category
and finally their semantics. Calls analyze to process words on the stack.
If that is not possible, shift to the next word of the premise and put it
on top of the stack. If that does not work either, backtracking could be used,
although backtracking is apparently never needed here.
At the end of the function, return the parsed premise.
Example:
returns [[0, 1, 0], ['A'], ['B']] for the premise:
["the", "A", "happens", "while", "the", "B"]
"""
# Initialize all global variables for the parsing process
self.sentence = premise
gram = self.rel_grammar
lex = self.rel_lexicon
self.pstack = []
anything_worked = True
while anything_worked:
if PRINT_PARSING:
print("-------------------------LOOP--------------------------")
print("pStack contains: ", self.pstack, "\n", "Rest of phrase is: ", self.sentence)
anything_worked = False
if (not self.sentence) and (len(self.pstack) >= 1) and (self.pstack[0][0] == "S"):
return self.pstack[0][1]
# Always try to analyze first.
if self.analyze(gram, lex, None):
anything_worked = True # Continue Parsing
# If analyze didn't work, try to shift to the next word (works if sentence not empty).
elif self.shift():
anything_worked = True # Continue Parsing
print("Parsing process fails!")
return None
def analyze(self, gram, lex, lhs):
"""
The function first tries to analyze the word on top of the pstack by replacing it with its
lexical category (by passing it to the reduce-word method). If this doesn't work,
the function tries to reduce the syntax to the respective semantics by
calling reduces_syntax instead.
If the word reduction worked, its result is added to the history; otherwise
return None.
"""
if not self.pstack:
if PRINT_PARSING:
print("Analyze doesn´t work - stack is emtpy")
return None
if self.spatial:
tmp = self.reduces_word_spatial(lex, lhs)# Contains word, if it exists.
else:
tmp = self.reduces_word_temporal(lex, lhs)# Contains word, if it exists.
if tmp != None:
if PRINT_PARSING:
print("ANALYZE WORKED WITH WORD REDUCTION")
return True
else:
tmp = self.reduces_syntax(gram, lhs)
if tmp != None:
if PRINT_PARSING:
print("ANALYZE WORKED WITH SYNTAX REDUCTION")
return True
return None
def reduces_word_temporal(self, lexicon, lhs):
"""
This function checks if the word at the top of the current stack has an entry in the
lexicon. If so, retrieves the lexical category of the current word.
Returns the lexical category of the word, or manually assigns the lexical category
"n-sing" to it via function-call of "check_var", in case the word is not in the lexicon.
The current word is removed from the stack and the list containing the word and the found
lexical category (or the manually assigned category) is inserted at the top of the stack.
Return the list of the current word and its lexical category.
"""
if not self.pstack:
return None
pstack_at0 = self.pstack[0]
len_pstack_at0 = 1
if isinstance(pstack_at0, list):
len_pstack_at0 = len(pstack_at0)
if len_pstack_at0 == 2:
if PRINT_PARSING:
print("+++++++++++++++++++++REDUCE WORD FAILED+++++++++++++++++")
return None
tmp = self.lexical_category(self.pstack[0], lexicon, lhs)
# Found lexical category! Add it to the top of the pstack.
if tmp != None:
if PRINT_PARSING:
print("REDUCE WORD", self.pstack[0], " - lexical category is", tmp)
tmp2 = [tmp[0], tmp[1]]
self.pstack = self.pstack[1:]
self.pstack.insert(0, tmp2)
return tmp2
# Couldn't find lexical category! Manually assign "n-sing" to it, add to pstack and return.
tmp3 = self.check_var(self.pstack[0])
self.pstack = self.pstack[1:]
self.pstack.insert(0, tmp3)
if PRINT_PARSING:
print("REDUCE WORD", self.pstack[0], "-- lexical category is", tmp3)
return tmp3
def reduces_word_spatial(self, lexicon, lhs):
"""OK [6]
this function checks if the top of the current stack has an entry in the
lexicon. If so, retrieves the lexical category for the word.
The top of the stack gets sliced of and a list containing the two
elements of the lexical category are put on the stack.
Returns the lexical category or None, if the word is not in the lexicon.
Example:
top of the pstack is "in". The method will replace this by
["in-p", ["dummy"]] at the top of the pstack.
"""
if not self.pstack:
return None#or False
#print(self.pstack)
if self.word(self.pstack[0], lexicon):
tmp = self.lexical_category(self.pstack[0], lexicon, lhs)
#print("lexical cat:", tmp)
if tmp != None: # if top of stack is word in lexicon
# only use first two entries(if there are more)
new_tmp = [tmp[0], tmp[1]]
# print("new tmp:", new_tmp)
self.pstack = self.pstack[1:]
self.pstack.insert(0, new_tmp)
# print("reduces_word worked", new_tmp)
return new_tmp
return None
@staticmethod
def word(item, lex):
"""only used for spatial parsing.
Takes an item and a lexicon as a list. Returns true, if the item has an
entry in the lexicon, else returns false.
"""
if not lex:
return False
# iterate over the lexicon and check whether an entry matches the item
for entry in lex:
if entry[0] == item:#entry is another list
return True
return False
@staticmethod
def check_var(list_item):
"""only used for temporal parsing
Function manually assigns the current observed listItem the lexical category "n-sing"
and returns the list of both the item and the category to the function "reduces_word".
"""
var_list = ["N-sing", [list_item]]
return var_list
def reduces_syntax(self, gram, lhs):
"""
The function calls strip_semantic, rule_list and syntax_rule and thereby
gets the first applicable syntax rule that fits the information on the stack.
If there is no such rule, None is returned; otherwise the function calls compose
with the found rule. This way the semantic elements of the pstack are
processed further with each step.
"""
if PRINT_PARSING:
print("TRY Reduce Syntax with gram", gram, "and lhs", lhs)
stripped_semantic = self.strip_semantic(self.pstack)
appl_rules = self.rule_list(stripped_semantic, gram)
current_rule = self.syntax_rule(appl_rules, lhs, gram) # Usually returns first rule of list
if current_rule != None:
if PRINT_PARSING:
print("REDUCES SYNTAX WORKED")
return self.compose(current_rule, gram)
else:
if PRINT_PARSING:
print("+++++++++++++++++++++++++REDUCES SYNTAX FAILED+++++++++++++++++++++++++++")
return None
def compose(self, rule, gram):
"""
This function first computes new_syntax (the new syntax which can be
applied to the stack, f.i. art-def & N-Sing = NP-sing), new_sem (the
corresponding semantics to that rule) and the arguments (contains the
semantic of all words which are relevant for the rule).
Depending from the value of new_sem, it calls a specific function to
get a certain part of these arguments. new_sem is then replaced by the
outcome of its function which was called.
The result of the composition (new syntax and new semantics) is then
placed on top of the pstack, while all old symbols on the pstack which
were used for the syntax reduction are deleted.
The function then returns the complete lhs of the rule and the new
semantics.
"""
if PRINT_PARSING:
print("\n COMPOSE with rule: ", rule)
new_syntax = self.lhs_of_rule(rule, gram).split()[0] # removes the " 1"
new_sem = self.sem_of_rule(rule)
reversed_rhs = list(reversed(self.rhs_of_rule(rule)))
arguments = self.args_from_stack(reversed_rhs, self.pstack)
if PRINT_PARSING:
print("New_syntax:", new_syntax, " new_semantic:", new_sem, " Arguments:", arguments)
# Call function with name given in new_sem & replace new_sem by outcome of called function
if new_sem == "npfun":
new_sem = self.npfun(arguments) # Returns first non-dummy function from arguments.
if PRINT_PARSING:
print("new_sem after npfun:", new_sem)
elif new_sem == "pred":
new_sem = self.pred(arguments) # Shifts argument behind relation.
if PRINT_PARSING:
print("new_sem after pred:", new_sem)
elif new_sem == "s-prop":
new_sem = self.s_proposition(arguments) # Assembles premise to desired pattern.
if PRINT_PARSING:
print("new_sem after s_propositon", new_sem)
self.pstack = self.chop(self.rhs_of_rule(rule), self.pstack)
self.pstack.insert(0, [new_syntax, new_sem])
return [self.lhs_of_rule(rule, gram), new_sem]
@staticmethod
def args_from_stack(rev_rhs, stack):
"""
Function takes the reversed right hand side of a rule and the stack as input and iterates
over them. It returns a list of all the corresponding semantic part of elements of
the stack that match an element of the reversed rhs (at correct position)
Example:
rev_rhs is of the form [a, b] and stack of the form [[a, 1], [b, 2]]
(1 and 2 can be lists as well); a and b are phrase functions like
"N-sing", "art-def" (and 1 & 2 the semantics).
The function appends the second element of each list of the stack, which fits the
element of rev_rhs (a and b) to a list and returns this list when the iteration ends
([1, 2] is returned).
"""
if PRINT_PARSING:
print("ARGS_FROM_STACK: rev rhs is: ", rev_rhs, " stack is ", stack)
if not rev_rhs:
if PRINT_PARSING:
print("no rev rhs, stop args_from_stack")
return None
result = []
for count, obj in enumerate(rev_rhs):
if obj == stack[count][0]:
result.append(stack[count][1])
return result
def shift(self):
"""
Adds the current(first) word of the sentence to the pstack and history,
then deletes it from the sentence. Returns True.
If the sentence is empty, returns None.
"""
if PRINT_PARSING:
print("--------------------------SHIFT to the next word------------------------")
if not self.sentence:
return None
self.pstack = [self.sentence[0]] + self.pstack
self.sentence = self.sentence[1:]
return True
def lexical_category(self, item, lexicon, lc_):
"""
Returns the lexical category of a given word, if the word is in the
lexicon. Iterates through the lexicon and checks, if the item equals
a word in the lexicon and returns the entry for that word in the
lexicon.
"""
for lex_entry in lexicon:
if item == lex_entry[0]:
return self.legalcategory(lc_, lex_entry[1])#the next item after lc in lex_entry[1]
if PRINT_PARSING:
print("symbol not in lexicon")
return None #if item is not found
def legalcategory(self, lc_, lis):
"""
Takes a list of a lexical category from the lexicon and an argument lc_.
Returns the next item in the list after lc_. This way there can be more
lexical categories than one. If lc_ is None, returns the whole lexical
category.
"""
if not lc_:
return lis
print("lc is not empty!")
return self.member_lis(lc_, lis)[0] #the next item after lc in lis
def syntax_rule(self, rules_list, lhs, gram):
"""
Returns the rhs to a given lhs in the rule_list. If lhs is None, returns
the first element of the given rule_list. In parsing, the method is only
called with lhs = None.
If lhs is not None, returns the complete grammatical rule after the item
that matches with lhs.
"""
if not rules_list:
return None
if lhs is None: # Lhs usually none in parsing
return rules_list[0]
list_p1 = self.expand(lhs, gram)
list_p2 = self.rule_semantics(lhs, gram)
list_complete = list_p1+lhs+list_p2
return self.member_lis(list_complete, rules_list)[0]
def rule_list(self, syn_stack, gram):
"""
Function iterates through all grammar rules and searches for matches between the
stack and the grammar rules. All rules that fit the information on the stack
are added to the result list.
"""
if not gram:
return None
result_list = []
for gra in gram:
if self.match_rule(list(reversed(self.rhs_of_rule(gra))), syn_stack):
result_list.append(gra)
return result_list
@staticmethod
def match_rule(reversed_rule, syn_stack):
"""
Function returns True if a given rule (which is reversed before) matches
the information on top of the stack. Returns False otherwise.
"""
if((not syn_stack) or (len(syn_stack) < len(reversed_rule))):
return False
for counter, value in enumerate(reversed_rule): # Iterate over the reversed rule
if value != syn_stack[counter]:
return False # The rules don't match!
return True # No differences found between the two rules, hence the loop went through
@staticmethod
def member_lis(list1, list2):
"""
Function takes two lists. Iterates over list2 and returns the remainder of list2
if list1 equals the current element of list2.
If list2 is None or list1 couldn't be found, return False.
"""
if PRINT_PARSING:
print("member lis - list1 is ", list1, "list2 is ", list2)
if list2 is None:
return False
for count, value in enumerate(list2):
if list1 == value:
return list2[count+1:]#return the rest of list2
return False
@staticmethod
def chop(list1, stack):
"""
Function returns the stack minus the same number of items as in list1.
"""
return stack[len(list1):] # Deletes the first len(list) elements from stack.
def expand(self, lhs, gram):
"""
For a given left hand side of a rule, find the matching right
hand side of this rule in the grammar, and return it.
Return False if there is no fitting right hand side.
"""
for count, value in enumerate(gram):
if lhs == self.lhs_of_rule(value, gram[count:]):
return self.rhs_of_rule(value)
if PRINT_PARSING:
print("reduction not in grammar")
return False
@staticmethod
def strip_semantic(stack):
"""
Iterates through the stack and returns all the syntactic elements of the
stack(the first item in the lists from the stack).
"""
result = []
for item in stack:
result.append(item[0])
return result
def lhs_of_rule(self, rule, gram):
"""
Function takes a rule and the grammar and checks, whether this rule is contained
in the grammar. If that's the case, it returns the left-hand-side of the rule.
"""
if rule == self.rule_in_grammar(rule, gram):
return rule[1]
return None
@staticmethod
def rule_in_grammar(rule, grammar):
"""
Function takes a rule and the grammar and searches for this rule in the grammar.
If the rule is found, it is returned, otherwise the function returns "None".
"""
for gram in grammar:
if rule == gram:
return rule
print("rule not in grammar")
return None
@staticmethod
def rhs_of_rule(rule):
"""
Returns the right hand side of the given rule.
"""
return rule[0]
@staticmethod
def sem_of_rule(rule):
"""
Function returns the semantics of the rule, which is the 3rd element of the list.
"""
return rule[2]
def rule_semantics(self, lhs, gram):
"""
Function takes an lhs of a rule, retrieves the whole rule and then returns the
semantics of the rule (third item of the rhs).
"""
return self.sem_of_rule(self.expand(lhs, gram))
@staticmethod
def s_proposition(list1):
"""
This function assembles the relation and the two arguments into a list, for
instance [[1, 0, 0], ["A"], ["B"]].
This is the final pattern of the parsed premise that will be returned.
"""
return [list1[0][0][0], list1[1], list1[0][1][0]]
@staticmethod
def npfun(list1):
"""
Function returns the first item that is not "dummy" from a given list.
"""
if list1 is None:
return None
for list_item in list1:
if list_item[0] != "dummy": # List items are lists, so look at the first element
return list_item
return None
@staticmethod
def pred(list1):
"""
Shifts argument in list behind the relation.
"""
return [list1[1], [list1[0]]]
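# A usage sketch based on the example in the parse() docstring (not re-verified here):
#
#   parser = Parser(spatial_parser=False)      # use the temporal grammar and lexicon
#   result = parser.parse(["the", "A", "happens", "while", "the", "B"])
#   # expected, per the docstring: [[0, 1, 0], ['A'], ['B']]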
|
def convert_lambda_to_def(string):
args=string[string.index("lambda ")+7:string.index(":")]
name=string[:string.index(" ")]
cal=string[string.index(":")+2:]
return f"def {name}({args}):\n return {cal}" |
# -*- coding: utf-8 -*-
class SigfoxBaseException(BaseException):
pass
class SigfoxConnectionError(SigfoxBaseException):
pass
class SigfoxBadStatusError(SigfoxBaseException):
pass
class SigfoxResponseError(SigfoxBaseException):
pass
class SigfoxTooManyRequestsError(SigfoxBaseException):
pass
|
# 63. Write a program that reads an integer N and prints the first N terms of the Fibonacci sequence.
termos = int(input('Quantos termos você quer mostrar? '))
termo1 = 0
termo2 = 1
cont = 0
while cont < termos:
    print(termo1, end=' → ')
    termo1, termo2 = termo2, termo1 + termo2
    cont += 1
print('FIM')
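# Example run: for an input of 5 the program prints
# 0 → 1 → 1 → 2 → 3 → FIM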
|
#To find factorial of number
num = int(input('N='))
factorial = 1
if num<0:
print('Number is not accepted')
elif num==0:
print(1)
else:
for i in range(1,num+1):
factorial = factorial * i
print(factorial)
|
income = float(input())
gross_pay = income
taxes_owed = income * .12
net_pay = gross_pay - taxes_owed
print(gross_pay)
print(taxes_owed)
print(net_pay)
|
'''Plotting utility functions'''
def remove_top_right_borders(ax):
'''Remove top and right borders from Matplotlib axis'''
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
|
first_wire = ['R8', 'U5', 'L5', 'D3']
second_wire = ['U7', 'R6', 'D4', 'L4']
def wire_path(wire):
path = {}
x = 0
y = 0
count = 0
dirs = {"R": 1,
"L": -1,
"U": 1,
"D": -1}
for i in wire:
dir = i[0]
mov = int(i[1:])
for _ in range(mov):
count += 1
if dir in "RL":
x += dirs[dir]
else:
y += dirs[dir]
path[(x, y)] = count
return path
def part1(first_wire, second_wire):
path_first = wire_path(first_wire)
path_second = wire_path(second_wire)
inter = [abs(i[0]) + abs(i[1]) for i in path_first.keys() if i in path_second]
return min(inter)
def part2(first_wire, second_wire):
path_first = wire_path(first_wire)
path_second = wire_path(second_wire)
inter = [path_first[i] + path_second[i] for i in path_first.keys() if i in path_second]
return min(inter)
# Test
first_wire = ['R8', 'U5', 'L5', 'D3']
second_wire = ['U7', 'R6', 'D4', 'L4']
assert (part1(first_wire, second_wire) == 6)
assert (part2(first_wire, second_wire) == 30)
with open("input.txt", 'r') as file:
first_wire = [i for i in file.readline().split(',')]
second_wire = [i for i in file.readline().split(',')]
print(part1(first_wire, second_wire))
print(part2(first_wire, second_wire))
|
class Solution:
def calculate(self, s: str) -> int:
stack = [1]
sign = 1
res = 0
i = 0
while i < len(s):
if s[i].isdigit():
val = 0
while i < len(s) and s[i].isdigit():
val = val * 10 + int(s[i])
i += 1
res += val * sign
else:
if s[i] == "+":
sign = stack[-1]
elif s[i] == "-":
sign = -stack[-1]
elif s[i] == "(":
stack.append(sign)
elif s[i] == ")":
stack.pop()
elif s[i] == " ":
pass
else:
raise ValueError("Unexpected character")
i += 1
return res
|
class Astronaut:
def __init__(self, name, age=30, agency='NASA'):
self.name = name
self.agency = agency
self.age = age
jose = Astronaut(name='José Jiménez')
ivan = Astronaut(name='Иван Иванович', agency='Roscosmos')
jose.agency # NASA
ivan.agency # Roscosmos
|
class Calculator:
def __init__(self, ss, am, fsp, sc, isp, bc, cgtr):
self.ss = ss
self.am = int(am)
self.fsp = float(fsp)
self.sc = float(sc)
self.isp = float(isp)
self.bc = float(bc)
self.cgtr = float(cgtr)
def get_pc(self):
pc = self.am * self.fsp
return pc
def get_cost(self):
proceeds = self.get_pc()
commission = self.bc + self.sc
price = self.am * self.isp
tax = (self.cgtr / 100) * (proceeds - commission - price)
costs = price + commission + tax
return costs
def get_net_profit(self):
profit = self.get_pc() - self.get_cost()
return profit
def get_roi(self):
roi = self.get_net_profit() / self.get_cost()
return "{:.2%}".format(roi)
def get_expect_fsp(self):
commission = self.bc + self.sc
expect_fsp = commission / self.am + self.isp
return expect_fsp
def get_purchase_price(self):
return self.am * self.isp
def get_tax(self):
tax = self.cgtr/100 * (self.get_pc() - self.bc - self.sc - (self.am * self.isp))
return tax
|
'''
Write a program that reads four grades for each of 10 students, computes and
stores each student's average in a list, and
prints the number of students whose average is greater than or equal to 7.0.
'''
medias = []
for x in range(1, 11):
soma, media = 0, 0
for y in range(1, 5):
n = float(input(f'Digite sua {y}ª nota do {x}º Aluno: '))
soma += n
media = soma / 4
print('='*30)
medias.append(media)
aluno = cont = 0
for media in medias:
if media >= 7:
aluno += 1
if media < 7:
cont += 1
print(f'Número de alunos com média maior ou igual a 7: {aluno}')
print(f'Número de alunos com média abaixo de 7: {cont}')
|
"""igcollect - Library Entry Point
Copyright (c) 2018 InnoGames GmbH
"""
|
# -*- coding: utf-8 -*-
"""The Windows Registry definitions."""
KEY_PATH_SEPARATOR = '\\'
# The Registry value types.
REG_NONE = 0
REG_SZ = 1
REG_EXPAND_SZ = 2
REG_BINARY = 3
REG_DWORD = 4
REG_DWORD_LITTLE_ENDIAN = 4
REG_DWORD_LE = REG_DWORD_LITTLE_ENDIAN
REG_DWORD_BIG_ENDIAN = 5
REG_DWORD_BE = REG_DWORD_BIG_ENDIAN
REG_LINK = 6
REG_MULTI_SZ = 7
REG_RESOURCE_LIST = 8
REG_FULL_RESOURCE_DESCRIPTOR = 9
REG_RESOURCE_REQUIREMENTS_LIST = 10
REG_QWORD = 11
# Kept for backwards compatibility for now.
REG_RESOURCE_REQUIREMENT_LIST = REG_RESOURCE_REQUIREMENTS_LIST
INTEGER_VALUE_TYPES = frozenset([REG_DWORD, REG_DWORD_BIG_ENDIAN, REG_QWORD])
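# Example: the constants above let callers classify raw value types, e.g.
#
#   value_type = REG_DWORD
#   if value_type in INTEGER_VALUE_TYPES:
#       ...  # interpret the value data as an integer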
|
# V0
# V1
# https://blog.csdn.net/fuxuemingzhu/article/details/79343638
# IDEA : DFS
class Solution(object):
def combinationSum2(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
candidates.sort()
print(candidates)
res = []
self.dfs(candidates, target, 0, res, [])
return res
def dfs(self, nums, target, index, res, path):
if target < 0:
return
elif target == 0:
res.append(path)
return
for i in range(index, len(nums)):
if i > index and nums[i] == nums[i-1]:
continue
self.dfs(nums, target - nums[i], i + 1, res, path + [nums[i]])
# V1'
# IDEA : BACKTRACKING
# V2
# Time: O(k * C(n, k))
# Space: O(k)
class Solution(object):
# @param candidates, a list of integers
# @param target, integer
# @return a list of lists of integers
def combinationSum2(self, candidates, target):
result = []
self.combinationSumRecu(sorted(candidates), result, 0, [], target)
return result
def combinationSumRecu(self, candidates, result, start, intermediate, target):
if target == 0:
result.append(list(intermediate))
prev = 0
while start < len(candidates) and candidates[start] <= target:
if prev != candidates[start]:
intermediate.append(candidates[start])
self.combinationSumRecu(candidates, result, start + 1, intermediate, target - candidates[start])
intermediate.pop()
prev = candidates[start]
start += 1
|
'''
https://leetcode.com/problems/maximum-length-of-pair-chain/solution/
You are given n pairs of numbers. In every pair, the first number is always smaller than the second number.
Now, we define a pair (c, d) can follow another pair (a, b) if and only if b < c. Chain of pairs can be formed in this fashion.
Given a set of pairs, find the length of the longest chain which can be formed. You needn't use up all the given pairs. You can select pairs in any order.
Example 1:
Input: [[1,2], [2,3], [3,4]]
Output: 2
Explanation: The longest chain is [1,2] -> [3,4]
Note:
The number of given pairs will be in the range [1, 1000]
'''
class Solution:
def findLongestChain(self, pairs: List[List[int]]) -> int:
intervals = sorted(pairs, key=lambda v: v[1])
lastInterval = intervals.pop(0)
chainLen = 1
for interval in intervals:
s, e = interval[0], interval[1]
if s > lastInterval[1]:
lastInterval = interval
chainLen = chainLen + 1
return chainLen
|
class Error(Exception):
"""Base class for exceptions in this module."""
pass
class RobotError(Error):
"""Exception raised for robot detection (solvable).
Attributes:
message -- explanation of the error
"""
def __init__(self):
self.message = "Robot Check Detected."
class AQError(Error):
"""Exception raised for auto-query detection.
Attributes:
message -- explanation of the error.
"""
def __init__(self):
self.message = "Automated Queries Check Detected."
class SearchError(Error):
"""Exception raised for empty search results.
Attributes:
message -- explanation of the error.
"""
def __init__(self):
self.message = "No search results."
class GScholarError(Error):
"""Exception raised for auto-query detection.
Attributes:
message -- explanation of the error (non-solvable).
"""
def __init__(self):
self.message = "No more alternative addresses. Restart this program." |
lilly_dict = {"name": "Lilly",
"age": 18,
"pets": False,
"hair_color": 'Black'}
class Person(object):
def __init__(self, name, age, pets, hair_color):
self.name = name
self.age = age
self.pets = pets
self.hair_color = hair_color
self.hungry = True
def eat(self, food):
print('I am eating ' + food)
self.hungry = False
def __str__(self):
return 'Name: ' + self.name
lilly = Person(
name = "Lilly",
age = 18,
pets = False,
hair_color = "Black")
david = Person(
name = "David",
age = 16,
pets = False,
hair_color = "Black")
print(lilly.name)
print(lilly.hungry)
print(lilly.eat("banana"))
print(david.name)
|
"""
Exercise 13 - Inventory Management
"""
def create_inventory (items):
"""
:param items: list - list of items to create an inventory from.
:return: dict - the inventory dictionary.
"""
return add_items ({}, items)
def add_items (inventory, items):
"""
:param inventory: dict - dictionary of existing inventory.
:param items: list - list of items to update the inventory with.
:return: dict - the inventory dictionary updated with the new items.
"""
return add_or_decrement_items (inventory, items, 'add')
def decrement_items (inventory, items):
"""
:param inventory: dict - inventory dictionary.
:param items: list - list of items to decrement from the inventory.
:return: dict - updated inventory dictionary with items decremented.
"""
return add_or_decrement_items (inventory, items, 'minus')
def remove_item (inventory, item):
"""
:param inventory: dict - inventory dictionary.
:param item: str - item to remove from the inventory.
:return: dict - updated inventory dictionary with item removed.
"""
if item not in inventory:
return inventory
inventory.pop (item)
return inventory
def list_inventory (inventory):
"""
:param inventory: dict - an inventory dictionary.
:return: list of tuples - list of key, value pairs from the inventory dictionary.
"""
result = []
for element, quantity in inventory.items():
if quantity > 0:
result.append ((element, quantity))
return result
def add_or_decrement_items (inventory, items, operation):
"""
:param inventory: dict - dictionary of existing inventory.
:param items: list - list of items to update the inventory with.
:param operation: string - 'add' or 'minus'.
:return: dict - the inventory dictionary updated with the new items.
"""
for item in items:
if item not in inventory:
if operation == 'add':
inventory[item] = 1
else:
if operation == 'add':
inventory[item] += 1
else:
if inventory[item] > 0:
inventory[item] -= 1
return inventory
|
startMsg = 'counting...'
endMsg = 'launched !'
count = 10
print (startMsg)
while count >= 0 :
print (count)
count -= 1
print (endMsg)
|
# 1. Two Sum
class Solution:
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# n=len(nums)
# for i in range(n):
# result=target-nums[i]
# for j in range(i+1,n):
# if result==nums[j]:
# return [i,j]
dicts={}
for i,item in enumerate(nums):
if item in dicts:
return [dicts[item],i]
else:
dicts[target-item]=i
|
def multiple_letter_count(string):
return {letter: string.count(letter) for letter in string}
print(multiple_letter_count('awesome'))
|
animals = ['bear', 'python', 'peacock', 'kangaroo', 'whale', 'platypus']
print("The animal at 1. ", animals[1])
print("The third (3rd) animal. ", animals[3-1])#This is the n - 1 trick
print("The first (1st) animal. ", animals[0])
print("The animal at 3. ", animals[3])
print("The fifth (5th) animal. ", animals[4])
print("The animal at 2. ", animals[2])
print("The sixth (6th) animal. ", animals[5])
print("The animal at 4. ", animals[4])
#Study Drill 2: Here is another list to play with
hobbies = ['Running', 'Programming', 'Flying', 'Gaming', 'Sleeping']
|
"""
Created on August 03 2017
@author: [email protected]
"""
# -*- coding: utf-8 -*-
# Given the Python list a = [99, 66, 25, 10, 3], which is already sorted, reverse the order of its elements in place.
a = [99,66,25,10,3]
if __name__ == "__main__":
N = len(a)
print(a)
print(len(a)/2)
for i in range(len(a)//2):
a[i],a[N-i-1] = a[N-i-1],a[i]
print(a)
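# Note: the same in-place reversal can be done with a.reverse(); a[::-1] returns a reversed copy.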
|
"""
Pattern matching with mapping—requires Python ≥ 3.10
# tag::DICT_MATCH_TEST[]
>>> b1 = dict(api=1, author='Douglas Hofstadter',
... type='book', title='Gödel, Escher, Bach')
>>> get_creators(b1)
['Douglas Hofstadter']
>>> from collections import OrderedDict
>>> b2 = OrderedDict(api=2, type='book',
... title='Python in a Nutshell',
... authors='Martelli Ravenscroft Holden'.split())
>>> get_creators(b2)
['Martelli', 'Ravenscroft', 'Holden']
>>> get_creators({'type': 'book', 'pages': 770})
Traceback (most recent call last):
...
ValueError: Invalid 'book' record: {'type': 'book', 'pages': 770}
>>> get_creators('Spam, spam, spam')
Traceback (most recent call last):
...
ValueError: Invalid record: 'Spam, spam, spam'
# end::DICT_MATCH_TEST[]
"""
# tag::DICT_MATCH[]
def get_creators(record: dict) -> list:
match record:
case {'type': 'book', 'api': 2, 'authors': [*names]}: # <1>
return names
case {'type': 'book', 'api': 1, 'author': name}: # <2>
return [name]
case {'type': 'book'}: # <3>
raise ValueError(f"Invalid 'book' record: {record!r}")
case {'type': 'movie', 'director': name}: # <4>
return [name]
case _: # <5>
raise ValueError(f'Invalid record: {record!r}')
# end::DICT_MATCH[]
|
"""Faça um programa que leia um número qualquer e mostre o seu fatorial.
Exemplo: 5! = 5 x 4 x 3 x 2 x 1 = 120"""
n = int(input('Digite um número para calcular seu fatorial: '))
c = n
f = 1
while c > 0:
print('{}'.format(c), end='')
print(' x ' if c > 1 else ' = ', end= '')
f = f * c
c-= 1
print('{}'.format(f))
|
__author__ = "Douglas Lassance"
__copyright__ = "2020, Douglas Lassance"
__email__ = "[email protected]"
__license__ = "MIT"
__version__ = "0.1.0.dev5"
|
# Copyright (c) 2019 The DAML Authors. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load("//bazel_tools:pkg.bzl", "pkg_tar")
# taken from rules_proto:
# https://github.com/stackb/rules_proto/blob/f5d6eea6a4528bef3c1d3a44d486b51a214d61c2/compile.bzl#L369-L393
def get_plugin_runfiles(tool, plugin_runfiles):
"""Gather runfiles for a plugin.
"""
files = []
if not tool:
return files
info = tool[DefaultInfo]
if not info:
return files
if info.files:
files += info.files.to_list()
if info.default_runfiles:
runfiles = info.default_runfiles
if runfiles.files:
files += runfiles.files.to_list()
if info.data_runfiles:
runfiles = info.data_runfiles
if runfiles.files:
files += runfiles.files.to_list()
if plugin_runfiles:
for target in plugin_runfiles:
files += target.files.to_list()
return files
def _proto_gen_impl(ctx):
src_descs = [src.proto.direct_descriptor_set for src in ctx.attr.srcs]
dep_descs = [dep.proto.direct_descriptor_set for dep in ctx.attr.deps]
descriptors = src_descs + dep_descs
sources_out = ctx.actions.declare_directory(ctx.attr.name + "-sources")
descriptor_set_delim = "\;" if _is_windows(ctx) else ":"
args = []
args += [
"--descriptor_set_in=" + descriptor_set_delim.join([d.path for d in descriptors]),
]
args += [
"--{}_out={}:{}".format(ctx.attr.plugin_name, ",".join(ctx.attr.plugin_options), sources_out.path),
]
plugins = []
plugin_runfiles = []
if ctx.attr.plugin_name not in ["java", "python"]:
plugins = [ctx.executable.plugin_exec]
plugin_runfiles = get_plugin_runfiles(ctx.attr.plugin_exec, ctx.attr.plugin_runfiles)
args += [
"--plugin=protoc-gen-{}={}".format(ctx.attr.plugin_name, ctx.executable.plugin_exec.path),
]
inputs = []
for src in ctx.attr.srcs:
src_root = src.proto.proto_source_root
for direct_source in src.proto.direct_sources:
path = ""
# in some cases the paths of src_root and direct_source are only partially
# overlapping. the following for loop finds the maximum overlap of these two paths
for i in range(len(src_root) + 1):
if direct_source.path.startswith(src_root[-i:]):
path = direct_source.path[i:]
else:
# this noop is needed to make bazel happy
noop = ""
path = direct_source.short_path if not path else path
path = path[1:] if path.startswith("/") else path
inputs += [path]
args += inputs
ctx.actions.run_shell(
mnemonic = "ProtoGen",
outputs = [sources_out],
inputs = descriptors + [ctx.executable.protoc] + plugin_runfiles,
command = "mkdir -p " + sources_out.path + " && " + ctx.executable.protoc.path + " " + " ".join(args),
tools = plugins,
use_default_shell_env = True,
)
# since we only have the output directory of the protoc compilation,
# we need to find all the files below sources_out and add them to the zipper args file
zipper_args_file = ctx.actions.declare_file(ctx.label.name + ".zipper_args")
ctx.actions.run_shell(
mnemonic = "CreateZipperArgsFile",
outputs = [zipper_args_file],
inputs = [sources_out],
command = "find -L {src_path} -type f | sed -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | sort > {args_file}".format(
src_path = sources_out.path,
args_file = zipper_args_file.path,
),
progress_message = "zipper_args_file %s" % zipper_args_file.path,
use_default_shell_env = True,
)
# Call zipper to create srcjar
zipper_args = ctx.actions.args()
zipper_args.add("c")
zipper_args.add(ctx.outputs.out.path)
zipper_args.add("@%s" % zipper_args_file.path)
ctx.actions.run(
executable = ctx.executable._zipper,
inputs = [sources_out, zipper_args_file],
outputs = [ctx.outputs.out],
arguments = [zipper_args],
progress_message = "srcjar %s" % ctx.outputs.out.short_path,
)
proto_gen = rule(
implementation = _proto_gen_impl,
attrs = {
"srcs": attr.label_list(allow_files = True),
"deps": attr.label_list(providers = [ProtoInfo]),
"plugin_name": attr.string(),
"plugin_exec": attr.label(
cfg = "host",
executable = True,
),
"plugin_options": attr.string_list(),
"plugin_runfiles": attr.label_list(
default = [],
allow_files = True,
),
"protoc": attr.label(
default = Label("@com_google_protobuf//:protoc"),
cfg = "host",
allow_files = True,
executable = True,
),
"_zipper": attr.label(
default = Label("@bazel_tools//tools/zip:zipper"),
cfg = "host",
executable = True,
allow_files = True,
),
},
outputs = {
"out": "%{name}.srcjar",
},
output_to_genfiles = True,
)
def _is_windows(ctx):
return ctx.configuration.host_path_separator == ";"
|
'''
Unit Test Cases for JSON2HTML
Description - python wrapper for converting JSON to HTML Table format
(c) 2013 Varun Malhotra. MIT License
'''
__author__ = 'Varun Malhotra'
__version__ = '1.1.1'
__license__ = 'MIT'
|
# bubble_sort
# Bubble_sort uses the technique of comparing and swapping
def bubble_sort(lst):
for passnum in range(len(lst) - 1, 0, -1):
for i in range(passnum):
if lst[i] > lst[i + 1]:
temp = lst[i]
lst[i] = lst[i + 1]
lst[i + 1] = temp
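# Added note (not in the original): the three-line swap above can also be written with
# tuple assignment: lst[i], lst[i + 1] = lst[i + 1], lst[i]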
lst = [54,26,93,17,77,31,44,55,20]
bubble_sort(lst)
print("sorted %s" %lst) # [17,20,26,31,44,54,55,77,91] |
def tip(total, percentage):
tip = (total * percentage) / 100
return tip
print(tip(24, 13))
|
""""""
"""
# Floyd's Tortoise and Hare
[Reference : wiki](https://en.wikipedia.org/wiki/Cycle_detection)
## Description
Floyd's cycle-finding algorithm is a pointer algorithm that uses only two pointers, which move through the sequence at
different speeds. It is also called the "tortoise and the hare algorithm".
It checks whether a cycle exists in a linked list, and it can also return the node where the cycle begins.
Linear Time
Constant Space
"""
class Node:
"""
Node for the linked-list
"""
def __init__(self, data=0, next=None):
self.data = data
self.next = next
class FloydTortoiseAndHare:
"""
Implementation of Floyd's Tortoise and Hare Algorithm
"""
def check_cycle(self, head):
"""
Return True if cycle is present else False
        :param head: head node of the linked list
        :return: True if a cycle is present, else False
"""
# Two pointers
tortoise, hare = head, head
# Base Case
while hare and hare.next:
tortoise = tortoise.next
hare = hare.next.next
# Condition for cycle
if tortoise == hare:
return True
# Condition when there is no cycle
return False
def cycle_node(self, head):
"""
Finding the node where cycle exists
        :param head: head node of the linked list
        :return: the node where the cycle begins, or None if there is no cycle
"""
# Two pointers
tortoise, hare = head, head
while True:
# Condition when pointer reaches to end
if not hare or not hare.next:
return None
tortoise = tortoise.next
hare = hare.next.next
if tortoise == hare:
break
        # Reset one pointer to the head; advancing both one step at a time,
        # they meet exactly at the node where the cycle begins
tortoise = head
while tortoise != hare:
tortoise = tortoise.next
hare = hare.next
# Returning node where cycle was found
return tortoise
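# Added sketch (not part of the original file): a quick non-interactive check of the two
# classes above, using throwaway names introduced here for illustration only:
#   n3 = Node(3); n2 = Node(2, n3); n1 = Node(1, n2); n3.next = n2   # 1 -> 2 -> 3 -> 2 -> ...
#   FloydTortoiseAndHare().check_cycle(n1)       # True
#   FloydTortoiseAndHare().cycle_node(n1).data   # 2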
class FormLinkedList:
"""
Class to form linked-list with cycle
"""
def __init__(self, array, ind):
"""
Initialization
:param array: array of data of linked list
:param ind: Tail linked to the index ind,
if no cycle, then ind = -1
"""
self.head = None
self.array = array
self.ind = ind
def createll(self):
"""
Function to create linked-list with cycle
:return:
"""
node_at_cycle = None
self.head = temp = Node(None)
for index, ele in enumerate(self.array):
new_node = Node(ele)
temp.next = new_node
# Keeping track of the node where tail will be linked
if index == self.ind:
node_at_cycle = temp
temp = temp.next
# linking tail to the node of given index
temp.next = node_at_cycle
return self.head.next
if __name__ == "__main__":
print(f"Enter space separated integers")
array = list(map(int, input().split()))
print(f"Enter the index where tail is linked")
ind = int(input())
formll = FormLinkedList(array, ind)
head = formll.createll()
floyd = FloydTortoiseAndHare()
cycle = floyd.check_cycle(head)
print(f"Cycle is {'' if cycle else 'not '}present")
if cycle:
node = floyd.cycle_node(head)
print(f"Tail connects to node {node.data}")
"""
### Implementation
| Problem No. | Level | Problems | Solutions |
| :--- | :---: | :--- | :---|
| 141. | Easy | [Linked List Cycle](https://leetcode.com/problems/linked-list-cycle/) | [Python](https://github.com/ramanaditya/data-structure-and-algorithms/tree/master/leetcode/linked-list/linked-list-cycle.py) |
| 142. | Medium | [Linked List Cycle II](https://leetcode.com/problems/linked-list-cycle-ii/) | [Python](https://github.com/ramanaditya/data-structure-and-algorithms/tree/master/leetcode/linked-list/linked-list-cycle-ii.py) |
"""
|
class Solution:
def validPalindrome(self, s: str) -> bool:
return self.isValidPalindrome(s, False)
def isValidPalindrome(self, s: str, did_delete: bool) -> bool:
curr_left = 0
curr_right = len(s) - 1
while curr_left < curr_right:
# If left and right characters are same, keep checking
if s[curr_left] == s[curr_right]:
curr_left += 1
curr_right -= 1
# If already deleted, not a valid palindrome
elif did_delete:
return False
# If we can still delete one character,
# run check after deleting left, right character
else:
return self.isValidPalindrome(s[curr_left + 1:curr_right + 1], True) or self.isValidPalindrome(s[curr_left:curr_right], True)
return True
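# Added usage sketch (not in the original file), guarded so it only runs as a script:
if __name__ == "__main__":
    s = Solution()
    print(s.validPalindrome("aba"))   # True, already a palindrome
    print(s.validPalindrome("abca"))  # True after deleting 'b' or 'c'
    print(s.validPalindrome("abc"))   # False, one deletion is not enough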
|
# -*- coding: utf-8 -*-
SPELLINGS_MAP={
"accessorise":"accessorize",
"accessorised":"accessorized",
"accessorises":"accessorizes",
"accessorising":"accessorizing",
"acclimatisation":"acclimatization",
"acclimatise":"acclimatize",
"acclimatised":"acclimatized",
"acclimatises":"acclimatizes",
"acclimatising":"acclimatizing",
"accoutrements":"accouterments",
"aeon":"eon",
"aeons":"eons",
"aerogramme":"aerogram",
"aerogrammes":"aerograms",
"aeroplane":"airplane",
"aeroplanes":"airplanes",
"aesthete":"esthete",
"aesthetes":"esthetes",
"aesthetic":"esthetic",
"aesthetically":"esthetically",
"aesthetics":"esthetics",
"aetiology":"etiology",
"ageing":"aging",
"aggrandisement":"aggrandizement",
"agonise":"agonize",
"agonised":"agonized",
"agonises":"agonizes",
"agonising":"agonizing",
"agonisingly":"agonizingly",
"almanack":"almanac",
"almanacks":"almanacs",
"aluminium":"aluminum",
"amortisable":"amortizable",
"amortisation":"amortization",
"amortisations":"amortizations",
"amortise":"amortize",
"amortised":"amortized",
"amortises":"amortizes",
"amortising":"amortizing",
"amphitheatre":"amphitheater",
"amphitheatres":"amphitheaters",
"anaemia":"anemia",
"anaemic":"anemic",
"anaesthesia":"anesthesia",
"anaesthetic":"anesthetic",
"anaesthetics":"anesthetics",
"anaesthetise":"anesthetize",
"anaesthetised":"anesthetized",
"anaesthetises":"anesthetizes",
"anaesthetising":"anesthetizing",
"anaesthetist":"anesthetist",
"anaesthetists":"anesthetists",
"anaesthetize":"anesthetize",
"anaesthetized":"anesthetized",
"anaesthetizes":"anesthetizes",
"anaesthetizing":"anesthetizing",
"analogue":"analog",
"analogues":"analogs",
"analyse":"analyze",
"analysed":"analyzed",
"analyses":"analyzes",
"analysing":"analyzing",
"anglicise":"anglicize",
"anglicised":"anglicized",
"anglicises":"anglicizes",
"anglicising":"anglicizing",
"annualised":"annualized",
"antagonise":"antagonize",
"antagonised":"antagonized",
"antagonises":"antagonizes",
"antagonising":"antagonizing",
"apologise":"apologize",
"apologised":"apologized",
"apologises":"apologizes",
"apologising":"apologizing",
"appal":"appall",
"appals":"appalls",
"appetiser":"appetizer",
"appetisers":"appetizers",
"appetising":"appetizing",
"appetisingly":"appetizingly",
"arbour":"arbor",
"arbours":"arbors",
"archaeological":"archeological",
"archaeologically":"archeologically",
"archaeologist":"archeologist",
"archaeologists":"archeologists",
"archaeology":"archeology",
"ardour":"ardor",
"armour":"armor",
"armoured":"armored",
"armourer":"armorer",
"armourers":"armorers",
"armouries":"armories",
"armoury":"armory",
"artefact":"artifact",
"artefacts":"artifacts",
"authorise":"authorize",
"authorised":"authorized",
"authorises":"authorizes",
"authorising":"authorizing",
"axe":"ax",
"backpedalled":"backpedaled",
"backpedalling":"backpedaling",
"bannister":"banister",
"bannisters":"banisters",
"baptise":"baptize",
"baptised":"baptized",
"baptises":"baptizes",
"baptising":"baptizing",
"bastardise":"bastardize",
"bastardised":"bastardized",
"bastardises":"bastardizes",
"bastardising":"bastardizing",
"battleaxe":"battleax",
"baulk":"balk",
"baulked":"balked",
"baulking":"balking",
"baulks":"balks",
"bedevilled":"bedeviled",
"bedevilling":"bedeviling",
"behaviour":"behavior",
"behavioural":"behavioral",
"behaviourism":"behaviorism",
"behaviourist":"behaviorist",
"behaviourists":"behaviorists",
"behaviours":"behaviors",
"behove":"behoove",
"behoved":"behooved",
"behoves":"behooves",
"bejewelled":"bejeweled",
"belabour":"belabor",
"belaboured":"belabored",
"belabouring":"belaboring",
"belabours":"belabors",
"bevelled":"beveled",
"bevvies":"bevies",
"bevvy":"bevy",
"biassed":"biased",
"biassing":"biasing",
"bingeing":"binging",
"bougainvillaea":"bougainvillea",
"bougainvillaeas":"bougainvilleas",
"bowdlerise":"bowdlerize",
"bowdlerised":"bowdlerized",
"bowdlerises":"bowdlerizes",
"bowdlerising":"bowdlerizing",
"breathalyse":"breathalyze",
"breathalysed":"breathalyzed",
"breathalyser":"breathalyzer",
"breathalysers":"breathalyzers",
"breathalyses":"breathalyzes",
"breathalysing":"breathalyzing",
"brutalise":"brutalize",
"brutalised":"brutalized",
"brutalises":"brutalizes",
"brutalising":"brutalizing",
"buses":"busses",
"busing":"bussing",
"caesarean":"cesarean",
"caesareans":"cesareans",
"calibre":"caliber",
"calibres":"calibers",
"calliper":"caliper",
"callipers":"calipers",
"callisthenics":"calisthenics",
"canalise":"canalize",
"canalised":"canalized",
"canalises":"canalizes",
"canalising":"canalizing",
"cancellation":"cancelation",
"cancellations":"cancelations",
"cancelled":"canceled",
"cancelling":"canceling",
"candour":"candor",
"cannibalise":"cannibalize",
"cannibalised":"cannibalized",
"cannibalises":"cannibalizes",
"cannibalising":"cannibalizing",
"canonise":"canonize",
"canonised":"canonized",
"canonises":"canonizes",
"canonising":"canonizing",
"capitalise":"capitalize",
"capitalised":"capitalized",
"capitalises":"capitalizes",
"capitalising":"capitalizing",
"caramelise":"caramelize",
"caramelised":"caramelized",
"caramelises":"caramelizes",
"caramelising":"caramelizing",
"carbonise":"carbonize",
"carbonised":"carbonized",
"carbonises":"carbonizes",
"carbonising":"carbonizing",
"carolled":"caroled",
"carolling":"caroling",
"catalogue":"catalog",
"catalogued":"cataloged",
"catalogues":"catalogs",
"cataloguing":"cataloging",
"catalyse":"catalyze",
"catalysed":"catalyzed",
"catalyses":"catalyzes",
"catalysing":"catalyzing",
"categorise":"categorize",
"categorised":"categorized",
"categorises":"categorizes",
"categorising":"categorizing",
"cauterise":"cauterize",
"cauterised":"cauterized",
"cauterises":"cauterizes",
"cauterising":"cauterizing",
"cavilled":"caviled",
"cavilling":"caviling",
"centigramme":"centigram",
"centigrammes":"centigrams",
"centilitre":"centiliter",
"centilitres":"centiliters",
"centimetre":"centimeter",
"centimetres":"centimeters",
"centralise":"centralize",
"centralised":"centralized",
"centralises":"centralizes",
"centralising":"centralizing",
"centre":"center",
"centred":"centered",
"centrefold":"centerfold",
"centrefolds":"centerfolds",
"centrepiece":"centerpiece",
"centrepieces":"centerpieces",
"centres":"centers",
"channelled":"channeled",
"channelling":"channeling",
"characterise":"characterize",
"characterised":"characterized",
"characterises":"characterizes",
"characterising":"characterizing",
"cheque":"check",
"chequebook":"checkbook",
"chequebooks":"checkbooks",
"chequered":"checkered",
"cheques":"checks",
"chilli":"chili",
"chimaera":"chimera",
"chimaeras":"chimeras",
"chiselled":"chiseled",
"chiselling":"chiseling",
"circularise":"circularize",
"circularised":"circularized",
"circularises":"circularizes",
"circularising":"circularizing",
"civilise":"civilize",
"civilised":"civilized",
"civilises":"civilizes",
"civilising":"civilizing",
"clamour":"clamor",
"clamoured":"clamored",
"clamouring":"clamoring",
"clamours":"clamors",
"clangour":"clangor",
"clarinettist":"clarinetist",
"clarinettists":"clarinetists",
"collectivise":"collectivize",
"collectivised":"collectivized",
"collectivises":"collectivizes",
"collectivising":"collectivizing",
"colonisation":"colonization",
"colonise":"colonize",
"colonised":"colonized",
"coloniser":"colonizer",
"colonisers":"colonizers",
"colonises":"colonizes",
"colonising":"colonizing",
"colour":"color",
"colourant":"colorant",
"colourants":"colorants",
"coloured":"colored",
"coloureds":"coloreds",
"colourful":"colorful",
"colourfully":"colorfully",
"colouring":"coloring",
"colourize":"colorize",
"colourized":"colorized",
"colourizes":"colorizes",
"colourizing":"colorizing",
"colourless":"colorless",
"colours":"colors",
"commercialise":"commercialize",
"commercialised":"commercialized",
"commercialises":"commercializes",
"commercialising":"commercializing",
"compartmentalise":"compartmentalize",
"compartmentalised":"compartmentalized",
"compartmentalises":"compartmentalizes",
"compartmentalising":"compartmentalizing",
"computerise":"computerize",
"computerised":"computerized",
"computerises":"computerizes",
"computerising":"computerizing",
"conceptualise":"conceptualize",
"conceptualised":"conceptualized",
"conceptualises":"conceptualizes",
"conceptualising":"conceptualizing",
"connexion":"connection",
"connexions":"connections",
"contextualise":"contextualize",
"contextualised":"contextualized",
"contextualises":"contextualizes",
"contextualising":"contextualizing",
"cosier":"cozier",
"cosies":"cozies",
"cosiest":"coziest",
"cosily":"cozily",
"cosiness":"coziness",
"cosy":"cozy",
"councillor":"councilor",
"councillors":"councilors",
"counselled":"counseled",
"counselling":"counseling",
"counsellor":"counselor",
"counsellors":"counselors",
"crenellated":"crenelated",
"criminalise":"criminalize",
"criminalised":"criminalized",
"criminalises":"criminalizes",
"criminalising":"criminalizing",
"criticise":"criticize",
"criticised":"criticized",
"criticises":"criticizes",
"criticising":"criticizing",
"crueller":"crueler",
"cruellest":"cruelest",
"crystallisation":"crystallization",
"crystallise":"crystallize",
"crystallised":"crystallized",
"crystallises":"crystallizes",
"crystallising":"crystallizing",
"cudgelled":"cudgeled",
"cudgelling":"cudgeling",
"customise":"customize",
"customised":"customized",
"customises":"customizes",
"customising":"customizing",
"cypher":"cipher",
"cyphers":"ciphers",
"decentralisation":"decentralization",
"decentralise":"decentralize",
"decentralised":"decentralized",
"decentralises":"decentralizes",
"decentralising":"decentralizing",
"decriminalisation":"decriminalization",
"decriminalise":"decriminalize",
"decriminalised":"decriminalized",
"decriminalises":"decriminalizes",
"decriminalising":"decriminalizing",
"defence":"defense",
"defenceless":"defenseless",
"defences":"defenses",
"dehumanisation":"dehumanization",
"dehumanise":"dehumanize",
"dehumanised":"dehumanized",
"dehumanises":"dehumanizes",
"dehumanising":"dehumanizing",
"demeanour":"demeanor",
"demilitarisation":"demilitarization",
"demilitarise":"demilitarize",
"demilitarised":"demilitarized",
"demilitarises":"demilitarizes",
"demilitarising":"demilitarizing",
"demobilisation":"demobilization",
"demobilise":"demobilize",
"demobilised":"demobilized",
"demobilises":"demobilizes",
"demobilising":"demobilizing",
"democratisation":"democratization",
"democratise":"democratize",
"democratised":"democratized",
"democratises":"democratizes",
"democratising":"democratizing",
"demonise":"demonize",
"demonised":"demonized",
"demonises":"demonizes",
"demonising":"demonizing",
"demoralisation":"demoralization",
"demoralise":"demoralize",
"demoralised":"demoralized",
"demoralises":"demoralizes",
"demoralising":"demoralizing",
"denationalisation":"denationalization",
"denationalise":"denationalize",
"denationalised":"denationalized",
"denationalises":"denationalizes",
"denationalising":"denationalizing",
"deodorise":"deodorize",
"deodorised":"deodorized",
"deodorises":"deodorizes",
"deodorising":"deodorizing",
"depersonalise":"depersonalize",
"depersonalised":"depersonalized",
"depersonalises":"depersonalizes",
"depersonalising":"depersonalizing",
"deputise":"deputize",
"deputised":"deputized",
"deputises":"deputizes",
"deputising":"deputizing",
"desensitisation":"desensitization",
"desensitise":"desensitize",
"desensitised":"desensitized",
"desensitises":"desensitizes",
"desensitising":"desensitizing",
"destabilisation":"destabilization",
"destabilise":"destabilize",
"destabilised":"destabilized",
"destabilises":"destabilizes",
"destabilising":"destabilizing",
"dialled":"dialed",
"dialling":"dialing",
"dialogue":"dialog",
"dialogues":"dialogs",
"diarrhoea":"diarrhea",
"digitise":"digitize",
"digitised":"digitized",
"digitises":"digitizes",
"digitising":"digitizing",
"disc":"disk",
"discolour":"discolor",
"discoloured":"discolored",
"discolouring":"discoloring",
"discolours":"discolors",
"discs":"disks",
"disembowelled":"disemboweled",
"disembowelling":"disemboweling",
"disfavour":"disfavor",
"dishevelled":"disheveled",
"dishonour":"dishonor",
"dishonourable":"dishonorable",
"dishonourably":"dishonorably",
"dishonoured":"dishonored",
"dishonouring":"dishonoring",
"dishonours":"dishonors",
"disorganisation":"disorganization",
"disorganised":"disorganized",
"distil":"distill",
"distils":"distills",
"dramatisation":"dramatization",
"dramatisations":"dramatizations",
"dramatise":"dramatize",
"dramatised":"dramatized",
"dramatises":"dramatizes",
"dramatising":"dramatizing",
"draught":"draft",
"draughtboard":"draftboard",
"draughtboards":"draftboards",
"draughtier":"draftier",
"draughtiest":"draftiest",
"draughts":"drafts",
"draughtsman":"draftsman",
"draughtsmanship":"draftsmanship",
"draughtsmen":"draftsmen",
"draughtswoman":"draftswoman",
"draughtswomen":"draftswomen",
"draughty":"drafty",
"drivelled":"driveled",
"drivelling":"driveling",
"duelled":"dueled",
"duelling":"dueling",
"economise":"economize",
"economised":"economized",
"economises":"economizes",
"economising":"economizing",
"edoema":"edema",
"editorialise":"editorialize",
"editorialised":"editorialized",
"editorialises":"editorializes",
"editorialising":"editorializing",
"empathise":"empathize",
"empathised":"empathized",
"empathises":"empathizes",
"empathising":"empathizing",
"emphasise":"emphasize",
"emphasised":"emphasized",
"emphasises":"emphasizes",
"emphasising":"emphasizing",
"enamelled":"enameled",
"enamelling":"enameling",
"enamoured":"enamored",
"encyclopaedia":"encyclopedia",
"encyclopaedias":"encyclopedias",
"encyclopaedic":"encyclopedic",
"endeavour":"endeavor",
"endeavoured":"endeavored",
"endeavouring":"endeavoring",
"endeavours":"endeavors",
"energise":"energize",
"energised":"energized",
"energises":"energizes",
"energising":"energizing",
"enrol":"enroll",
"enrols":"enrolls",
"enthral":"enthrall",
"enthrals":"enthralls",
"epaulette":"epaulet",
"epaulettes":"epaulets",
"epicentre":"epicenter",
"epicentres":"epicenters",
"epilogue":"epilog",
"epilogues":"epilogs",
"epitomise":"epitomize",
"epitomised":"epitomized",
"epitomises":"epitomizes",
"epitomising":"epitomizing",
"equalisation":"equalization",
"equalise":"equalize",
"equalised":"equalized",
"equaliser":"equalizer",
"equalisers":"equalizers",
"equalises":"equalizes",
"equalising":"equalizing",
"eulogise":"eulogize",
"eulogised":"eulogized",
"eulogises":"eulogizes",
"eulogising":"eulogizing",
"evangelise":"evangelize",
"evangelised":"evangelized",
"evangelises":"evangelizes",
"evangelising":"evangelizing",
"exorcise":"exorcize",
"exorcised":"exorcized",
"exorcises":"exorcizes",
"exorcising":"exorcizing",
"extemporisation":"extemporization",
"extemporise":"extemporize",
"extemporised":"extemporized",
"extemporises":"extemporizes",
"extemporising":"extemporizing",
"externalisation":"externalization",
"externalisations":"externalizations",
"externalise":"externalize",
"externalised":"externalized",
"externalises":"externalizes",
"externalising":"externalizing",
"factorise":"factorize",
"factorised":"factorized",
"factorises":"factorizes",
"factorising":"factorizing",
"faecal":"fecal",
"faeces":"feces",
"familiarisation":"familiarization",
"familiarise":"familiarize",
"familiarised":"familiarized",
"familiarises":"familiarizes",
"familiarising":"familiarizing",
"fantasise":"fantasize",
"fantasised":"fantasized",
"fantasises":"fantasizes",
"fantasising":"fantasizing",
"favour":"favor",
"favourable":"favorable",
"favourably":"favorably",
"favoured":"favored",
"favouring":"favoring",
"favourite":"favorite",
"favourites":"favorites",
"favouritism":"favoritism",
"favours":"favors",
"feminise":"feminize",
"feminised":"feminized",
"feminises":"feminizes",
"feminising":"feminizing",
"fertilisation":"fertilization",
"fertilise":"fertilize",
"fertilised":"fertilized",
"fertiliser":"fertilizer",
"fertilisers":"fertilizers",
"fertilises":"fertilizes",
"fertilising":"fertilizing",
"fervour":"fervor",
"fibre":"fiber",
"fibreglass":"fiberglass",
"fibres":"fibers",
"fictionalisation":"fictionalization",
"fictionalisations":"fictionalizations",
"fictionalise":"fictionalize",
"fictionalised":"fictionalized",
"fictionalises":"fictionalizes",
"fictionalising":"fictionalizing",
"fillet":"filet",
"filleted":"fileted",
"filleting":"fileting",
"fillets":"filets",
"finalisation":"finalization",
"finalise":"finalize",
"finalised":"finalized",
"finalises":"finalizes",
"finalising":"finalizing",
"flautist":"flutist",
"flautists":"flutists",
"flavour":"flavor",
"flavoured":"flavored",
"flavouring":"flavoring",
"flavourings":"flavorings",
"flavourless":"flavorless",
"flavours":"flavors",
"flavoursome":"flavorsome",
"flyer / flier":"flier / flyer",
"foetal":"fetal",
"foetid":"fetid",
"foetus":"fetus",
"foetuses":"fetuses",
"formalisation":"formalization",
"formalise":"formalize",
"formalised":"formalized",
"formalises":"formalizes",
"formalising":"formalizing",
"fossilisation":"fossilization",
"fossilise":"fossilize",
"fossilised":"fossilized",
"fossilises":"fossilizes",
"fossilising":"fossilizing",
"fraternisation":"fraternization",
"fraternise":"fraternize",
"fraternised":"fraternized",
"fraternises":"fraternizes",
"fraternising":"fraternizing",
"fulfil":"fulfill",
"fulfilment":"fulfillment",
"fulfils":"fulfills",
"funnelled":"funneled",
"funnelling":"funneling",
"galvanise":"galvanize",
"galvanised":"galvanized",
"galvanises":"galvanizes",
"galvanising":"galvanizing",
"gambolled":"gamboled",
"gambolling":"gamboling",
"gaol":"jail",
"gaolbird":"jailbird",
"gaolbirds":"jailbirds",
"gaolbreak":"jailbreak",
"gaolbreaks":"jailbreaks",
"gaoled":"jailed",
"gaoler":"jailer",
"gaolers":"jailers",
"gaoling":"jailing",
"gaols":"jails",
"gases":"gasses",
"gauge":"gage",
"gauged":"gaged",
"gauges":"gages",
"gauging":"gaging",
"generalisation":"generalization",
"generalisations":"generalizations",
"generalise":"generalize",
"generalised":"generalized",
"generalises":"generalizes",
"generalising":"generalizing",
"ghettoise":"ghettoize",
"ghettoised":"ghettoized",
"ghettoises":"ghettoizes",
"ghettoising":"ghettoizing",
"gipsies":"gypsies",
"glamorise":"glamorize",
"glamorised":"glamorized",
"glamorises":"glamorizes",
"glamorising":"glamorizing",
"glamour":"glamor",
"globalisation":"globalization",
"globalise":"globalize",
"globalised":"globalized",
"globalises":"globalizes",
"globalising":"globalizing",
"glueing":"gluing",
"goitre":"goiter",
"goitres":"goiters",
"gonorrhoea":"gonorrhea",
"gramme":"gram",
"grammes":"grams",
"gravelled":"graveled",
"grey":"gray",
"greyed":"grayed",
"greying":"graying",
"greyish":"grayish",
"greyness":"grayness",
"greys":"grays",
"grovelled":"groveled",
"grovelling":"groveling",
"groyne":"groin",
"groynes":"groins",
"gruelling":"grueling",
"gruellingly":"gruelingly",
"gryphon":"griffin",
"gryphons":"griffins",
"gynaecological":"gynecological",
"gynaecologist":"gynecologist",
"gynaecologists":"gynecologists",
"gynaecology":"gynecology",
"haematological":"hematological",
"haematologist":"hematologist",
"haematologists":"hematologists",
"haematology":"hematology",
"haemoglobin":"hemoglobin",
"haemophilia":"hemophilia",
"haemophiliac":"hemophiliac",
"haemophiliacs":"hemophiliacs",
"haemorrhage":"hemorrhage",
"haemorrhaged":"hemorrhaged",
"haemorrhages":"hemorrhages",
"haemorrhaging":"hemorrhaging",
"haemorrhoids":"hemorrhoids",
"harbour":"harbor",
"harboured":"harbored",
"harbouring":"harboring",
"harbours":"harbors",
"harmonisation":"harmonization",
"harmonise":"harmonize",
"harmonised":"harmonized",
"harmonises":"harmonizes",
"harmonising":"harmonizing",
"homoeopath":"homeopath",
"homoeopathic":"homeopathic",
"homoeopaths":"homeopaths",
"homoeopathy":"homeopathy",
"homogenise":"homogenize",
"homogenised":"homogenized",
"homogenises":"homogenizes",
"homogenising":"homogenizing",
"honour":"honor",
"honourable":"honorable",
"honourably":"honorably",
"honoured":"honored",
"honouring":"honoring",
"honours":"honors",
"hospitalisation":"hospitalization",
"hospitalise":"hospitalize",
"hospitalised":"hospitalized",
"hospitalises":"hospitalizes",
"hospitalising":"hospitalizing",
"humanise":"humanize",
"humanised":"humanized",
"humanises":"humanizes",
"humanising":"humanizing",
"humour":"humor",
"humoured":"humored",
"humouring":"humoring",
"humourless":"humorless",
"humours":"humors",
"hybridise":"hybridize",
"hybridised":"hybridized",
"hybridises":"hybridizes",
"hybridising":"hybridizing",
"hypnotise":"hypnotize",
"hypnotised":"hypnotized",
"hypnotises":"hypnotizes",
"hypnotising":"hypnotizing",
"hypothesise":"hypothesize",
"hypothesised":"hypothesized",
"hypothesises":"hypothesizes",
"hypothesising":"hypothesizing",
"idealisation":"idealization",
"idealise":"idealize",
"idealised":"idealized",
"idealises":"idealizes",
"idealising":"idealizing",
"idolise":"idolize",
"idolised":"idolized",
"idolises":"idolizes",
"idolising":"idolizing",
"immobilisation":"immobilization",
"immobilise":"immobilize",
"immobilised":"immobilized",
"immobiliser":"immobilizer",
"immobilisers":"immobilizers",
"immobilises":"immobilizes",
"immobilising":"immobilizing",
"immortalise":"immortalize",
"immortalised":"immortalized",
"immortalises":"immortalizes",
"immortalising":"immortalizing",
"immunisation":"immunization",
"immunise":"immunize",
"immunised":"immunized",
"immunises":"immunizes",
"immunising":"immunizing",
"impanelled":"impaneled",
"impanelling":"impaneling",
"imperilled":"imperiled",
"imperilling":"imperiling",
"individualise":"individualize",
"individualised":"individualized",
"individualises":"individualizes",
"individualising":"individualizing",
"industrialise":"industrialize",
"industrialised":"industrialized",
"industrialises":"industrializes",
"industrialising":"industrializing",
"inflexion":"inflection",
"inflexions":"inflections",
"initialise":"initialize",
"initialised":"initialized",
"initialises":"initializes",
"initialising":"initializing",
"initialled":"initialed",
"initialling":"initialing",
"instal":"install",
"instalment":"installment",
"instalments":"installments",
"instals":"installs",
"instil":"instill",
"instils":"instills",
"institutionalisation":"institutionalization",
"institutionalise":"institutionalize",
"institutionalised":"institutionalized",
"institutionalises":"institutionalizes",
"institutionalising":"institutionalizing",
"intellectualise":"intellectualize",
"intellectualised":"intellectualized",
"intellectualises":"intellectualizes",
"intellectualising":"intellectualizing",
"internalisation":"internalization",
"internalise":"internalize",
"internalised":"internalized",
"internalises":"internalizes",
"internalising":"internalizing",
"internationalisation":"internationalization",
"internationalise":"internationalize",
"internationalised":"internationalized",
"internationalises":"internationalizes",
"internationalising":"internationalizing",
"ionisation":"ionization",
"ionise":"ionize",
"ionised":"ionized",
"ioniser":"ionizer",
"ionisers":"ionizers",
"ionises":"ionizes",
"ionising":"ionizing",
"italicise":"italicize",
"italicised":"italicized",
"italicises":"italicizes",
"italicising":"italicizing",
"itemise":"itemize",
"itemised":"itemized",
"itemises":"itemizes",
"itemising":"itemizing",
"jeopardise":"jeopardize",
"jeopardised":"jeopardized",
"jeopardises":"jeopardizes",
"jeopardising":"jeopardizing",
"jewelled":"jeweled",
"jeweller":"jeweler",
"jewellers":"jewelers",
"jewellery":"jewelry",
"judgement":"judgment",
"kilogramme":"kilogram",
"kilogrammes":"kilograms",
"kilometre":"kilometer",
"kilometres":"kilometers",
"labelled":"labeled",
"labelling":"labeling",
"labour":"labor",
"laboured":"labored",
"labourer":"laborer",
"labourers":"laborers",
"labouring":"laboring",
"labours":"labors",
"lacklustre":"lackluster",
"legalisation":"legalization",
"legalise":"legalize",
"legalised":"legalized",
"legalises":"legalizes",
"legalising":"legalizing",
"legitimise":"legitimize",
"legitimised":"legitimized",
"legitimises":"legitimizes",
"legitimising":"legitimizing",
"leukaemia":"leukemia",
"levelled":"leveled",
"leveller":"leveler",
"levellers":"levelers",
"levelling":"leveling",
"libelled":"libeled",
"libelling":"libeling",
"libellous":"libelous",
"liberalisation":"liberalization",
"liberalise":"liberalize",
"liberalised":"liberalized",
"liberalises":"liberalizes",
"liberalising":"liberalizing",
"licence":"license",
"licenced":"licensed",
"licences":"licenses",
"licencing":"licensing",
"likeable":"likable",
"lionisation":"lionization",
"lionise":"lionize",
"lionised":"lionized",
"lionises":"lionizes",
"lionising":"lionizing",
"liquidise":"liquidize",
"liquidised":"liquidized",
"liquidiser":"liquidizer",
"liquidisers":"liquidizers",
"liquidises":"liquidizes",
"liquidising":"liquidizing",
"litre":"liter",
"litres":"liters",
"localise":"localize",
"localised":"localized",
"localises":"localizes",
"localising":"localizing",
"louvre":"louver",
"louvred":"louvered",
"louvres":"louvers",
"lustre":"luster",
"magnetise":"magnetize",
"magnetised":"magnetized",
"magnetises":"magnetizes",
"magnetising":"magnetizing",
"manoeuvrability":"maneuverability",
"manoeuvrable":"maneuverable",
"manoeuvre":"maneuver",
"manoeuvred":"maneuvered",
"manoeuvres":"maneuvers",
"manoeuvring":"maneuvering",
"manoeuvrings":"maneuverings",
"marginalisation":"marginalization",
"marginalise":"marginalize",
"marginalised":"marginalized",
"marginalises":"marginalizes",
"marginalising":"marginalizing",
"marshalled":"marshaled",
"marshalling":"marshaling",
"marvelled":"marveled",
"marvelling":"marveling",
"marvellous":"marvelous",
"marvellously":"marvelously",
"materialisation":"materialization",
"materialise":"materialize",
"materialised":"materialized",
"materialises":"materializes",
"materialising":"materializing",
"maximisation":"maximization",
"maximise":"maximize",
"maximised":"maximized",
"maximises":"maximizes",
"maximising":"maximizing",
"meagre":"meager",
"mechanisation":"mechanization",
"mechanise":"mechanize",
"mechanised":"mechanized",
"mechanises":"mechanizes",
"mechanising":"mechanizing",
"mediaeval":"medieval",
"memorialise":"memorialize",
"memorialised":"memorialized",
"memorialises":"memorializes",
"memorialising":"memorializing",
"memorise":"memorize",
"memorised":"memorized",
"memorises":"memorizes",
"memorising":"memorizing",
"mesmerise":"mesmerize",
"mesmerised":"mesmerized",
"mesmerises":"mesmerizes",
"mesmerising":"mesmerizing",
"metabolise":"metabolize",
"metabolised":"metabolized",
"metabolises":"metabolizes",
"metabolising":"metabolizing",
"metre":"meter",
"metres":"meters",
"micrometre":"micrometer",
"micrometres":"micrometers",
"militarise":"militarize",
"militarised":"militarized",
"militarises":"militarizes",
"militarising":"militarizing",
"milligramme":"milligram",
"milligrammes":"milligrams",
"millilitre":"milliliter",
"millilitres":"milliliters",
"millimetre":"millimeter",
"millimetres":"millimeters",
"miniaturisation":"miniaturization",
"miniaturise":"miniaturize",
"miniaturised":"miniaturized",
"miniaturises":"miniaturizes",
"miniaturising":"miniaturizing",
"minibuses":"minibusses",
"minimise":"minimize",
"minimised":"minimized",
"minimises":"minimizes",
"minimising":"minimizing",
"misbehaviour":"misbehavior",
"misdemeanour":"misdemeanor",
"misdemeanours":"misdemeanors",
"misspelt":"misspelled",
"mitre":"miter",
"mitres":"miters",
"mobilisation":"mobilization",
"mobilise":"mobilize",
"mobilised":"mobilized",
"mobilises":"mobilizes",
"mobilising":"mobilizing",
"modelled":"modeled",
"modeller":"modeler",
"modellers":"modelers",
"modelling":"modeling",
"modernise":"modernize",
"modernised":"modernized",
"modernises":"modernizes",
"modernising":"modernizing",
"moisturise":"moisturize",
"moisturised":"moisturized",
"moisturiser":"moisturizer",
"moisturisers":"moisturizers",
"moisturises":"moisturizes",
"moisturising":"moisturizing",
"monologue":"monolog",
"monologues":"monologs",
"monopolisation":"monopolization",
"monopolise":"monopolize",
"monopolised":"monopolized",
"monopolises":"monopolizes",
"monopolising":"monopolizing",
"moralise":"moralize",
"moralised":"moralized",
"moralises":"moralizes",
"moralising":"moralizing",
"motorised":"motorized",
"mould":"mold",
"moulded":"molded",
"moulder":"molder",
"mouldered":"moldered",
"mouldering":"moldering",
"moulders":"molders",
"mouldier":"moldier",
"mouldiest":"moldiest",
"moulding":"molding",
"mouldings":"moldings",
"moulds":"molds",
"mouldy":"moldy",
"moult":"molt",
"moulted":"molted",
"moulting":"molting",
"moults":"molts",
"moustache":"mustache",
"moustached":"mustached",
"moustaches":"mustaches",
"moustachioed":"mustachioed",
"multicoloured":"multicolored",
"nationalisation":"nationalization",
"nationalisations":"nationalizations",
"nationalise":"nationalize",
"nationalised":"nationalized",
"nationalises":"nationalizes",
"nationalising":"nationalizing",
"naturalisation":"naturalization",
"naturalise":"naturalize",
"naturalised":"naturalized",
"naturalises":"naturalizes",
"naturalising":"naturalizing",
"neighbour":"neighbor",
"neighbourhood":"neighborhood",
"neighbourhoods":"neighborhoods",
"neighbouring":"neighboring",
"neighbourliness":"neighborliness",
"neighbourly":"neighborly",
"neighbours":"neighbors",
"neutralisation":"neutralization",
"neutralise":"neutralize",
"neutralised":"neutralized",
"neutralises":"neutralizes",
"neutralising":"neutralizing",
"normalisation":"normalization",
"normalise":"normalize",
"normalised":"normalized",
"normalises":"normalizes",
"normalising":"normalizing",
"odour":"odor",
"odourless":"odorless",
"odours":"odors",
"oesophagus":"esophagus",
"oesophaguses":"esophaguses",
"oestrogen":"estrogen",
"offence":"offense",
"offences":"offenses",
"omelette":"omelet",
"omelettes":"omelets",
"optimise":"optimize",
"optimised":"optimized",
"optimises":"optimizes",
"optimising":"optimizing",
"organisation":"organization",
"organisational":"organizational",
"organisations":"organizations",
"organise":"organize",
"organised":"organized",
"organiser":"organizer",
"organisers":"organizers",
"organises":"organizes",
"organising":"organizing",
"orthopaedic":"orthopedic",
"orthopaedics":"orthopedics",
"ostracise":"ostracize",
"ostracised":"ostracized",
"ostracises":"ostracizes",
"ostracising":"ostracizing",
"outmanoeuvre":"outmaneuver",
"outmanoeuvred":"outmaneuvered",
"outmanoeuvres":"outmaneuvers",
"outmanoeuvring":"outmaneuvering",
"overemphasise":"overemphasize",
"overemphasised":"overemphasized",
"overemphasises":"overemphasizes",
"overemphasising":"overemphasizing",
"oxidisation":"oxidization",
"oxidise":"oxidize",
"oxidised":"oxidized",
"oxidises":"oxidizes",
"oxidising":"oxidizing",
"paederast":"pederast",
"paederasts":"pederasts",
"paediatric":"pediatric",
"paediatrician":"pediatrician",
"paediatricians":"pediatricians",
"paediatrics":"pediatrics",
"paedophile":"pedophile",
"paedophiles":"pedophiles",
"paedophilia":"pedophilia",
"palaeolithic":"paleolithic",
"palaeontologist":"paleontologist",
"palaeontologists":"paleontologists",
"palaeontology":"paleontology",
"panelled":"paneled",
"panelling":"paneling",
"panellist":"panelist",
"panellists":"panelists",
"paralyse":"paralyze",
"paralysed":"paralyzed",
"paralyses":"paralyzes",
"paralysing":"paralyzing",
"parcelled":"parceled",
"parcelling":"parceling",
"parlour":"parlor",
"parlours":"parlors",
"particularise":"particularize",
"particularised":"particularized",
"particularises":"particularizes",
"particularising":"particularizing",
"passivisation":"passivization",
"passivise":"passivize",
"passivised":"passivized",
"passivises":"passivizes",
"passivising":"passivizing",
"pasteurisation":"pasteurization",
"pasteurise":"pasteurize",
"pasteurised":"pasteurized",
"pasteurises":"pasteurizes",
"pasteurising":"pasteurizing",
"patronise":"patronize",
"patronised":"patronized",
"patronises":"patronizes",
"patronising":"patronizing",
"patronisingly":"patronizingly",
"pedalled":"pedaled",
"pedalling":"pedaling",
"pedestrianisation":"pedestrianization",
"pedestrianise":"pedestrianize",
"pedestrianised":"pedestrianized",
"pedestrianises":"pedestrianizes",
"pedestrianising":"pedestrianizing",
"penalise":"penalize",
"penalised":"penalized",
"penalises":"penalizes",
"penalising":"penalizing",
"pencilled":"penciled",
"pencilling":"penciling",
"personalise":"personalize",
"personalised":"personalized",
"personalises":"personalizes",
"personalising":"personalizing",
"pharmacopoeia":"pharmacopeia",
"pharmacopoeias":"pharmacopeias",
"philosophise":"philosophize",
"philosophised":"philosophized",
"philosophises":"philosophizes",
"philosophising":"philosophizing",
"philtre":"filter",
"philtres":"filters",
"phoney":"phony",
"plagiarise":"plagiarize",
"plagiarised":"plagiarized",
"plagiarises":"plagiarizes",
"plagiarising":"plagiarizing",
"plough":"plow",
"ploughed":"plowed",
"ploughing":"plowing",
"ploughman":"plowman",
"ploughmen":"plowmen",
"ploughs":"plows",
"ploughshare":"plowshare",
"ploughshares":"plowshares",
"polarisation":"polarization",
"polarise":"polarize",
"polarised":"polarized",
"polarises":"polarizes",
"polarising":"polarizing",
"politicisation":"politicization",
"politicise":"politicize",
"politicised":"politicized",
"politicises":"politicizes",
"politicising":"politicizing",
"popularisation":"popularization",
"popularise":"popularize",
"popularised":"popularized",
"popularises":"popularizes",
"popularising":"popularizing",
"pouffe":"pouf",
"pouffes":"poufs",
"practise":"practice",
"practised":"practiced",
"practises":"practices",
"practising":"practicing",
"praesidium":"presidium",
"praesidiums":"presidiums",
"pressurisation":"pressurization",
"pressurise":"pressurize",
"pressurised":"pressurized",
"pressurises":"pressurizes",
"pressurising":"pressurizing",
"pretence":"pretense",
"pretences":"pretenses",
"primaeval":"primeval",
"prioritisation":"prioritization",
"prioritise":"prioritize",
"prioritised":"prioritized",
"prioritises":"prioritizes",
"prioritising":"prioritizing",
"privatisation":"privatization",
"privatisations":"privatizations",
"privatise":"privatize",
"privatised":"privatized",
"privatises":"privatizes",
"privatising":"privatizing",
"professionalisation":"professionalization",
"professionalise":"professionalize",
"professionalised":"professionalized",
"professionalises":"professionalizes",
"professionalising":"professionalizing",
"programme":"program",
"programmes":"programs",
"prologue":"prolog",
"prologues":"prologs",
"propagandise":"propagandize",
"propagandised":"propagandized",
"propagandises":"propagandizes",
"propagandising":"propagandizing",
"proselytise":"proselytize",
"proselytised":"proselytized",
"proselytiser":"proselytizer",
"proselytisers":"proselytizers",
"proselytises":"proselytizes",
"proselytising":"proselytizing",
"psychoanalyse":"psychoanalyze",
"psychoanalysed":"psychoanalyzed",
"psychoanalyses":"psychoanalyzes",
"psychoanalysing":"psychoanalyzing",
"publicise":"publicize",
"publicised":"publicized",
"publicises":"publicizes",
"publicising":"publicizing",
"pulverisation":"pulverization",
"pulverise":"pulverize",
"pulverised":"pulverized",
"pulverises":"pulverizes",
"pulverising":"pulverizing",
"pummelled":"pummel",
"pummelling":"pummeled",
"pyjama":"pajama",
"pyjamas":"pajamas",
"pzazz":"pizzazz",
"quarrelled":"quarreled",
"quarrelling":"quarreling",
"radicalise":"radicalize",
"radicalised":"radicalized",
"radicalises":"radicalizes",
"radicalising":"radicalizing",
"rancour":"rancor",
"randomise":"randomize",
"randomised":"randomized",
"randomises":"randomizes",
"randomising":"randomizing",
"rationalisation":"rationalization",
"rationalisations":"rationalizations",
"rationalise":"rationalize",
"rationalised":"rationalized",
"rationalises":"rationalizes",
"rationalising":"rationalizing",
"ravelled":"raveled",
"ravelling":"raveling",
"realisable":"realizable",
"realisation":"realization",
"realisations":"realizations",
"realise":"realize",
"realised":"realized",
"realises":"realizes",
"realising":"realizing",
"recognisable":"recognizable",
"recognisably":"recognizably",
"recognisance":"recognizance",
"recognise":"recognize",
"recognised":"recognized",
"recognises":"recognizes",
"recognising":"recognizing",
"reconnoitre":"reconnoiter",
"reconnoitred":"reconnoitered",
"reconnoitres":"reconnoiters",
"reconnoitring":"reconnoitering",
"refuelled":"refueled",
"refuelling":"refueling",
"regularisation":"regularization",
"regularise":"regularize",
"regularised":"regularized",
"regularises":"regularizes",
"regularising":"regularizing",
"remodelled":"remodeled",
"remodelling":"remodeling",
"remould":"remold",
"remoulded":"remolded",
"remoulding":"remolding",
"remoulds":"remolds",
"reorganisation":"reorganization",
"reorganisations":"reorganizations",
"reorganise":"reorganize",
"reorganised":"reorganized",
"reorganises":"reorganizes",
"reorganising":"reorganizing",
"revelled":"reveled",
"reveller":"reveler",
"revellers":"revelers",
"revelling":"reveling",
"revitalise":"revitalize",
"revitalised":"revitalized",
"revitalises":"revitalizes",
"revitalising":"revitalizing",
"revolutionise":"revolutionize",
"revolutionised":"revolutionized",
"revolutionises":"revolutionizes",
"revolutionising":"revolutionizing",
"rhapsodise":"rhapsodize",
"rhapsodised":"rhapsodized",
"rhapsodises":"rhapsodizes",
"rhapsodising":"rhapsodizing",
"rigour":"rigor",
"rigours":"rigors",
"ritualised":"ritualized",
"rivalled":"rivaled",
"rivalling":"rivaling",
"romanticise":"romanticize",
"romanticised":"romanticized",
"romanticises":"romanticizes",
"romanticising":"romanticizing",
"rumour":"rumor",
"rumoured":"rumored",
"rumours":"rumors",
"sabre":"saber",
"sabres":"sabers",
"saltpetre":"saltpeter",
"sanitise":"sanitize",
"sanitised":"sanitized",
"sanitises":"sanitizes",
"sanitising":"sanitizing",
"satirise":"satirize",
"satirised":"satirized",
"satirises":"satirizes",
"satirising":"satirizing",
"saviour":"savior",
"saviours":"saviors",
"savour":"savor",
"savoured":"savored",
"savouries":"savories",
"savouring":"savoring",
"savours":"savors",
"savoury":"savory",
"scandalise":"scandalize",
"scandalised":"scandalized",
"scandalises":"scandalizes",
"scandalising":"scandalizing",
"sceptic":"skeptic",
"sceptical":"skeptical",
"sceptically":"skeptically",
"scepticism":"skepticism",
"sceptics":"skeptics",
"sceptre":"scepter",
"sceptres":"scepters",
"scrutinise":"scrutinize",
"scrutinised":"scrutinized",
"scrutinises":"scrutinizes",
"scrutinising":"scrutinizing",
"secularisation":"secularization",
"secularise":"secularize",
"secularised":"secularized",
"secularises":"secularizes",
"secularising":"secularizing",
"sensationalise":"sensationalize",
"sensationalised":"sensationalized",
"sensationalises":"sensationalizes",
"sensationalising":"sensationalizing",
"sensitise":"sensitize",
"sensitised":"sensitized",
"sensitises":"sensitizes",
"sensitising":"sensitizing",
"sentimentalise":"sentimentalize",
"sentimentalised":"sentimentalized",
"sentimentalises":"sentimentalizes",
"sentimentalising":"sentimentalizing",
"sepulchre":"sepulcher",
"sepulchres":"sepulchers",
"serialisation":"serialization",
"serialisations":"serializations",
"serialise":"serialize",
"serialised":"serialized",
"serialises":"serializes",
"serialising":"serializing",
"sermonise":"sermonize",
"sermonised":"sermonized",
"sermonises":"sermonizes",
"sermonising":"sermonizing",
"sheikh":"sheik",
"shovelled":"shoveled",
"shovelling":"shoveling",
"shrivelled":"shriveled",
"shrivelling":"shriveling",
"signalise":"signalize",
"signalised":"signalized",
"signalises":"signalizes",
"signalising":"signalizing",
"signalled":"signaled",
"signalling":"signaling",
"smoulder":"smolder",
"smouldered":"smoldered",
"smouldering":"smoldering",
"smoulders":"smolders",
"snivelled":"sniveled",
"snivelling":"sniveling",
"snorkelled":"snorkeled",
"snorkelling":"snorkeling",
"snowplough":"snowplow",
"snowploughs":"snowplow",
"socialisation":"socialization",
"socialise":"socialize",
"socialised":"socialized",
"socialises":"socializes",
"socialising":"socializing",
"sodomise":"sodomize",
"sodomised":"sodomized",
"sodomises":"sodomizes",
"sodomising":"sodomizing",
"solemnise":"solemnize",
"solemnised":"solemnized",
"solemnises":"solemnizes",
"solemnising":"solemnizing",
"sombre":"somber",
"specialisation":"specialization",
"specialisations":"specializations",
"specialise":"specialize",
"specialised":"specialized",
"specialises":"specializes",
"specialising":"specializing",
"spectre":"specter",
"spectres":"specters",
"spiralled":"spiraled",
"spiralling":"spiraling",
"splendour":"splendor",
"splendours":"splendors",
"squirrelled":"squirreled",
"squirrelling":"squirreling",
"stabilisation":"stabilization",
"stabilise":"stabilize",
"stabilised":"stabilized",
"stabiliser":"stabilizer",
"stabilisers":"stabilizers",
"stabilises":"stabilizes",
"stabilising":"stabilizing",
"standardisation":"standardization",
"standardise":"standardize",
"standardised":"standardized",
"standardises":"standardizes",
"standardising":"standardizing",
"stencilled":"stenciled",
"stencilling":"stenciling",
"sterilisation":"sterilization",
"sterilisations":"sterilizations",
"sterilise":"sterilize",
"sterilised":"sterilized",
"steriliser":"sterilizer",
"sterilisers":"sterilizers",
"sterilises":"sterilizes",
"sterilising":"sterilizing",
"stigmatisation":"stigmatization",
"stigmatise":"stigmatize",
"stigmatised":"stigmatized",
"stigmatises":"stigmatizes",
"stigmatising":"stigmatizing",
"storey":"story",
"storeys":"stories",
"subsidisation":"subsidization",
"subsidise":"subsidize",
"subsidised":"subsidized",
"subsidiser":"subsidizer",
"subsidisers":"subsidizers",
"subsidises":"subsidizes",
"subsidising":"subsidizing",
"succour":"succor",
"succoured":"succored",
"succouring":"succoring",
"succours":"succors",
"sulphate":"sulfate",
"sulphates":"sulfates",
"sulphide":"sulfide",
"sulphides":"sulfides",
"sulphur":"sulfur",
"sulphurous":"sulfurous",
"summarise":"summarize",
"summarised":"summarized",
"summarises":"summarizes",
"summarising":"summarizing",
"swivelled":"swiveled",
"swivelling":"swiveling",
"symbolise":"symbolize",
"symbolised":"symbolized",
"symbolises":"symbolizes",
"symbolising":"symbolizing",
"sympathise":"sympathize",
"sympathised":"sympathized",
"sympathiser":"sympathizer",
"sympathisers":"sympathizers",
"sympathises":"sympathizes",
"sympathising":"sympathizing",
"synchronisation":"synchronization",
"synchronise":"synchronize",
"synchronised":"synchronized",
"synchronises":"synchronizes",
"synchronising":"synchronizing",
"synthesise":"synthesize",
"synthesised":"synthesized",
"synthesiser":"synthesizer",
"synthesisers":"synthesizers",
"synthesises":"synthesizes",
"synthesising":"synthesizing",
"syphon":"siphon",
"syphoned":"siphoned",
"syphoning":"siphoning",
"syphons":"siphons",
"systematisation":"systematization",
"systematise":"systematize",
"systematised":"systematized",
"systematises":"systematizes",
"systematising":"systematizing",
"tantalise":"tantalize",
"tantalised":"tantalized",
"tantalises":"tantalizes",
"tantalising":"tantalizing",
"tantalisingly":"tantalizingly",
"tasselled":"tasseled",
"technicolour":"technicolor",
"temporise":"temporize",
"temporised":"temporized",
"temporises":"temporizes",
"temporising":"temporizing",
"tenderise":"tenderize",
"tenderised":"tenderized",
"tenderises":"tenderizes",
"tenderising":"tenderizing",
"terrorise":"terrorize",
"terrorised":"terrorized",
"terrorises":"terrorizes",
"terrorising":"terrorizing",
"theatre":"theater",
"theatregoer":"theatergoer",
"theatregoers":"theatergoers",
"theatres":"theaters",
"theorise":"theorize",
"theorised":"theorized",
"theorises":"theorizes",
"theorising":"theorizing",
"tonne":"ton",
"tonnes":"tons",
"towelled":"toweled",
"towelling":"toweling",
"toxaemia":"toxemia",
"tranquillise":"tranquilize",
"tranquillised":"tranquilized",
"tranquilliser":"tranquilizer",
"tranquillisers":"tranquilizers",
"tranquillises":"tranquilizes",
"tranquillising":"tranquilizing",
"tranquillity":"tranquility",
"tranquillize":"tranquilize",
"tranquillized":"tranquilized",
"tranquillizer":"tranquilizer",
"tranquillizers":"tranquilizers",
"tranquillizes":"tranquilizes",
"tranquillizing":"tranquilizing",
"tranquilly":"tranquility",
"transistorised":"transistorized",
"traumatise":"traumatize",
"traumatised":"traumatized",
"traumatises":"traumatizes",
"traumatising":"traumatizing",
"travelled":"traveled",
"traveller":"traveler",
"travellers":"travelers",
"travelling":"traveling",
"travelogue":"travelog",
"travelogues":"travelogs",
"trialled":"trialed",
"trialling":"trialing",
"tricolour":"tricolor",
"tricolours":"tricolors",
"trivialise":"trivialize",
"trivialised":"trivialized",
"trivialises":"trivializes",
"trivialising":"trivializing",
"tumour":"tumor",
"tumours":"tumors",
"tunnelled":"tunneled",
"tunnelling":"tunneling",
"tyrannise":"tyrannize",
"tyrannised":"tyrannized",
"tyrannises":"tyrannizes",
"tyrannising":"tyrannizing",
"tyre":"tire",
"tyres":"tires",
"unauthorised":"unauthorized",
"uncivilised":"uncivilized",
"underutilised":"underutilized",
"unequalled":"unequaled",
"unfavourable":"unfavorable",
"unfavourably":"unfavorably",
"unionisation":"unionization",
"unionise":"unionize",
"unionised":"unionized",
"unionises":"unionizes",
"unionising":"unionizing",
"unorganised":"unorganized",
"unravelled":"unraveled",
"unravelling":"unraveling",
"unrecognisable":"unrecognizable",
"unrecognised":"unrecognized",
"unrivalled":"unrivaled",
"unsavoury":"unsavory",
"untrammelled":"untrammeled",
"urbanisation":"urbanization",
"urbanise":"urbanize",
"urbanised":"urbanized",
"urbanises":"urbanizes",
"urbanising":"urbanizing",
"utilisable":"utilizable",
"utilisation":"utilization",
"utilise":"utilize",
"utilised":"utilized",
"utilises":"utilizes",
"utilising":"utilizing",
"valour":"valor",
"vandalise":"vandalize",
"vandalised":"vandalized",
"vandalises":"vandalizes",
"vandalising":"vandalizing",
"vaporisation":"vaporization",
"vaporise":"vaporize",
"vaporised":"vaporized",
"vaporises":"vaporizes",
"vaporising":"vaporizing",
"vapour":"vapor",
"vapours":"vapors",
"verbalise":"verbalize",
"verbalised":"verbalized",
"verbalises":"verbalizes",
"verbalising":"verbalizing",
"victimisation":"victimization",
"victimise":"victimize",
"victimised":"victimized",
"victimises":"victimizes",
"victimising":"victimizing",
"videodisc":"videodisk",
"videodiscs":"videodisks",
"vigour":"vigor",
"visualisation":"visualization",
"visualisations":"visualizations",
"visualise":"visualize",
"visualised":"visualized",
"visualises":"visualizes",
"visualising":"visualizing",
"vocalisation":"vocalization",
"vocalisations":"vocalizations",
"vocalise":"vocalize",
"vocalised":"vocalized",
"vocalises":"vocalizes",
"vocalising":"vocalizing",
"vulcanised":"vulcanized",
"vulgarisation":"vulgarization",
"vulgarise":"vulgarize",
"vulgarised":"vulgarized",
"vulgarises":"vulgarizes",
"vulgarising":"vulgarizing",
"waggon":"wagon",
"waggons":"wagons",
"watercolour":"watercolor",
"watercolours":"watercolors",
"weaselled":"weaseled",
"weaselling":"weaseling",
"westernisation":"westernization",
"westernise":"westernize",
"westernised":"westernized",
"westernises":"westernizes",
"westernising":"westernizing",
"womanise":"womanize",
"womanised":"womanized",
"womaniser":"womanizer",
"womanisers":"womanizers",
"womanises":"womanizes",
"womanising":"womanizing",
"woollen":"woolen",
"woollens":"woolens",
"woollies":"woolies",
"woolly":"wooly",
"worshipped":"worshiped",
"worshipping":"worshiping",
"worshipper":"worshiper",
"yodelled":"yodeled",
"yodelling":"yodeling",
"yoghourt":"yogurt",
"yoghourts":"yogurts",
"yoghurt":"yogurt",
"yoghurts":"yogurts",
} |
n = int(input())
while n != -1:
prev_time = 0
total = 0
for _ in range(n):
(speed, time) = map(int, input().split(" "))
total += (time - prev_time) * speed
prev_time = time
print(total, "miles")
n = int(input())
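# Added worked example (not in the original): for the input
#   2
#   20 2
#   30 6
#   -1
# the loop computes (2 - 0) * 20 + (6 - 2) * 30 = 160 and prints "160 miles".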
|
"""
URL: https://codeforces.com/problemset/problem/753/A
Author: Safiul Kabir [safiulanik at gmail.com]
Tags: dp, greedy, math, *1000
"""
n = int(input())
count = 0
a = []
summ = 0
for i in range(1, n + 1):
summ += i
if summ > n:
break
count += 1
a.append(i)
if summ > n:
a[-1] += n - summ + i
print(count)
print(' '.join(map(str, a)))
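# Added worked example (not in the original): for n = 9 the loop collects 1, 2, 3, stops when
# 1 + 2 + 3 + 4 = 10 > 9, and folds the leftover into the last term, printing "3" and "1 2 6".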
|
"""Given a linked list, return the node where the cycle begins. If there is no cycle, return null.
Note: Do not modify the linked list.
Follow up:
Can you solve it without using extra space?
"""
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
def detect_cycle(head):
"""If head is a linked list with a cycle, its entry point node is returned. If not,
None is returned.
Time Complexity: O(N), where N is the number of nodes of the linked list.
Space Complexity: O(1).
:param head: ListNode
:return: entry: ListNode
"""
slow = head
fast = head
while fast and fast.next and fast.next.next:
slow = slow.next
fast = fast.next.next
# If there is a cycle, at some point slow and fast should be equal.
if slow == fast:
            # In that case, move head and slow until they are equal. That
            # node is the entry node.
while slow != head:
slow = slow.next
head = head.next
return slow
return None
if __name__ == "__main__":
"""
Linked list with a cycle starting at node "b"
c - d
/ \
a - b e
\ /
g - f
"""
a = ListNode("a")
b = ListNode("b")
c = ListNode("c")
d = ListNode("d")
e = ListNode("e")
f = ListNode("f")
g = ListNode("g")
a.next = b
b.next = c
c.next = d
d.next = e
e.next = f
f.next = g
g.next = b
entry_node = detect_cycle(a)
assert entry_node == b
ll = ListNode("a")
ll.next = ListNode("b")
ll.next.next = ListNode("c")
assert detect_cycle(ll) is None
ll = ListNode("a")
ll.next = ListNode("b")
ll.next.next = ListNode("c")
ll.next.next.next = ll
assert detect_cycle(ll) == ll
|
class Node:
_fields = []
def __init__(self, *args):
for key, value in zip(self._fields, args):
setattr(self, key, value)
    def __repr__(self):
        return self.__str__()
# literals...
class Bool(Node):
_fields = ["value"]
def __str__(self):
string = "Boolean=" + str(self.value)
return string
class Num(Node):
_fields = ["value"]
def __str__(self):
string = "Number=" + str(self.value)
return string
class Str(Node):
_fields = ["s"]
def __str__(self):
string = "String=" + str(self.s)
return string
# ids..
class Id(Node):
_fields = ["name"]
def __str__(self):
return "Id=" + str(self.name)
# expressions
class Expr(Node):
_fields = ["body"]
def __str__(self):
return "Expression(" + str(self.body) + ")"
class Assign(Node):
_fields = ["iden", "expr"]
def __str__(self):
return "Assign(" + str(self.iden) + ", " + str(self.expr) + ")"
class Call(Node):
_fields = ["operator", "operands"]
def __str__(self):
operands = ""
for operand in self.operands:
operands += (str(operand) + ', ')
return "Call(" + str(self.operator) + ", [" + operands + "])"
class Lambda(Node):
_fields = ["formals", "body"]
def __str__(self):
body = ""
for exp in self.body:
body += (str(exp) + ', ')
return "Lambda(formals = " + str(self.formals) + ", body=" + body + ")"
class Arguments(Node):
_fields = ["required_args", "optional_args"]
def __str__(self):
r_args = ''
for arg in self.required_args:
r_args += (str(arg) + ", ")
o_args = ''
if self.optional_args:
for arg in self.optional_args:
o_args += (str(arg) + ", ")
return "r_args=[" + r_args + "], o_args=[" + o_args + "]"
class Program(Node):
_fields = ["exprs"]
def __str__(self):
return "Program(" + str(self.exprs) + ")"
class Conditional(Node):
_fields = ["test", "conseq", "alt"]
def __str__(self):
test = str(self.test)
then = str(self.conseq)
el = str(self.alt)
return "Conditional(if=" + test + ", then=" + then + ", else=" + el + ")"
class Do(Node):
_fields = ["iter_specs", "test", "do_result", "cmds"]
def __str__(self):
specs =""
for spec in self.iter_specs:
specs += (str(spec) + ", ")
test = str(self.test)
do_result = str(self.do_result)
return "Do((" + specs + "), " + test + do_result + str(self.cmds) + ")"
class IterSpec(Node):
_fields = ["iden", "init", "step"]
def __str__(self):
return "IterSpec(" + str(self.iden) + ", " + str(self.init) + ", " + str(self.step) + ")"
|
def enum(**enums):
return type('Enum', (), enums)
control = enum(CHOICE_BOX = 0,
TEXT_BOX = 1,
COMBO_BOX = 2,
INT_CTRL = 3,
FLOAT_CTRL = 4,
DIR_COMBO_BOX = 5,
CHECKLIST_BOX = 6,
LISTBOX_COMBO = 7,
TEXTBOX_COMBO = 8,
CHECKBOX_GRID = 9,
GPA_CHECKBOX_GRID = 10,
SPIN_BOX_FLOAT = 11)
dtype = enum(BOOL = 0,
STR = 1,
NUM = 2,
LBOOL = 3,
LSTR = 4,
LNUM = 5,
LOFL = 6,
COMBO = 7,
LDICT = 8 )
substitution_map = {'On': 1,
'Off': 0,
'On/Off': 10,
'ANTS & FSL': 11,
'3dAutoMask & BET': 12,
'AFNI & BET' : 12,
'ALFF':'alff',
'f/ALFF':'falff',
'ReHo':'reho',
'ROI Average SCA':'sca_roi',
'Multiple Regression SCA':'sca_tempreg',
'VMHC':'vmhc',
'Network Centrality':'centrality',
'Dual Regression':'dr_tempreg',
'ROI Average Time Series Extraction': 'roi_average',
'ROI Voxelwise Time Series Extraction': 'roi_voxelwise',
}
multiple_value_wfs = ['runAnatomicalPreprocessing',
'runFunctionalPreprocessing',
'runRegistrationPreprocessing',
'runRegisterFuncToMNI',
'runAnatomicalToFunctionalRegistration',
'runSegmentationPreprocessing',
'runNuisance',
'runFrequencyFiltering',
'runMedianAngleCorrection',
'runScrubbing',
                      'runFristonModel',
                      'runEPI_DistCorr']
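# Quick illustration (added; not part of the original file): `enum` builds a
# plain class whose attributes act as named integer constants, and the maps
# above translate GUI labels into internal keys.
if __name__ == "__main__":
    assert control.TEXT_BOX == 1
    assert dtype.COMBO == 7
    assert substitution_map['ReHo'] == 'reho'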
|
'''
At CodingNinjas, we love to play with marbles. We have many marble games, but the most popular one is “Target Marbles”. Now, our marbles are unique. Each marble has a number on it.
In Target Marbles, the player is given a number in the starting and this number is called target. The player is also given N number of marbles to play with. Now, player has to arrange the marbles in a specific way such that sum of the values of at least one of the continuous subset of the arrangement is equal to given target.
Now, NinjaCoder came to play this game and made an arrangement of marbles. The judges of the game need your help. You have to determine if NinjaCoder has won it or not.
Input Format :
First line contains number of marbles(N) and target (target_number) that was assigned to NinjaCoder. Second line contains N space separated integers, which represent arrangement of the marbles and value written on that particular marble.
Constraints:
1<= N <=100
1<=target_number<=10000
Value on the marbles lies in the range [0, 1000].
Output Format :
You have to print “true” if NinjaCoder wins with the given arrangement, and you have to print the values of the winning continuous subset. If there is more than one such continuous subset, print the values of the first one. If NinjaCoder is unable to win, you just have to print “false”.
Sample Input 1 :
10 10
9 1 2 3 4 5 5 16 17 19
Sample Output 1 :
true
9 1
Explanation:
Here, if the NinjaCoder arranges the given 10 marbles in this arrangement, then he/she will win the game. Now, there are many continuous subsets of marbles that will win the game such as (9,1) or (1, 2, 3, 4). Out of these winning combinations, you have to print the first one which is (9,1).
Sample Input 2 :
10 10
19 11 12 131 14 15 5 16 17 19
Sample Output 2:
false
'''
def SubarraySum(arr, n, ans):
curr_sum = arr[0]
st, i = 0, 1
while i <= n:
while curr_sum > ans and st < i-1:
curr_sum = curr_sum - arr[st]
st += 1
if curr_sum == ans:
print("true")
for j in range(st, i):
print(arr[j], end = " ")
return
if i < n:
curr_sum += arr[i]
i += 1
print("false")
n, ans = input().split()
n = int(n)
ans = int(ans)
arr = [int(x) for x in input().split()]
SubarraySum(arr, n, ans) |
"""
Given a binary tree, return the zigzag level order traversal of its nodes'
values. (ie, from left to right, then right to left for the next level and
alternate between).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
"""
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return a list of lists of integers
def zigzagLevelOrder(self, root):
if root is None:
return []
res = []
queue = []
rev = False # Reverse direction
level = []
queue.append(root)
queue.append(None)
while queue:
root = queue.pop(0)
if root is None:
if queue:
queue.append(None)
res.append(level)
level = []
rev = not rev # Toggle direction
else:
if rev:
level.insert(0, root.val)
else:
level.append(root.val)
if root.left is not None:
queue.append(root.left)
if root.right is not None:
queue.append(root.right)
return res
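# Usage sketch (added for illustration): the TreeNode class below simply mirrors
# the commented-out definition above; it is not part of the original snippet.
if __name__ == "__main__":
    class TreeNode:
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None

    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    assert Solution().zigzagLevelOrder(root) == [[3], [20, 9], [15, 7]]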
|
'''
language: python
Description:
version: beta
Author: xiaoshuyui
Date: 2020-09-15 13:53:11
LastEditors: xiaoshuyui
LastEditTime: 2020-09-22 11:20:14
'''
__version__ = '0.0.0'
__appname__ = 'show and search'
|
def contains_magic_number(list1, magic_number):
for i in list1:
if i == magic_number:
print("This list contains the magic number")
            # Break here; otherwise the loop keeps scanning needlessly.
            break
    else:
        # This for-else branch runs only when the loop finishes without a break,
        # i.e. the magic number was never found.
        print("This list does NOT contain the magic number")
if __name__ == "__main__":
contains_magic_number(range(10), 3)
|
def dfs_inorder(tree):
    # In-order: left subtree, current node, right subtree.
    if tree is None: return []
    out = []
    out.extend(dfs_inorder(tree.left))
    out.append(tree.value)
    print(tree.value)
    out.extend(dfs_inorder(tree.right))
    return out
def dfs_preorder(tree):
    # Pre-order: current node, left subtree, right subtree.
    if tree is None: return []
    out = []
    out.append(tree.value)
    print(tree.value)
    out.extend(dfs_preorder(tree.left))
    out.extend(dfs_preorder(tree.right))
    return out
def dfs_postorder(tree):
    # Post-order: left subtree, right subtree, current node.
    if tree is None: return []
    out = []
    out.extend(dfs_postorder(tree.left))
    out.extend(dfs_postorder(tree.right))
    out.append(tree.value)
    print(tree.value)
    return out
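# A small usage check (added for illustration). The Node class below is an
# assumption: the original only requires that tree nodes expose .value, .left
# and .right.
if __name__ == "__main__":
    class Node:
        def __init__(self, value, left=None, right=None):
            self.value = value
            self.left = left
            self.right = right

    #     2
    #    / \
    #   1   3
    root = Node(2, Node(1), Node(3))
    assert dfs_inorder(root) == [1, 2, 3]
    assert dfs_preorder(root) == [2, 1, 3]
    assert dfs_postorder(root) == [1, 3, 2]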
|
def round_down(num, digits: int):
    """Truncate ``num`` toward zero.

    A negative ``digits`` keeps ``abs(digits)`` decimal places; a non-negative
    ``digits`` truncates to a multiple of ``10 ** digits`` (tens, hundreds, ...).
    """
    a = float(num)
    if digits < 0:
        b = 10 ** int(abs(digits))
        answer = int(a * b) / b
    else:
        b = 10 ** int(digits)
        answer = int(a / b) * b
    # The result may only collapse to 0 when the input was already close to 0.
    assert (-0.01 < num < 0.01) or answer != 0
    return answer
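# Illustrative checks (added; not part of the original): they follow directly
# from the truncation behaviour documented in the docstring above.
if __name__ == "__main__":
    assert round_down(123.456, -2) == 123.45   # keep two decimal places
    assert round_down(123.456, 0) == 123       # drop the fractional part
    assert round_down(1234, 2) == 1200         # truncate to a multiple of 100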
|
INPUT = {
2647: [
list("#....#####"),
list(".##......#"),
list("##......##"),
list(".....#..#."),
list(".........#"),
list(".....#..##"),
list("#.#....#.."),
list("#......#.#"),
list("#....##..#"),
list("...##....."),
],
1283: [
list("######..#."),
list("#.#..#.#.."),
list("..#..#...#"),
list(".#.##..#.."),
list("#......#.."),
list("#.#....##."),
list(".#.....#.#"),
list("#.#..#.#.#"),
list(".#......##"),
list("...##....."),
],
3547: [
list("#.#.#.###."),
list("#........."),
list("#....##..."),
list("#.....#..#"),
list("#.....#.#."),
list("##..##...#"),
list("#...##...."),
list("......#..#"),
list("#...##...."),
list(".....###.#"),
],
1451: [
list("##..#.#..."),
list("#.#......."),
list("##.#.....#"),
list("....#....."),
list("...#...##."),
list("......#.#."),
list("#...##.##."),
list("........#."),
list(".#.##.#..."),
list("..##..#..."),
],
3137: [
list("....#.##.#"),
list("#....#...#"),
list("..#.#....."),
list("...####..#"),
list(".#.###...#"),
list(".......#.."),
list("##.##.#..#"),
list(".#.##....#"),
list("#...#....#"),
list("..##.##..#"),
],
2897: [
list("###..#.##."),
list("..#......#"),
list(".....#...."),
list("###.#....#"),
list("#.#..#...#"),
list(".#...##..#"),
list("##..##.##."),
list("#.....#..#"),
list(".#......##"),
list("#.#.#.##.#"),
],
1093: [
list("..#.#.#.#."),
list("#.#......."),
list("..##....#."),
list(".#.....#.#"),
list("#........#"),
list(".#....#..#"),
list("##....#..#"),
list("#.##..#..#"),
list("..###...##"),
list(".######.##"),
],
1217: [
list("#..#....##"),
list("#.....#..."),
list("##...##..#"),
list("#.....#..."),
list("..#.#..#.."),
list("#..#....##"),
list(".##.#....."),
list("......#..."),
list(".#........"),
list(".#..###.#."),
],
2801: [
list("###..##.#."),
list(".........#"),
list("##.#...###"),
list("#......#.."),
list("#........#"),
list("......#..."),
list("##.####..."),
list(".....##..."),
list("..#..#.##."),
list("...###.##."),
],
1361: [
list("...#.##..#"),
list("....#....."),
list("###......."),
list("#......#.."),
list(".......##."),
list("#...#..#.."),
list("#.....##.#"),
list("##........"),
list("#.#......."),
list("###.#..###"),
],
2063: [
list("...#....##"),
list("##...#..##"),
list("#........#"),
list("........##"),
list("#.......##"),
list("#........."),
list("##.....##."),
list(".....##..#"),
list(".#.##.#..."),
list(".#..#####."),
],
3797: [
list("##..#...#."),
list(".###.#.##."),
list(".....#.##."),
list("..#......."),
list("...#.#...."),
list("........##"),
list("#.#.#.##.#"),
list("#.....#.##"),
list("#.......#."),
list(".....#.##."),
],
1289: [
list("####.##.#."),
list(".....#...."),
list("#..#.#...."),
list("####...#.."),
list("#.#..#..#."),
list(".#.##..#.."),
list("#........#"),
list("....#..#.."),
list("........#."),
list("###.#.####"),
],
1427: [
list("##.##..##."),
list("###..#.##."),
list("#..##...#."),
list("#..#.#...#"),
list("#........#"),
list("#...##...."),
list("#........#"),
list(".....#..#."),
list(".####....#"),
list("##.#.##.#."),
],
1951: [
list("....##.#.#"),
list(".........#"),
list("#........#"),
list(".#..#...#."),
list(".....#####"),
list("#......#.#"),
list("...##....#"),
list("......#..."),
list("..#...#..#"),
list("....####.#"),
],
1483: [
list("....####.."),
list(".......#.#"),
list("###..#..##"),
list("...#.#...#"),
list("#..##...##"),
list("##.#......"),
list("#...#..#.."),
list("..#...#.##"),
list(".........#"),
list(".#...#...."),
],
1789: [
list("##..#####."),
list("....#....#"),
list("........#."),
list("..#.#..#.#"),
list("..##.#..##"),
list(".........#"),
list(".........#"),
list("#..#.#..##"),
list("....##...."),
list("#.#......."),
],
2129: [
list("#.###.#..#"),
list("....##...#"),
list(".#..#..##."),
list("...###.##."),
list("..#..#...#"),
list("....##...#"),
list("#........."),
list("#...#..###"),
list("#...#....."),
list("...#....##"),
],
2137: [
list("..#.####.#"),
list("##...#.#.."),
list(".......###"),
list(".#.....#.#"),
list(".#....##.#"),
list("#.......#."),
list("#....#...#"),
list("#.....####"),
list("......##.#"),
list("..#####.##"),
],
3761: [
list(".####.#..."),
list("####..#..#"),
list("#...##..##"),
list(".#.....#.#"),
list("....#....#"),
list("#.......#."),
list("...#..#..#"),
list("#.##...##."),
list("...###...#"),
list("...##.#..#"),
],
1327: [
list("..####.#.#"),
list("#..#......"),
list("......#.##"),
list("#..##....."),
list("..##.##..#"),
list("#.#.#....."),
list("####.....#"),
list("..#......."),
list("#.#...##.."),
list("#.##....#."),
],
2741: [
list(".#..#...#."),
list("#....#..#."),
list("......##.#"),
list("....#.#..#"),
list("........##"),
list("...#..#..."),
list("......##.."),
list("#...#..#.#"),
list("......##.."),
list("..#..#..#."),
],
1699: [
list(".###..####"),
list("##.....#.#"),
list(".....##.##"),
list("#.#...##.."),
list(".#........"),
list(".#....#..#"),
list("#..#....#."),
list(".#...#...#"),
list("#.......#."),
list("##.#..#..#"),
],
1151: [
list("..#.##...."),
list("##....#..."),
list("###.#..#.#"),
list("#.......##"),
list("....#.#..#"),
list("#...###..."),
list(".#..#.#..#"),
list("#.#..##..#"),
list(".#.#.#.#.."),
list(".###..####"),
],
2273: [
list("#.#.#.#.##"),
list(".........."),
list("#......#.."),
list("#.....#..."),
list("#.#...#..."),
list("##....##.."),
list("##..##.#.."),
list("#.#####.##"),
list("##.##...##"),
list("#...##..##"),
],
1999: [
list("##.##...##"),
list("#......#.."),
list("##..#....."),
list("#........#"),
list("#.#...####"),
list("..#....#.#"),
list("#..#...#.."),
list(".........#"),
list("#...##...."),
list("##.##.##.."),
],
1721: [
list("....##...#"),
list("###.#....#"),
list(".##..#...."),
list(".#.#.#...."),
list("...##....#"),
list("##..#....#"),
list("#....#.###"),
list("#.....##.."),
list("....#...##"),
list("..#.#.#..#"),
],
2521: [
list("..#######."),
list("#.#..##.#."),
list(".#....##.#"),
list("..#...####"),
list(".......##."),
list("##...###.."),
list("...##....#"),
list(".##.#....."),
list("###..##..#"),
list("####.##.#."),
],
2111: [
list("..#.#..#.."),
list("...#.....#"),
list("..####...#"),
list(".#.#..##.#"),
list(".##..#.##."),
list("........##"),
list("........##"),
list("#..#.#...."),
list("...#.###.."),
list(".#.#...#.."),
],
2767: [
list(".#######.."),
list("##.......#"),
list("#...#.##.."),
list("....#...##"),
list("#........#"),
list("..#.###..."),
list("....#..#.#"),
list("##....#.##"),
list("..##....##"),
list(".#####.#.."),
],
2141: [
list("####.#...."),
list("#..#.#...#"),
list("...#..#..#"),
list(".......#.."),
list(".....###.#"),
list("#....#...."),
list(".......#.#"),
list(".#...#..##"),
list("...#......"),
list(".###.####."),
],
2557: [
list(".#.##..#.."),
list("..##.....#"),
list("#.#.#....#"),
list("..##...#.."),
list("...#..##.#"),
list(".........."),
list("##......##"),
list("#..#......"),
list("#.#..#...#"),
list("##.#####.."),
],
2269: [
list(".#.#...##."),
list("#.......##"),
list("#.....##.."),
list("##.#......"),
list("#.##..###."),
list(".#.....##."),
list("....#....#"),
list("....#...##"),
list("#..##....."),
list("#.#.#.#.##"),
],
3511: [
list(".#.#.##..."),
list(".#.....##."),
list(".#....#..#"),
list("#.#......#"),
list("#.#.#....."),
list("#........#"),
list("..#......."),
list(".##.#....."),
list("##.#.....#"),
list("..####..##"),
],
2789: [
list("#......#.."),
list("#...#....."),
list("#........."),
list(".......#.#"),
list("...#....##"),
list("#.##..###."),
list("#...##...#"),
list(".........#"),
list(".........#"),
list(".###..##.."),
],
2971: [
list("#.##.#...."),
list("...#.....#"),
list(".#....#..."),
list("#.#..##..."),
list("#.....#..."),
list("####.....#"),
list("#..###..##"),
list("#....#...."),
list("#..#.##..."),
list("#.#..###.."),
],
3719: [
list("#.###....."),
list("...#.....#"),
list("...##...##"),
list(".#..#.#..#"),
list("#..#.#..#."),
list("#.#..#..##"),
list("#...###..#"),
list(".#.#..#.##"),
list("........#."),
list("#....###.."),
],
1901: [
list(".#...##.##"),
list("#........."),
list(".#.#.....#"),
list("#.##.....#"),
list("#........#"),
list("#....#...#"),
list(".....##.##"),
list("##.###..##"),
list("....#....#"),
list("....##..##"),
],
3191: [
list("#.#..###.#"),
list("#...#..##."),
list("#.....#..."),
list(".#.#.#...."),
list(".#..##...."),
list("#.....#.#."),
list(".##......."),
list("....#....#"),
list("#..##.#..."),
list("####....##"),
],
3709: [
list("..#......#"),
list("#..#...#.#"),
list("#.##....#."),
list(".#..#.##.."),
list("..#......#"),
list("#....##..."),
list("##........"),
list("....#....#"),
list(".........#"),
list(".#.#..###."),
],
1613: [
list("...##..##."),
list("#......#.."),
list("..##.#..##"),
list("......##.."),
list(".#..#..##."),
list(".......##."),
list(".......#.#"),
list("...#.#...."),
list("#......#.#"),
list("###..#...."),
],
2441: [
list("..#.######"),
list("#.#......."),
list("#..#.#...."),
list("....#...##"),
list("#...#...##"),
list("#.##...#.#"),
list("........##"),
list("#.#...#..."),
list("#..####.##"),
list("#.##.####."),
],
1409: [
list("..####.#.#"),
list("..##....#."),
list("..#.#...#."),
list("..##.##..."),
list(".#.##....#"),
list("#.....##.#"),
list("####.....#"),
list("###....#.."),
list("####..#.#."),
list("#..##.##.#"),
],
1523: [
list(".#.##..##."),
list("#..#.#...."),
list("##.#.#...#"),
list("....#.##.#"),
list("#........#"),
list("#.#......."),
list("#...##...#"),
list("...#..##.#"),
list("#.##...#.."),
list(".####..#.."),
],
1367: [
list("#..#...#.#"),
list("#.#......."),
list("..#..#...."),
list(".###..###."),
list("###..#.##."),
list("##...#..#."),
list("#..#...#.#"),
list("......##.."),
list("##.....#.#"),
list(".#####..##"),
],
1783: [
list("...#.####."),
list(".####..#.."),
list("#....#.###"),
list("#.#..#.#.#"),
list("#.#.#.#..#"),
list("#.......##"),
list("#.##.#.#.."),
list(".#.#....#."),
list("#..#.#...#"),
list(".###..##.#"),
],
1871: [
list(".##..#.##."),
list("#........#"),
list("#...#....#"),
list("##.#..##.."),
list("##.....##."),
list("#.....#.##"),
list("........##"),
list("....#....#"),
list("#........."),
list("....#.#..#"),
],
3217: [
list("#.#...#.##"),
list(".........#"),
list(".........#"),
list("#...#....."),
list("#....#.#.#"),
list(".........#"),
list("...#.##.##"),
list("#...#....."),
list(".#..#....#"),
list("#..###.#.#"),
],
3163: [
list("...##.#.##"),
list("#.#......#"),
list("....#...##"),
list("#.......##"),
list("###..#.#.."),
list(".#....####"),
list("##....#.##"),
list("#.......#."),
list(".....#..#."),
list(".##.#.#.##"),
],
3271: [
list("##.#.#.##."),
list("##....##.#"),
list("#.#.##..##"),
list("#.#...##.#"),
list(".##......#"),
list("#.....#.#."),
list("#........#"),
list("##..##...."),
list("#.#..##..#"),
list("#..#.####."),
],
2707: [
list("..###.#..."),
list("#...#....."),
list("#.#..#...."),
list("#..##...##"),
list(".###......"),
list(".#..##...#"),
list("#...#....."),
list("....#....."),
list("#..#.#...."),
list(".##....#.#"),
],
3083: [
list("##..#.#.##"),
list("#..#....##"),
list(".........#"),
list("..#.#...##"),
list("..#......."),
list(".#.#.....#"),
list("..#..#.#.."),
list("#...#.#..#"),
list("#..#.#...."),
list("#.###..##."),
],
1051: [
list("####...##."),
list("...#.#...#"),
list(".........."),
list("..#......."),
list("#......#.."),
list(".#.##.##.."),
list("#....#.#.#"),
list("#..#.#...#"),
list("#.#..##..#"),
list("......###."),
],
3767: [
list(".#..##.###"),
list("...#.#...."),
list("..#.....#."),
list("#.#......."),
list(".#.....#.#"),
list("##..#....#"),
list("#...#..#.#"),
list("........##"),
list("#........#"),
list("..#....##."),
],
2267: [
list(".#..#..#.."),
list(".#.#.#...."),
list(".#......#."),
list("#...#....#"),
list(".###..#..."),
list(".##.#...##"),
list("..#.##.##."),
list("...#.#.##."),
list("##.#.##..#"),
list(".#.##....."),
],
1973: [
list("#.#####..#"),
list(".#.......#"),
list("#..#.#..#."),
list("#.#.#.#.#."),
list(".##......."),
list("#.#.....#."),
list(".#.......#"),
list("#...##.#.#"),
list("##.......#"),
list(".##...####"),
],
3671: [
list("#..##.#.##"),
list("....##...#"),
list(".###.##..."),
list(".........#"),
list("#..#.....#"),
list("..##...#.."),
list("......#..."),
list("..#..#..##"),
list("..#......."),
list("##..###..#"),
],
3221: [
list("#.#..###.#"),
list("#..#....##"),
list("#..#......"),
list("#...#...##"),
list("..#..#..#."),
list("#..##...#."),
list("...#....#."),
list(".....#..#."),
list("##..#..#.."),
list(".....#...#"),
],
1549: [
list(".###.##..#"),
list("#.#.##...#"),
list("#....#...."),
list(".........."),
list("#.#......#"),
list("##.#.#..##"),
list("...#.#..##"),
list("........#."),
list("#.#....###"),
list("#....#...#"),
],
3461: [
list(".######..#"),
list("#.......##"),
list(".......#.."),
list(".#...#...."),
list("..##....#."),
list("#.....##.."),
list("##.#.#..#."),
list(".........#"),
list("##.##.#..."),
list("....#...##"),
],
2459: [
list("..##.##.#."),
list("...#..#..."),
list(".........#"),
list("#.#..#..##"),
list("#.###.#..."),
list("##.#......"),
list(".......#.."),
list(".........#"),
list("........##"),
list("#.##...#.."),
],
3203: [
list(".#...####."),
list("..##..#.#."),
list("#..#..##.."),
list("#.#....##."),
list("...#.#...."),
list(".......###"),
list("#.....##.."),
list("....#....#"),
list("#......#.."),
list("###......."),
],
2203: [
list("#.#..##.##"),
list(".......#.."),
list("......#.##"),
list("#.......##"),
list("#..##.##.#"),
list("..#.....##"),
list("#.##.....#"),
list("#.#....#.."),
list(".##.....##"),
list("......#..."),
],
3637: [
list("#...###.#."),
list("#........."),
list("..#......."),
list("...#.....#"),
list("#..##....#"),
list("#........#"),
list(".......#.."),
list("#....#.#.."),
list("#.#..##..#"),
list("..#.#..##."),
],
2467: [
list("..##.##..."),
list("##....####"),
list("...#.#.#.#"),
list("#.##...#.#"),
list("...##.##.."),
list("#.....#..."),
list("##........"),
list("..#...#.#."),
list("#...####.#"),
list("#......###"),
],
2411: [
list("...##....#"),
list("...##..###"),
list("...##.####"),
list("#.#..##.#."),
list("..##.#.###"),
list(".#..#.###."),
list("....####.#"),
list(".....##.#."),
list("#........."),
list(".#..#..###"),
],
2221: [
list("####.....#"),
list("#.#.....##"),
list(".#....#..."),
list(".#.#......"),
list(".##..#..#."),
list("....#....."),
list(".........#"),
list("##.......#"),
list("#....#...."),
list(".##.######"),
],
1487: [
list("..#..##..."),
list(".........#"),
list("#..#...###"),
list("....#...#."),
list(".#...##.#."),
list(".....#.#.#"),
list(".....##..."),
list("#.##......"),
list("#.#......."),
list("#.#####.#."),
],
1481: [
list("#.###.##.."),
list("....##...#"),
list("....#....."),
list("...#......"),
list("##.###.#.#"),
list("#.##..####"),
list("..#......#"),
list(".#....##.#"),
list("..##.##.#."),
list(".#####.#.#"),
],
1669: [
list("#...##.##."),
list("...#..#..."),
list(".##..#.#.#"),
list("#..#..#..#"),
list("#......#.#"),
list(".#......##"),
list("........#."),
list("......#..#"),
list(".##..#.#.#"),
list("##.##....#"),
],
3167: [
list(".#.####..."),
list(".........#"),
list("#......##."),
list(".....#...."),
list("..#.#...##"),
list("#.#.####.#"),
list("...#....#."),
list(".........#"),
list("#...#.#..#"),
list("#.#.#.#.#."),
],
3347: [
list("###...##.."),
list("#.#......#"),
list("...#.....#"),
list(".........."),
list("#.#.....#."),
list("..####..##"),
list("..#.#.#..#"),
list("##...#..#."),
list("..##.....#"),
list("#..#....#."),
],
2213: [
list("#..#####.#"),
list(".........."),
list("#..#.##.#."),
list("...###.#.#"),
list("......##.."),
list("......#..#"),
list(".##.....##"),
list("..#....###"),
list("...####..#"),
list(".####.#.##"),
],
3329: [
list("..##...#.."),
list("#.#....#.#"),
list("#...#..#.."),
list("......#.##"),
list("#...####.#"),
list(".........."),
list("##....##.#"),
list("#......##."),
list("....##...#"),
list("..####.##."),
],
3851: [
list("#.#....##."),
list(".........#"),
list("#.....#..."),
list("##.##....."),
list("...#.###.."),
list("#....##..."),
list(".....#.##."),
list(".#........"),
list("#......#.#"),
list("...#..#..#"),
],
2659: [
list("#.#...#.#."),
list(".....#.##."),
list("#..##.####"),
list("#.#.##...."),
list("#....#..#."),
list("...#...#.."),
list("...#....#."),
list("#....#.#.."),
list(".##.#....#"),
list(".....#..#."),
],
1933: [
list(".####.##.."),
list("#..####..."),
list(".#..####.."),
list(".#.#.##..."),
list("......#.#."),
list("##........"),
list(".#.#.....#"),
list("#..#......"),
list("....#....."),
list("...#...##."),
],
3299: [
list("###.##..#."),
list(".......#.."),
list("...#...##."),
list("###...#.##"),
list("......##.."),
list("....#.#..#"),
list(".###......"),
list(".#.#####.."),
list("#..#.#..#."),
list(".....#.#.#"),
],
3691: [
list("...###...#"),
list("#........."),
list("#.#.....##"),
list("#.#....#.."),
list("#..#...#.."),
list(".........."),
list("##...##..#"),
list(".#...#...#"),
list("#.....#.##"),
list(".###..#..."),
],
3733: [
list("#..#.#####"),
list(".....#...."),
list("....###..#"),
list("#..#.#...."),
list("#.#..#.###"),
list("..###...##"),
list("......#.##"),
list("...###...."),
list("...#....#."),
list("..##......"),
],
2131: [
list("##.#..#.#."),
list(".#...#..##"),
list("#.......#."),
list("....##...#"),
list(".###..#..."),
list("...#####.."),
list(".....#...#"),
list("##..#..##."),
list("..##....#."),
list(".#...####."),
],
1723: [
list(".....#####"),
list(".#.#..#..."),
list("##......#."),
list("#.......##"),
list(".###...#.."),
list("#..#......"),
list("#........."),
list("......#..#"),
list(".........#"),
list(".###.##.##"),
],
3463: [
list("##.#....##"),
list("#....##..#"),
list("..#.#....."),
list("#.#...#..#"),
list("#....#...."),
list("..#....#.#"),
list("#...#..###"),
list("##....#.##"),
list("..#.#....."),
list(".#..#.##.."),
],
2549: [
list("#.####.#.."),
list("...##....#"),
list("##..#.##.#"),
list("..###.#..#"),
list("#.#......#"),
list("#........#"),
list("....#....."),
list("#......#.#"),
list("#....####."),
list("...##.#.##"),
],
1031: [
list("#..#.#.#.#"),
list("......##.."),
list("#........#"),
list(".###......"),
list("..#..#..#."),
list("##....##.."),
list("......#..."),
list("...#...###"),
list(".###...#.."),
list(".##.#.###."),
],
1979: [
list("#.######.."),
list(".#.#.....#"),
list("#........#"),
list("#..##....."),
list("##........"),
list("##.....#.."),
list("......#..."),
list(".........#"),
list(".#........"),
list("..#.#####."),
],
2939: [
list("#.#...#.##"),
list(".#..#....#"),
list(".#.....#.#"),
list("##......##"),
list("...#..##.."),
list("#....#.##."),
list("#...##.#.#"),
list("..#...#..."),
list("##.....#.."),
list(".....##.#."),
],
2381: [
list("..##.###.#"),
list("..##...#.."),
list(".#...#...."),
list("#......#.#"),
list("##.......#"),
list("#..####..."),
list("...#.#.#.#"),
list("#.##.....#"),
list("..#......#"),
list("#..#.##..."),
],
3943: [
list("#.#.###..#"),
list(".......###"),
list("#.#...###."),
list("#..##.#..#"),
list("#......#.."),
list("#.##...#.#"),
list("#........."),
list("##....##.#"),
list("....#.#..."),
list(".###.#...."),
],
1553: [
list("#####.####"),
list("#...#....."),
list("#.#.....#."),
list("##......#."),
list("#....#.#.."),
list(".#.....#.#"),
list("##....#.#."),
list("#........#"),
list(".........#"),
list(".#.....##."),
],
2351: [
list(".###.###.."),
list("#.....#..."),
list("##.##....#"),
list("..#..##.#."),
list("#.#......."),
list("#....#...."),
list("......##.#"),
list("##...##..#"),
list(".#.....#.."),
list(".#.###..#."),
],
2311: [
list("#.#.#..##."),
list("#..###.#.."),
list("...##..#.#"),
list("###......."),
list("##........"),
list("#.#......."),
list("..##.....#"),
list(".#.####..."),
list("..#.#.#..."),
list("###..##.#."),
],
1567: [
list("..###.#.##"),
list(".#.....###"),
list("#...#..##."),
list("#.......#."),
list(".......#.."),
list("#....#...."),
list("...#.##.#."),
list("....#...##"),
list("....#....#"),
list("#.#...##.."),
],
2579: [
list("#.##..##.."),
list("#......#.."),
list("#..#..#..#"),
list("##.......#"),
list("....##.#.#"),
list("#.####..#."),
list("#..#..#.##"),
list("#...#..#.#"),
list("...##...#."),
list("#..#.###.."),
],
3593: [
list(".#.##.#.##"),
list("#...#....#"),
list(".........."),
list("##....#..#"),
list("##......##"),
list("#........."),
list("......#..#"),
list("...#.....#"),
list("....#....#"),
list("##..###..#"),
],
2281: [
list("##....###."),
list("...#......"),
list("#......#.#"),
list("##.#..#..#"),
list("###.#..##."),
list(".#...#...#"),
list(".........."),
list(".#.###.#.."),
list("#..#......"),
list("#..#.##.#."),
],
1193: [
list(".......###"),
list("##..#..#.."),
list(".###...###"),
list("....#.###."),
list("..#...#..#"),
list("#.#....#.."),
list("...####..#"),
list("#....#..##"),
list(".#.......#"),
list(".#.#...##."),
],
3833: [
list("...#####.."),
list("#..####..."),
list("#.#....###"),
list("...##.#.##"),
list("..#...#..#"),
list(".##.#####."),
list("#..#..#..#"),
list("#...##...."),
list(".....#.#.."),
list(".##.##.#.#"),
],
2003: [
list(".#.###.#.."),
list(".........#"),
list("..#..#...."),
list("#........."),
list("#..##....#"),
list(".......#.#"),
list("......#..."),
list("#....##..#"),
list(".#......##"),
list("..#..##.#."),
],
2731: [
list("#.#..#..##"),
list("....#..#.#"),
list("..#...#..."),
list("..#..#...."),
list("#.#..#...#"),
list("#....##..."),
list("#........."),
list("#..##..#.#"),
list("#........."),
list(".###.#...."),
],
3881: [
list("..##......"),
list("#...#..#.#"),
list("##...#...."),
list("....#....."),
list("##.......#"),
list(".....#####"),
list("...#....##"),
list(".........#"),
list(".........."),
list("#..##.####"),
],
3673: [
list("##..###.#."),
list("...##....#"),
list("###.....##"),
list("#..#...#.#"),
list("#.##......"),
list("..#.#....."),
list("..#.#....#"),
list(".###.....#"),
list(".###.##..."),
list("###.#..#.#"),
],
1021: [
list("#..###.#.."),
list("###..##.#."),
list("#..##....#"),
list(".....###.."),
list("....##...#"),
list("....#....."),
list("#.##..#..#"),
list(".........."),
list(".......#.#"),
list("..#.##..#."),
],
2423: [
list("#.....####"),
list(".##.#....#"),
list(".#........"),
list("##.....#.."),
list("#.....###."),
list("#...#...#."),
list("#...#..#.#"),
list(".#..#..##."),
list("##.......#"),
list(".#####.###"),
],
3923: [
list("..#....###"),
list("#.....#..#"),
list("#...#.#.#."),
list(".#.......#"),
list("#..#.#...."),
list(".......#.#"),
list("##....##.#"),
list(".#..#...#."),
list("#...##..#."),
list("..#.#.#..#"),
],
2753: [
list("..####..#."),
list("#.......#."),
list("#.##.#..##"),
list("#.#.#....."),
list("#..#......"),
list("....#.#..."),
list(".#.#..#..#"),
list("#.....#..#"),
list("##.#..#..."),
list("#####....#"),
],
3929: [
list("....#####."),
list("##..#.##.."),
list("##.#.#.##."),
list("##...#.#.."),
list("#........#"),
list(".##.#..#.."),
list("#..#.##..."),
list("##..#...#."),
list(".....#...#"),
list("###..####."),
],
3041: [
list(".##.#..#.#"),
list("#..#...#.."),
list("###..#..#."),
list(".#.#....##"),
list("...##....."),
list("#....#..##"),
list("#........#"),
list("##.#...#.."),
list("##....#..#"),
list("...#..#..#"),
],
3433: [
list("..#.#.#..."),
list("#.#......."),
list(".....#...."),
list("..#......#"),
list("#..#.....#"),
list("........##"),
list("##..##.##."),
list("##........"),
list("#.#.##..##"),
list("###.###..#"),
],
2719: [
list("..##..#..#"),
list("#.##..##.."),
list("#......#.."),
list("#...##..##"),
list("..#..#.#.#"),
list("#......###"),
list("..###..#.."),
list("....#.#..#"),
list("....##...#"),
list("##..#..###"),
],
1201: [
list(".#...##.##"),
list("#........#"),
list("##...##..."),
list(".........."),
list(".....#.#.."),
list("#.##.....#"),
list("...#.##..#"),
list(".........#"),
list(".#.#.....#"),
list(".##...#..."),
],
1129: [
list("...####..#"),
list("......##.."),
list("#.....##.."),
list("#.......#."),
list("#......#.."),
list("...##....#"),
list("........##"),
list("##.#.#.#.."),
list("...#..##.#"),
list("...##....#"),
],
3019: [
list("..#...###."),
list(".....#.##."),
list("#.##.....#"),
list(".#.##..#.."),
list(".#..###..#"),
list("..#.####.#"),
list("#..#.#...#"),
list(".......#.#"),
list("#..##.#..#"),
list("#.##....##"),
],
1747: [
list("##.###.#.."),
list("#.......#."),
list("#...#..#.#"),
list("##...##.#."),
list("..###.#..#"),
list("#..#..##.."),
list("#...#....."),
list("..#......."),
list("...#..#.#."),
list(".##..##.##"),
],
1741: [
list(".##.#..#.#"),
list("#...##..##"),
list("#....#.#.#"),
list("##...##..#"),
list("##.......#"),
list("#...#..##."),
list("...#.##.##"),
list("...#..#.#."),
list(".......#.#"),
list(".#####.###"),
],
1867: [
list("#..##....."),
list(".......###"),
list("#..##....#"),
list("##...#...."),
list("...###...."),
list("##..#....."),
list(".##......."),
list("#.....###."),
list("#...#..#.#"),
list("...###...."),
],
2803: [
list(".#.##....#"),
list("#.####..#."),
list("#........."),
list("#.#......#"),
list(".......#.#"),
list("........#."),
list("..#..#.#.#"),
list("....###..."),
list("#...##...."),
list("...###...."),
],
3643: [
list("#..#..#.##"),
list("####.#..#."),
list("#.#...#.##"),
list(".#..#....."),
list("##....#..#"),
list(".##......."),
list(".......#.#"),
list("...##.#..."),
list(".....#.##."),
list("#...####.#"),
],
2437: [
list("..###..###"),
list("....#....."),
list(".........."),
list("#.#..#.###"),
list("##...####."),
list("....##...."),
list("...##....."),
list("##..#.##.."),
list("#......#.."),
list("#.#.....#."),
],
1069: [
list("..####...."),
list("##..##...#"),
list(".#..#..##."),
list(".#....##.#"),
list("###.#.#.##"),
list("...##..#.#"),
list("##....#..."),
list("#.#....#.#"),
list(".#.....#.#"),
list("#.#.#....."),
],
1381: [
list(".###.#.##."),
list("....#..#.."),
list("#.......##"),
list("#...#....."),
list(".#...#..##"),
list("...#....##"),
list("#..#.###.."),
list("..######.#"),
list("#....#...#"),
list("#######.#."),
],
2617: [
list("..##..#.#."),
list("#.....##.#"),
list("..#.#..#.."),
list(".##.#..#.."),
list("###...#.#."),
list(".###.##..."),
list("#.#......."),
list("#..##.#..#"),
list("##.....#.."),
list(".##..#..##"),
],
2393: [
list(".##..#.#.#"),
list("..#.#..###"),
list("..##..#.##"),
list("....#....."),
list("#...#....."),
list("##.#.....#"),
list(".#.#..#.#."),
list("##.....#.."),
list(".......#.#"),
list("####..#..."),
],
3529: [
list("#.#...##.#"),
list("......#..#"),
list(".........#"),
list("#.....#..."),
list(".......#.."),
list(".....#.#.#"),
list(".....#...."),
list("#....#.#.#"),
list("....#.##.#"),
list(".####.#..#"),
],
2953: [
list("...##...#."),
list("##.#.#..##"),
list("#...#....."),
list("##.#...###"),
list("...#......"),
list("#.#.#..#.#"),
list(".#...#...#"),
list("##....#.##"),
list(".......#.."),
list(".#.#..#..."),
],
3617: [
list("#..##...##"),
list("......#..."),
list("#....#...."),
list(".........."),
list(".######.##"),
list("##..#.#.##"),
list("#.#...#..."),
list("........#."),
list(".######.##"),
list("##...###.#"),
],
3863: [
list(".##.#...##"),
list("#...#....."),
list("..#.#....#"),
list("#....#..##"),
list(".....###.."),
list("#.#......#"),
list("#.......#."),
list("...#.....#"),
list("#........."),
list("..###....#"),
],
3727: [
list("#.###.##.#"),
list(".........."),
list("...##....."),
list("..#..#..##"),
list("#......###"),
list("#....##..."),
list("###.##...."),
list(".....#...."),
list("##.####.#."),
list("#..#.#.###"),
],
3803: [
list("###..#.##."),
list(".##......#"),
list(".........#"),
list("###.....##"),
list("....###..#"),
list(".......#.#"),
list("........##"),
list("#..#......"),
list("##......##"),
list("#.###..#.."),
],
1579: [
list("#...##.###"),
list(".....#.###"),
list(".##...#..."),
list("#.#..#..#."),
list("..##.....#"),
list(".........#"),
list(".........."),
list("#.....#.##"),
list(".....#...."),
list(".###..#..."),
],
1049: [
list("#..#.##.##"),
list("##......##"),
list("..#.##...#"),
list("#.......#."),
list("###.....#."),
list(".....#.#.#"),
list("...#......"),
list("..##......"),
list("#.#....#.."),
list("##..#.#..."),
],
2687: [
list("##..#.##.."),
list(".#........"),
list("##..#...#."),
list(".#.#.....#"),
list(".#..#.#..#"),
list("#.###..#.."),
list("..#......#"),
list("#.......##"),
list("#..#.....#"),
list("#.##.#..##"),
],
1637: [
list("#..##...##"),
list("##..#....#"),
list("...#....#."),
list("#....#...."),
list(".....#...#"),
list("#...#...##"),
list(".#....#..."),
list("#........."),
list("..#....#.."),
list(".#.####..."),
],
3527: [
list(".#....#.#."),
list("#.......#."),
list("..#....#.#"),
list("####.#.#.#"),
list("...#..#..."),
list("###..#.###"),
list("##..#....#"),
list("#.##....##"),
list("..#......#"),
list(".....#.#.."),
],
2963: [
list("#.#.#.#.#."),
list("#.....#..."),
list("##.#.....#"),
list("..##......"),
list("..#......."),
list(".#...#.##."),
list("###......#"),
list("##....#..#"),
list(".#...#..##"),
list("..##..##.#"),
],
2287: [
list("##.######."),
list(".#.##.##.."),
list("#..#....##"),
list("##.#.#...#"),
list(".......##."),
list("#...##...#"),
list("...##..#.."),
list("##....#.#."),
list("....#.##.."),
list("..#.#..###"),
],
3677: [
list("###.....##"),
list("#..#.#..#."),
list("#.#......."),
list(".....#..##"),
list(".........."),
list("......#.##"),
list(".....#..#."),
list("#..#...#.."),
list(".##......#"),
list("#...##.##."),
],
3559: [
list("..#..#.##."),
list("###......#"),
list("..#.##...."),
list("#.#..#...."),
list("##..##..##"),
list("..#...#.#."),
list("#.....#.##"),
list("....#....#"),
list("...#.#...#"),
list("...#.###.."),
],
2837: [
list("..#...#..."),
list(".....##..."),
list("#.#..#...#"),
list("....#....#"),
list("...####.##"),
list("#........."),
list("...#...##."),
list(".#..###.#."),
list("....#....."),
list(".###.##.#."),
],
3539: [
list("..##....#."),
list("........#."),
list("......#..#"),
list("...#..#..."),
list("###....###"),
list("#...#....."),
list(".#........"),
list("#.....#..."),
list("..##.#..#."),
list("..###..#.#"),
],
1667: [
list(".#..####.."),
list(".....#...."),
list("......#..."),
list("#.#...##.#"),
list("#...#.#..#"),
list("##.#.#...#"),
list("##..#..#.."),
list("#...##...#"),
list(".#..###..."),
list("..#..####."),
],
2791: [
list("#.##.###.#"),
list("...#..#..."),
list("##.....###"),
list("...#.#..##"),
list(".........#"),
list(".###...#.."),
list("...#.....#"),
list("##.....##."),
list("###......."),
list("#..#.#...."),
],
2609: [
list("..##.#...."),
list("##.#.#...#"),
list("#.#..#...."),
list("#........."),
list("...#..#..#"),
list("#...#.#..."),
list("##.##....#"),
list(".###......"),
list("##.....##."),
list("#.#...#.#."),
],
3061: [
list("####..#.##"),
list("#.....##.."),
list(".........."),
list("......#..."),
list("..#.#..###"),
list(".#.#..#..#"),
list(".#...#...#"),
list("#........#"),
list(".....#.#.."),
list("#..#....##"),
],
}
|
def solve(s):
if s==s[::-1]:
return 1
else:
return 0
s=str(input())
print(solve(s))
|
print('=' * 50)
print('AVALIADOR DE MÉDIA'.center(50, '-'))
print('=' * 50)
# Read the two grades and compute the average
nota1 = float(input('Primeira nota do aluno: '))
nota2 = float(input('Segunda nota do aluno: '))
media = (nota1 + nota2) / 2
print(f'A média do aluno é: {media}')
# Print a message for each grade range
if 0 <= media < 5:
print('O aluno está reprovado!')
elif 5 <= media < 7:
print('O aluno está de recuperação!')
elif 7 <= media <= 10:
print('O aluno foi aprovado!')
else:
print('Nota inválida!') |
"""Effects"""
class FxName:
"""FX name"""
def __init__(self, name):
self.name = name
BITCRUSHER = FxName('bitcrusher')
COMPRESSOR = FxName('compressor')
ECHO = FxName('echo')
FLANGER = FxName('flanger')
KRUSH = FxName('krush')
LPF = FxName('lpf')
PAN = FxName('pan')
PANSLICER = FxName('panslicer')
REVERB = FxName('reverb')
SLICER = FxName('slicer')
WOBBLE = FxName('wobble')
|
# Copyright (c) 2017-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This section is organized by N9K CLI commands
# show inventory
PATH_GET_NEXUS_TYPE = 'api/mo/sys/ch.json'
# conf t
# vlan <a,n-y>
# state active
PATH_VLAN_ALL = 'api/mo.json'
BODY_VLAN_ALL_BEG = '{"topSystem": { "children": [ {"bdEntity":'
BODY_VLAN_ALL_BEG += ' {"children": ['
BODY_VLAN_ALL_INCR = ' {"l2BD": {"attributes": {"fabEncap": "vlan-%s",'
BODY_VLAN_ALL_INCR += ' "pcTag": "1", "adminSt": "active"}}}'
BODY_VXLAN_ALL_INCR = ' {"l2BD": {"attributes": {"fabEncap": "vlan-%s",'
BODY_VXLAN_ALL_INCR += ' "pcTag": "1", "adminSt": "active",'
BODY_VXLAN_ALL_INCR += ' "accEncap": "vxlan-%s"}}}'
BODY_VLAN_ALL_CONT = ','
BODY_VLAN_ALL_END = ' ]}}]}}'
# The following was added to make simple Test case results more readible.
BODY_VLAN_ADD_START = (BODY_VLAN_ALL_BEG + BODY_VLAN_ALL_INCR +
BODY_VLAN_ALL_CONT)
BODY_VLAN_ADD_NEXT = BODY_VLAN_ALL_INCR + BODY_VLAN_ALL_CONT
BODY_VLAN_ADD = (BODY_VLAN_ALL_BEG + BODY_VLAN_ALL_INCR +
BODY_VLAN_ALL_CONT + BODY_VLAN_ALL_END)
BODY_VXLAN_ADD = (BODY_VLAN_ALL_BEG + BODY_VXLAN_ALL_INCR +
BODY_VLAN_ALL_CONT + BODY_VLAN_ALL_END)
# conf t
# vlan <n>
# state active
PATH_VLAN = 'api/mo/sys/bd/bd-[vlan-%s].json'
BODY_VLAN_ACTIVE = '{"l2BD": {"attributes": {"adminSt": "active"}}}'
# conf t
# vlan <n>
# state active
# vn-segment <vni>
BODY_VXLAN_ACTIVE = '{"l2BD": {"attributes": {"adminSt": "active",'
BODY_VXLAN_ACTIVE += ' "accEncap": "vxlan-%s"}}}'
# conf t
# int ethernet x/x OR int port-channel n
# where %s is "phys-[eth1/19]" OR "aggr-[po50]"
PATH_IF = 'api/mo/sys/intf/%s.json'
# THEN
# switchport trunk native vlan <vlan>
# switchport trunk allowed vlan none | add <vlan> | remove <vlan>
# first %s is "l1PhysIf" | "pcAggrIf", 2nd trunkvlan string, 3rd one
# native vlan
BODY_TRUNKVLAN = '{"%s": {"attributes": {"trunkVlans": "%s"}}}'
BODY_NATIVE_TRUNKVLAN = '{"%s": {"attributes": {"trunkVlans": "%s",'
BODY_NATIVE_TRUNKVLAN += ' "nativeVlan": "%s"}}}'
# conf t
# feature nv overlay
PATH_VXLAN_STATE = 'api/mo/sys/fm/nvo.json'
# where %s is "enable" | "disable"
BODY_VXLAN_STATE = '{"fmNvo": {"attributes": {"adminSt": "%s"}}}'
# conf t
# feature vn-segment-vlan-based
PATH_VNSEG_STATE = 'api/mo/sys/fm/vnsegment.json'
BODY_VNSEG_STATE = '{"fmVnSegment": {"attributes": {"adminSt": "%s"}}}'
# conf t
# int nve%s
# no shut
# source-interface loopback %s
PATH_NVE_CREATE = 'api/mo/sys/epId-%s.json'
BODY_NVE_CREATE = '{"nvoEp": {"attributes": {"epId": "%s"}}}'
BODY_NVE_ADD_LOOPBACK = '{"nvoEp": {"attributes": {"adminSt": "%s",'
BODY_NVE_ADD_LOOPBACK += ' "sourceInterface": "lo%s"}}}'
# conf t
# int nve%s
# no shut
# source-interface loopback %s
# conf t
# int nve%s
# [no] member vni %s mcast-group %s
PATH_VNI_UPDATE = 'api/mo/sys/epId-%s/nws/vni-%s.json'
BODY_VNI_UPDATE = '{"nvoNw": {"attributes": {"vni": "%s", "vniRangeMin": "%s",'
BODY_VNI_UPDATE += ' "vniRangeMax": "%s", "mcastGroup": "%s", "isMcastRange":'
BODY_VNI_UPDATE += ' "yes", "suppressARP": "no", "associateVrfFlag": "no"}}}'
# channel-group x mode active is not immediately available beneath the
# ethernet interface data. Instead one needs to gather pc channel members
# and search for ethernet interface.
PATH_GET_PC_MEMBERS = 'api/mo/sys/intf.json?query-target=subtree&'
PATH_GET_PC_MEMBERS += 'target-subtree-class=pcRsMbrIfs'
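# Illustrative only (added, not part of the original module): the constants are
# plain %-format templates, so request paths and bodies are built by
# interpolation. The interface name and VLAN string below are made-up values.
if __name__ == "__main__":
    print(PATH_IF % "phys-[eth1/19]")
    print(BODY_TRUNKVLAN % ("l1PhysIf", "265"))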
|
message = [ 'c', 'a', 'k', 'e', ' ',
            'p', 'o', 'u', 'n', 'd', ' ',
            's', 't', 'e', 'a', 'l']
def reversed_words(message):
    # Reverse the whole message first, then reverse each word in place, so the
    # word order is reversed while each word still reads normally.
    message.reverse()
    cur_pos = 0
    end_pos = len(message)
    current_word_start = cur_pos
    while cur_pos < end_pos:
        if message[cur_pos] == ' ':
            message[current_word_start:cur_pos] = reversed(message[current_word_start:cur_pos])
            current_word_start = cur_pos + 1
        if cur_pos == end_pos - 1:
            message[current_word_start:end_pos] = reversed(message[current_word_start:end_pos])
        cur_pos += 1
reversed_words(message)
# Prints: 'steal pound cake'
print(''.join(message))
|
def bubble_sort(arry):
    n = len(arry)  # length of the array
    for i in range(n):
        for j in range(1, n - i):
            if arry[j - 1] > arry[j]:  # if the previous element is larger than the next one
                arry[j - 1], arry[j] = arry[j], arry[j - 1]  # swap them
    return arry
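# Simple usage check (added for illustration):
if __name__ == "__main__":
    assert bubble_sort([5, 1, 4, 2, 8]) == [1, 2, 4, 5, 8]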
|
class EquationClass:
def __init__(self):
pass
def calculation(self):
...
def info(self):
...
|
class BufferFile:
    def __init__(self, write):
        self.write = write
    def readline(self): pass
    def writelines(self, lines):
        # Forward each line to the wrapped write callable.
        for line in lines:
            self.write(line)
    def flush(self): pass
    def isatty(self): return 1
|
# Class could be designed to be used in cooperative multiple inheritance
# so `super()` could be resolved to some non-object class that is able to receive passed arguments.
class Shape(object):
def __init__(self, shapename, **kwds):
self.shapename = shapename
# in case of ColoredShape the call below will be executed on Colored
# so warning should not be raised
super(Shape, self).__init__(**kwds)
class Colored(object):
def __init__(self, color, **kwds):
self.color = color
super(Colored, self).__init__(**kwds)
class ColoredShape(Shape, Colored):
pass
cs = ColoredShape(color='red', shapename='circle')
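# Quick check (added for illustration): both __init__ methods ran, because the
# MRO routes Shape's super().__init__() call through Colored before object.
assert cs.color == 'red' and cs.shapename == 'circle'
print([c.__name__ for c in ColoredShape.__mro__])
# ['ColoredShape', 'Shape', 'Colored', 'object']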
|
# Space Walk
# by Sean McManus
# www.sean.co.uk / www.nostarch.com
WIDTH = 800
HEIGHT = 600
player_x = 500
player_y = 550
def draw():
screen.blit(images.backdrop, (0, 0))
|
def countPaths(maze, rows, cols):
if (maze[0][0] == -1):
return 0
# Initializing the leftmost column
for i in range(rows):
if (maze[i][0] == 0):
maze[i][0] = 1
        # If we encounter a blocked cell in the
        # leftmost column, no cell below it in that
        # column can be reached from the start.
else:
break
# Similarly initialize the topmost row
for i in range(1, cols, 1):
if (maze[0][i] == 0):
maze[0][i] = 1
        # If we encounter a blocked cell in the
        # topmost row, no cell to its right in that
        # row can be reached from the start.
else:
break
# The only difference is that if a cell is -1,
# simply ignore it else recursively compute
# count value maze[i][j]
for i in range(1, rows, 1):
for j in range(1, cols, 1):
# If blockage is found, ignore this cell
if (maze[i][j] == -1):
continue
# If we can reach maze[i][j] from
# maze[i-1][j] then increment count.
if (maze[i - 1][j] > 0):
maze[i][j] = (maze[i][j] +
maze[i - 1][j])
# If we can reach maze[i][j] from
# maze[i][j-1] then increment count.
if (maze[i][j - 1] > 0):
maze[i][j] = (maze[i][j] +
maze[i][j - 1])
# If the final cell is blocked,
# output 0, otherwise the answer
if (maze[rows - 1][cols - 1] > 0):
return maze[rows - 1][cols - 1]
else:
return 0
# Driver code
res=[]
maze=[]
rows, cols = input().split()
rows=int(rows)
cols=int(cols)
for i in range(rows):
col =input()
row=[]
for j in col:
row.append(j)
maze.append(row)
# print(maze)
for i in range(rows):
for j in range(cols):
if(maze[i][j]=='.'):
maze[i][j]=0
else:
maze[i][j]=-1
print(maze)
arr = [x[:] for x in maze]
# print(arr)
for i in range(0, rows):
for j in range(0, cols):
if(maze[i][j]==-1):
maze[i][j]=0
else:
maze[i][j]=-1
n=countPaths(maze, rows, cols)
if(n==0):
print("0", end=' ')
else:
print("1", end=' ')
maze = [x[:] for x in arr]
print()
# # print(countPaths(arr))
# # maze = [[0, 0, 0, 0],
# # [0, -1, 0, 0],
# # [-1, 0, 0, 0],
# # [0, 0, 0, 0 ]]
# # print(countPaths(maze))
# # print(maze)
# # for i in range(0, rows):
# # for j in range(0,cols):
# # print(arr[i][j])
# # print()
# # print(arr)
# # print(arr)
# # This code is contributed by
# # Surendra_Gangwar
|
idade = 20
salario = 490.00
nome = 'Mirella'
verdadeiro = True
falso = False
print('='*50, '\n'*2)
print('\n')
print('Calypso mania')
print('Exalta mania')
print('\tMirella Ohana Bardini','\tApelido: Mi ')
print('Blumenau/SC')
print('HBSIS', '\n'*2)
print(idade)
print('\n'*1)
print(salario)
print('\n'*1)
print(nome)
print('\n'*1)
print(verdadeiro)
print('\n'*1)
print(falso)
print('\n'*2)
print(falso,verdadeiro,nome,idade,salario)
print('\tNome:',nome,'\tIdade:',idade, '\tSalario:',salario,'\tVerdadeiro', verdadeiro, '\tFalso',falso)
print('Nome: {} Idade: {} Salario: {} Verdadeiro {} Falso {}'.format(nome, idade, salario, verdadeiro, falso))
print(f'Nome: {nome} Idade: {idade} Salario: {salario} Verdadeiro {verdadeiro} Falso {falso}')
print('idade',idade,'\n'*2 )
print('='*50)
############################################################################################################################################
#--- Lesson 2 06-11-2019
#--- Variables
#--- Integer numeric variable
idade = 18
#--- Floating-point numeric variable
salario = 490.00
#--- Text (string) variable
nome = 'maykon'
#--- Boolean variables
verdadeiro = True
falso = False
#--- Printing text using string multiplication
print('='*50, '\n'*2)
#--- Printing multiple texts and variables using commas
print('Idade:',idade, 'Salario:',salario, 'Nome:', nome, 'Verdadeiro:', verdadeiro, 'Falso:', falso)
#--- Printing multiple texts and variables using the format function
print('Idade:{} Salario:{} Nome:{} Verdadeiro:{} Falso:{}'.format(idade, salario, nome, verdadeiro, falso) )
#--- Printing multiple texts and variables using f-string interpolation
print(f'Idade:{idade} Salario:{salario} Nome:{nome} Verdadeiro:{verdadeiro} Falso:{falso}')
#--- Printing text using string multiplication
print('\n'*2, '='*50) |
# Count how many of the running totals of the inputs fall inside [s, t].
n, s, t = map(int, input().split())
w = c = 0
for i in range(n):
    w += int(input())
    c += s <= w <= t  # a boolean adds 0 or 1
print(c) |
'''
The prime factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143 ?
'''
# find the factors of num using modulus operator
num = 600851475143
div = 2
highestFactor = 1
while num > 1:
if(num%div==0):
        num = num // div  # integer division keeps num an exact int
highestFactor = div
else:
div+=1
print(highestFactor)
|
# Problem Statement: https://www.hackerrank.com/challenges/map-and-lambda-expression/problem
cube = lambda x: x**3
def fibonacci(n):
fib = [0, 1]
for i in range(2, n):
fib.append(fib[i - 1] + fib[i - 2])
return fib[0:n] |
def multiply_by(a, b=2, c=1):
return a * b + c
print(multiply_by(3, 47, 0)) # Call function using custom values for all parameters
print(multiply_by(3, 47)) # Call function using default value for c parameter
print(multiply_by(3, c=47)) # Call function using default value for b parameter
print(multiply_by(3)) # Call function using default values for parameters b and c
print(multiply_by(a=7)) # Call function using default values for parameters b and c
def hello(subject, name="Max"):
print(f"Hello {subject}! My name is {name}")
hello("PyCharm", "Jane") # Call "hello" function with "PyCharm as a subject parameter and "Jane" as a name
hello("PyCharm") # Call "hello" function with "PyCharm as a subject parameter and default value for the name
|
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
source("../../shared/qtcreator.py")
def main():
startApplication("qtcreator" + SettingsPath)
if not startedWithoutPluginError():
return
createProject_Qt_GUI(tempDir(), "DesignerTestApp")
selectFromLocator("mainwindow.ui")
widgetIndex = "{container=':qdesigner_internal::WidgetBoxCategoryListView' text='%s' type='QModelIndex'}"
widgets = {"Push Button": 50,
"Check Box": 100}
for current in widgets.keys():
dragAndDrop(waitForObject(widgetIndex % current), 5, 5,
":FormEditorStack_qdesigner_internal::FormWindow", 20, widgets[current], Qt.CopyAction)
connections = []
for record in testData.dataset("connections.tsv"):
connections.append([testData.field(record, col) for col in ["widget", "baseclass",
"signal", "slot"]])
for con in connections:
selectFromLocator("mainwindow.ui")
openContextMenu(waitForObject(con[0]), 5, 5, 0)
snooze(1)
# hack for Squish 5/Qt5.2 problems of handling menus on Mac - remove asap
if platform.system() == 'Darwin':
waitFor("macHackActivateContextMenuItem('Go to slot...', con[0])", 6000)
else:
activateItem(waitForObjectItem("{type='QMenu' unnamed='1' visible='1'}", "Go to slot..."))
try:
# Creator built with Qt <= 5.9
signalWidgetObject = waitForObject(":Select signal.signalList_QTreeWidget", 5000)
signalName = con[2]
except:
# Creator built with Qt >= 5.10
signalWidgetObject = waitForObject(":Select signal.signalList_QTreeView")
signalName = con[1] + "." + con[2]
waitForObjectItem(signalWidgetObject, signalName)
clickItem(signalWidgetObject, signalName, 5, 5, 0, Qt.LeftButton)
clickButton(waitForObject(":Go to slot.OK_QPushButton"))
editor = waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
type(editor, "<Up>")
type(editor, "<Up>")
test.verify(waitFor('str(lineUnderCursor(editor)).strip() == con[3]', 1000),
'Comparing line "%s" to expected "%s"' % (lineUnderCursor(editor), con[3]))
invokeMenuItem("File", "Save All")
invokeMenuItem("File", "Exit")
|
# -*- coding: utf-8 -*-
# @Time : 2018/8/13 16:45
# @Author : Dylan
# @File : config.py
# @Email : [email protected]
class Config():
train_imgs_path = "images/train/images"
train_labels_path = "images/train/label"
merge_path = ""
aug_merge_path = "deform/deform_norm2"
aug_train_path = "deform/train/"
aug_label_path = "deform/label/"
test_path = "images/test"
npy_path = "../npydata"
result_np_save = '../results/imgs_mask_test.npy'
save_img_path = "../results/"
checkpoint_path = ""
save_model_path = ""
tensorboard_path = ""
load_model_path = ""
load_model = False
model_train = True
img_type = 'tif'
aug_img_num = 30
norm_size = 512
channels = 1
batch_size = 6
use_gpu = "1"
aug = True
ratio = 0.2
max_epoch = 200
lr = 1e-4
lr_reduce = 0.5
config = Config()
|
tile_colors = {}
for _ in range(400):
path = input()
i = 0
pos_north = 0.0
pos_east = 0.0
while i < len(path):
if path[i] == 'e':
pos_east += 1.0
i += 1
elif path[i] == 'w':
pos_east -= 1.0
i += 1
elif path[i] == 'n' and path[i+1] == 'e':
pos_north += 0.5
pos_east += 0.5
i += 2
elif path[i] == 's' and path[i+1] == 'e':
pos_north -= 0.5
pos_east += 0.5
i += 2
elif path[i] == 's' and path[i+1] == 'w':
pos_north -= 0.5
pos_east -= 0.5
i += 2
else:
pos_north += 0.5
pos_east -= 0.5
i += 2
pos = (pos_east, pos_north)
if pos in tile_colors:
tile_colors[pos] = 'black' if tile_colors.get(pos) == 'white' else 'white'
else:
tile_colors[pos] = 'black'
counter = 0
for pos in tile_colors.keys():
if tile_colors.get(pos) == 'black':
counter += 1
print(counter)
|
# Copyright 2018 Inap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ShellSession(object):
def __init__(self, command_processor):
self.command_processor = command_processor
self.command_processor.write("\n")
self.command_processor.show_prompt()
def receive(self, line):
self.command_processor.logger.debug("received: %s" % line)
try:
processed = self.command_processor.process_command(line)
except TerminalExitSignal:
return False
if not processed:
self.command_processor.logger.info("Command not supported : %s" % line)
self.handle_unknown_command(line)
self.command_processor.show_prompt()
return not self.command_processor.is_done
def handle_unknown_command(self, line):
pass
class TerminalExitSignal(Exception):
pass
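# Illustrative usage sketch (assumed, not part of the original module): the
# command processor is expected to expose write(), show_prompt(),
# process_command(), a logger and an is_done flag. The minimal fake processor
# below echoes input and ends the session on "quit".
if __name__ == "__main__":
    import logging

    class EchoProcessor(object):
        def __init__(self):
            self.logger = logging.getLogger("echo")
            self.is_done = False

        def write(self, text):
            print(text, end="")

        def show_prompt(self):
            print("> ", end="")

        def process_command(self, line):
            if line == "quit":
                self.is_done = True
            else:
                self.write(line + "\n")
            return True

    session = ShellSession(EchoProcessor())
    print(session.receive("hello"))  # echoes "hello", prints True (session continues)
    print(session.receive("quit"))   # prints False (processor reports it is done)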
|
function_classes = {}
class RegisteringMetaclass(type):
def __new__(*args): # pylint: disable=no-method-argument
cls = type.__new__(*args)
if cls.name:
function_classes[cls.name] = cls
return cls
class Function(metaclass=RegisteringMetaclass):
name = None
min_args = None
max_args = None
def __init__(self, *args):
if self.min_args is not None and len(args) < self.min_args:
raise ValueError(
'Expected min {} args but was {}'.format(self.min_args, len(args))
)
if self.max_args is not None and len(args) > self.max_args:
raise ValueError(
'Expected max {} args but was {}'.format(self.max_args, len(args))
)
self.args = args
def evaluate(self, context):
raise NotImplementedError()
def debug(self, context):
try:
result = self.evaluate(context)
except Exception as e:
result = type(e).__name__
return '<{}({})={}>'.format(
self.name,
', '.join(
f.debug(context) if isinstance(f, Function) else str(f)
for f in self.args
),
result,
)
def to_dict(self):
return {
self.name: [
f.to_dict()
if isinstance(f, Function) else f for f in self.args
]
}
@classmethod
def from_dict(cls, dct):
if (
isinstance(dct, dict) and
len(dct) == 1 and
list(dct.keys())[0] in function_classes
):
func_name, arg_dicts = list(dct.items())[0]
func_class = function_classes[func_name]
return func_class(*[
cls.from_dict(arg_dict) for arg_dict in arg_dicts
])
return dct
class Equals(Function):
name = 'equals'
min_args = 2
max_args = 2
def evaluate(self, context):
result1 = self.args[0].evaluate(context)
result2 = self.args[1].evaluate(context)
return result1 == result2
class Lte(Function):
name = 'lte'
min_args = 2
max_args = 2
def evaluate(self, context):
result1 = self.args[0].evaluate(context)
result2 = self.args[1].evaluate(context)
return result1 <= result2
class Gte(Function):
name = 'gte'
min_args = 2
max_args = 2
def evaluate(self, context):
result1 = self.args[0].evaluate(context)
result2 = self.args[1].evaluate(context)
return result1 >= result2
class In(Function):
name = 'in'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) in self.args[1].evaluate(context)
class NotIn(Function):
name = 'notin'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) not in self.args[1].evaluate(context)
class Or(Function):
name = 'or'
min_args = 2
max_args = None
def evaluate(self, context):
return any(f.evaluate(context) for f in self.args)
class And(Function):
name = 'and'
min_args = 2
max_args = None
def evaluate(self, context):
return all(f.evaluate(context) for f in self.args)
class Not(Function):
name = 'not'
min_args = 1
max_args = 1
def evaluate(self, context):
return not self.args[0].evaluate(context)
class If(Function):
name = 'if'
min_args = 3
max_args = 3
def evaluate(self, context):
if self.args[0].evaluate(context):
return self.args[1].evaluate(context)
return self.args[2].evaluate(context)
class Constant(Function):
name = 'constant'
min_args = 1
max_args = 1
def evaluate(self, context):
return self.args[0]
def debug(self, context):
return '<constant({})>'.format(str(self.args[0]))
class ParamMixin:
def _evaluate(self, context):
result = context
for item in self.path_items:
if hasattr(result, item):
result = getattr(result, item)
elif isinstance(result, dict):
result = result.get(item)
else:
return None
return result
class Param(Function, ParamMixin):
name = 'param'
min_args = 1
max_args = 1
def __init__(self, path):
super().__init__(path)
self.path_items = path.split('.')
def evaluate(self, context):
return self._evaluate(context)
class ParamWithDefault(Function, ParamMixin):
name = 'dparam'
min_args = 2
max_args = 2
def __init__(self, path, default):
super().__init__(path, default)
self.path_items = path.split('.')
self.default = default
def evaluate(self, context):
result = self._evaluate(context)
if result is None:
return self.default
return result
class Add(Function):
name = 'add'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) + self.args[1].evaluate(context)
class Subtract(Function):
name = 'subtract'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) - self.args[1].evaluate(context)
class Multiply(Function):
name = 'multiply'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) * self.args[1].evaluate(context)
class Divide(Function):
name = 'divide'
min_args = 2
max_args = 2
def evaluate(self, context):
return self.args[0].evaluate(context) / self.args[1].evaluate(context)
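# Illustrative usage sketch (assumed, not part of the original module): rules can
# be round-tripped through plain dicts via Function.from_dict/to_dict and then
# evaluated against a context (any object or nested dict that Param can walk).
if __name__ == '__main__':
    rule_dict = {
        'if': [
            {'gte': [{'param': ['user.age']}, {'constant': [18]}]},
            {'constant': ['adult']},
            {'constant': ['minor']},
        ]
    }
    rule = Function.from_dict(rule_dict)
    print(rule.evaluate({'user': {'age': 21}}))   # -> 'adult'
    print(rule.evaluate({'user': {'age': 12}}))   # -> 'minor'
    print(rule.to_dict() == rule_dict)            # -> True
    print(rule.debug({'user': {'age': 21}}))      # nested <name(...)=result> trace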
|
load("@bazel_skylib//lib:shell.bzl", "shell")
load("//:golink.bzl", "gen_copy_files_script")
def go_proto_link_impl(ctx, **kwargs):
print("Copying generated files for proto library %s" % ctx.attr.dep[OutputGroupInfo])
return gen_copy_files_script(ctx, ctx.attr.dep[OutputGroupInfo].go_generated_srcs.to_list())
_go_proto_link = rule(
implementation = go_proto_link_impl,
attrs = {
"dir": attr.string(),
"dep": attr.label(),
"_template": attr.label(
default = "//:copy_into_workspace.sh",
allow_single_file = True,
),
        # Not used by the rule itself; kept only for versioning while this rule is experimental.
"version": attr.string(),
},
)
def go_proto_link(name, **kwargs):
if not "dir" in kwargs:
dir = native.package_name()
kwargs["dir"] = dir
gen_rule_name = "%s_copy_gen" % name
_go_proto_link(name = gen_rule_name, **kwargs)
native.sh_binary(
name = name,
srcs = [":%s" % gen_rule_name]
)
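# Illustrative BUILD usage sketch (assumed; the load path and target names are
# hypothetical, not taken from the original repo):
#
#   load("//:go_proto_link.bzl", "go_proto_link")
#
#   go_proto_link(
#       name = "foo_proto_copy",
#       dep = ":foo_go_proto",
#       version = "0.1",
#   )
#
# `bazel run //mypkg:foo_proto_copy` would then copy the generated .pb.go files
# from the go_proto_library's `go_generated_srcs` output group back into the
# workspace package.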
|
# -*- coding: utf-8 -*-
"""Responses.
responses serve both testing purpose aswell as dynamic docstring replacement.
"""
responses = {
"_v3_HistoricalPositions": {
"url": "/openapi/hist/v3/positions/{ClientKey}",
"params": {
'FromDate': '2019-03-01',
'ToDate': '2019-03-10'
},
"response": {
"Data": [
{
"AccountId": "112209INET",
"AccountValueEndOfDay": {
"AccountBalance": 7526.17183,
"CashTransfers": 0,
"Date": "2016-07-19",
"PositionsValue": -978.29753,
"SecurityTransfers": 0,
"TotalValue": 6547.8743
},
"Amount": -1,
"AmountAccountValueCloseRatio": "2:1",
"AmountAccountValueOpenRatio": "2:1",
"ClosingAssetType": "CfdOnIndex",
"ClosingTradeDate": "2016-07-19",
"ClosingValueDate": "2016-07-19",
"CopiedFrom": "1",
"CorrelationType": "None",
"Decimals": 2,
"ExecutionTimeClose": "2016-07-19T07:25:37.000000Z",
"ExecutionTimeOpen": "2016-07-18T10:38:06.000000Z",
"FigureValue": 1,
"InstrumentCcyToAccountCcyRateClose": 1.1020982542939,
"InstrumentCcyToAccountCcyRateOpen": 1.11308229426434,
"InstrumentSymbol": "GER30.I",
"LongShort": {
"PresentationValue": "Short",
"Value": "Short"
},
"OpeningAssetType": "CfdOnIndex",
"OpeningTradeDate": "2016-07-18",
"OpeningValueDate": "2016-07-18",
"PriceClose": 9998,
"PriceGain": 0.004778021102926538,
"PriceOpen": 10046,
"PricePct": -0.4778021102926538,
"ProfitLoss": 52.87,
"ProfitLossAccountValueFraction": 0.00807437613761156,
"Uic": "1373",
"ValueInAccountCurrencyClose": -11018.778346430412,
"ValueInAccountCurrencyOpen": -11182.02472817956
}
]
}
},
}
|