"""
Banner object
--
Author : DrLarck
Last update : 27/01/21 by DrLarck
"""
import asyncio
import random
# util
from utility.entity.character import CharacterGetter
from utility.entity.config_parser import ConfigParser
# tool
from utility.global_tool import GlobalTool
class Banner:
def __init__(self):
# Public
self.name = ""
self.image = ""
self.characters = []
self.sorted = False
self.multi = 10
# Private
self.__lr = []
self.__ur = []
self.__ssr = []
self.__sr = []
self.__r = []
self.__n = []
# Droprate as %
self.__lr_droprate = 0.07
self.__ur_droprate = 3
self.__ssr_droprate = 15
self.__sr_droprate = 33
self.__r_droprate = 50
self.__n_droprate = 100
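# Illustrative note (added for clarity): summon() walks these cumulative thresholds
# with a uniform roll in [0, 100], so the effective per-rarity chances are roughly
# LR 0.07%, UR ~2.93%, SSR 12%, SR 18%, R 17% and N 50%.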
# tool
self.__global_tool = GlobalTool()
# Public
async def sort(self):
"""
Sort the banner
--
:return: `None`
"""
if self.sorted is False:
# Sort the characters
for character in self.characters:
await asyncio.sleep(0)
# Put the character in the LR list
if character.rarity.value == 5:
self.__lr.append(character)
# Put the character in the UR list
elif character.rarity.value == 4:
self.__ur.append(character)
elif character.rarity.value == 3:
self.__ssr.append(character)
elif character.rarity.value == 2:
self.__sr.append(character)
elif character.rarity.value == 1:
self.__r.append(character)
elif character.rarity.value == 0:
self.__n.append(character)
self.sorted = True
return
async def summon(self):
"""
Summon a random character
--
:return: `Character` or `None` in case of problem
"""
# Init
roll = random.uniform(0, 100)
# Get a character according to the player's roll right above
# Check if the list is not empty
if len(self.__lr) > 0 and roll <= self.__lr_droprate:
# If the player gets an LR character
# Get a random LR character from the LR list
character = random.choice(self.__lr)
return character
elif len(self.__ur) > 0 and roll <= self.__ur_droprate:
character = random.choice(self.__ur)
return character
elif len(self.__ssr) > 0 and roll <= self.__ssr_droprate:
character = random.choice(self.__ssr)
return character
elif len(self.__sr) > 0 and roll <= self.__sr_droprate:
character = random.choice(self.__sr)
return character
elif len(self.__r) > 0 and roll <= self.__r_droprate:
character = random.choice(self.__r)
return character
elif len(self.__n) > 0 and roll <= self.__n_droprate:
character = random.choice(self.__n)
return character
return
async def multi_summon(self):
"""
Perform a multi summon
--
:return: `list` of `Character`
"""
# Init
characters = []
# Retry on failed rolls until the expected number of characters is reached
# (decrementing the for-loop index in the original code did not actually retry)
while len(characters) < self.multi:
await asyncio.sleep(0)
summoned = await self.summon()
if summoned is not None:
characters.append(summoned)
return characters
async def generate(self, client, name="",
image="", characters=""):
"""
Generate a banner object
:param client: discord.ext.commands.Bot
:param name: (`str`)
:param image: (`str`) Valid url
:param characters: (`str`)
--
:return: `Banner`
"""
# Init
getter = CharacterGetter()
# Set the attributes
self.name = name
self.image = image
self.characters = characters
# Get the character instances
self.characters = self.characters.split()
# Init the new character list
new_character_list = []
# The characters are stored as reference ids in the characters[] attribute
for reference in self.characters:
await asyncio.sleep(0)
# Convert the str reference to int
reference = int(reference)
character = await getter.get_reference_character(reference, client)
# Add the character into the list
new_character_list.append(character)
# Replace the old character list by the new one
self.characters = new_character_list
# Sort the banner
await self.sort()
return self
async def set_unique_id(self, client):
"""
Generate a unique id for the characters whose unique id is still NULL
--
:return: `None`
"""
# Get the characters that have 'NONE' as unique id
characters = await client.database.fetch_row("""
SELECT reference
FROM character_unique
WHERE character_unique_id is NULL;
""")
# Generate a unique id for each of them
for character in characters:
await asyncio.sleep(0)
# Get the unique character's reference
reference = character[0]
unique_id = await self.__global_tool.generate_unique_id(reference)
# Update the character's unique id
await client.database.execute("""
UPDATE character_unique
SET character_unique_id = $1
WHERE reference = $2;
""", [unique_id, reference])
return
class BannerGetter:
# Private
__cache = []
__cache_ok = False
__current_banner = 1
# Public
async def set_cache(self, client):
"""
Set the banner cache
--
:return: `None`
"""
if self.__cache_ok is False:
data = await client.database.fetch_row("""
SELECT banner_name, banner_image, banner_content
FROM banner
ORDER BY reference;
""")
if len(data) > 0:
for banner in data:
await asyncio.sleep(0)
# Generate the banner object
banner_ = Banner()
await banner_.generate(client, name=banner[0], image=banner[1], characters=banner[2])
self.__cache.append(banner_)
self.__cache_ok = True
print("Banner Cache : DONE")
else:
print("Banner Cache : The cache has already been filled.")
return
async def get_banner(self, reference):
"""
Return a banner object from the cache
:param reference: (`int`)
--
:return: `Banner` or `None` if not found
"""
# Get the banner object from the cache
if reference > 0 and reference - 1 < len(self.__cache):
return self.__cache[reference - 1]
else:
print(f"Banner {reference} not found.")
return None
async def get_current_banner(self):
"""
Return the last banner out
--
:return: `Banner`
"""
self.__current_banner = await ConfigParser().get_config_for("banner current")
return self.__cache[self.__current_banner - 1]
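# Minimal usage sketch (illustrative, not part of the original module; assumes a
# discord.ext.commands.Bot-like `client` exposing the database pool used above):
#
# getter = BannerGetter()
# await getter.set_cache(client)             # build the banner cache once
# banner = await getter.get_current_banner()
# characters = await banner.multi_summon()   # list of up to banner.multi Characters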
| [
"random.uniform",
"random.choice",
"utility.entity.character.CharacterGetter",
"utility.entity.config_parser.ConfigParser",
"asyncio.sleep",
"utility.global_tool.GlobalTool"
] | [((881, 893), 'utility.global_tool.GlobalTool', 'GlobalTool', ([], {}), '()\n', (891, 893), False, 'from utility.global_tool import GlobalTool\n'), ((2106, 2128), 'random.uniform', 'random.uniform', (['(0)', '(100)'], {}), '(0, 100)\n', (2120, 2128), False, 'import random\n'), ((4048, 4065), 'utility.entity.character.CharacterGetter', 'CharacterGetter', ([], {}), '()\n', (4063, 4065), False, 'from utility.entity.character import CharacterGetter\n'), ((2430, 2454), 'random.choice', 'random.choice', (['self.__lr'], {}), '(self.__lr)\n', (2443, 2454), False, 'import random\n'), ((2574, 2598), 'random.choice', 'random.choice', (['self.__ur'], {}), '(self.__ur)\n', (2587, 2598), False, 'import random\n'), ((3458, 3474), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (3471, 3474), False, 'import asyncio\n'), ((4487, 4503), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (4500, 4503), False, 'import asyncio\n'), ((5617, 5633), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (5630, 5633), False, 'import asyncio\n'), ((1155, 1171), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (1168, 1171), False, 'import asyncio\n'), ((2720, 2745), 'random.choice', 'random.choice', (['self.__ssr'], {}), '(self.__ssr)\n', (2733, 2745), False, 'import random\n'), ((7992, 8006), 'utility.entity.config_parser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (8004, 8006), False, 'from utility.entity.config_parser import ConfigParser\n'), ((2865, 2889), 'random.choice', 'random.choice', (['self.__sr'], {}), '(self.__sr)\n', (2878, 2889), False, 'import random\n'), ((6902, 6918), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (6915, 6918), False, 'import asyncio\n'), ((3007, 3030), 'random.choice', 'random.choice', (['self.__r'], {}), '(self.__r)\n', (3020, 3030), False, 'import random\n'), ((3148, 3171), 'random.choice', 'random.choice', (['self.__n'], {}), '(self.__n)\n', (3161, 3171), False, 'import random\n')] |
# import the code from this package
from shell_command_logger import print_color
from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION
SUBCOMMAND_NAMES = ["c", "config"]
ARG_PARSER_OPTIONS = {
"description": f"This subcommand can be used to view or modify the config file. You can also manually modify the config file located at {CONFIG_FILE}.",
"help": "view and modify config",
}
def populate_agrument_parser(ap) -> None:
"""
Populates an argparse.ArgumentParser or a subcommand argument parser
"""
mutex = ap.add_mutually_exclusive_group()
mutex.add_argument("-d", "--defaults", action="store_true", help="reset all settings back to the defaults")
mutex.add_argument("-g", "--get", metavar="<key>", help="get the value of <key>")
mutex.add_argument("-s", "--set", metavar=("<key>", "<new_value>"), nargs=2, help="set an option: <key> = <new_value>")
def subcommand_main(args) -> int:
"""
This method expects the parsed arguments from an argument parser that was set up with `populate_agrument_parser()`.
It returns a Unix-like status code (0 -> success, everything else -> error).
"""
scl_config = load_config()
if args.set:
key, new_value = args.set
parser = config_to_parser(scl_config)
parser[_KEY_SECTION][key] = new_value
save_parser_as_config(parser)
elif args.get:
key = args.get
parser = config_to_parser(scl_config)
try:
print(parser[_KEY_SECTION][key])
except KeyError:
print(f"The value '{key}' does not exist!")
return 1
elif args.defaults:
parser = config_to_parser(DEFAULT_CONFIG)
save_parser_as_config(parser)
else:
# By default print out the current config
parser = config_to_parser(scl_config)
text = parser_to_text(parser)
print(text.rstrip())
try:
sanitize_config(load_config())
return 0
except Exception as ex:
print_color(f"Error validating config: {ex}", "red", bold=True)
return 1
# By default return 0 (success)
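# Illustrative behaviour of subcommand_main, based on the parser defined above
# (the "scl config" command name is an assumption, not taken from this file):
#   --get <key>              prints one value, returns 1 if the key does not exist
#   --set <key> <new_value>  writes a single option and saves the config file
#   --defaults               restores DEFAULT_CONFIG
#   (no flag)                prints the whole current config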
| [
"shell_command_logger.config.save_parser_as_config",
"shell_command_logger.config.config_to_parser",
"shell_command_logger.print_color",
"shell_command_logger.config.parser_to_text",
"shell_command_logger.config.load_config"
] | [((1279, 1292), 'shell_command_logger.config.load_config', 'load_config', ([], {}), '()\n', (1290, 1292), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1362, 1390), 'shell_command_logger.config.config_to_parser', 'config_to_parser', (['scl_config'], {}), '(scl_config)\n', (1378, 1390), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1445, 1474), 'shell_command_logger.config.save_parser_as_config', 'save_parser_as_config', (['parser'], {}), '(parser)\n', (1466, 1474), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1534, 1562), 'shell_command_logger.config.config_to_parser', 'config_to_parser', (['scl_config'], {}), '(scl_config)\n', (1550, 1562), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((2042, 2055), 'shell_command_logger.config.load_config', 'load_config', ([], {}), '()\n', (2053, 2055), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((2110, 2173), 'shell_command_logger.print_color', 'print_color', (['f"""Error validating config: {ex}"""', '"""red"""'], {'bold': '(True)'}), "(f'Error validating config: {ex}', 'red', bold=True)\n", (2121, 2173), False, 'from shell_command_logger import print_color\n'), ((1764, 1796), 'shell_command_logger.config.config_to_parser', 'config_to_parser', (['DEFAULT_CONFIG'], {}), '(DEFAULT_CONFIG)\n', (1780, 1796), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1805, 1834), 'shell_command_logger.config.save_parser_as_config', 'save_parser_as_config', (['parser'], {}), '(parser)\n', (1826, 1834), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1912, 1940), 'shell_command_logger.config.config_to_parser', 'config_to_parser', (['scl_config'], {}), '(scl_config)\n', (1928, 1940), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n'), ((1956, 1978), 'shell_command_logger.config.parser_to_text', 'parser_to_text', (['parser'], {}), '(parser)\n', (1970, 1978), False, 'from shell_command_logger.config import load_config, sanitize_config, config_to_parser, save_parser_as_config, parser_to_text, DEFAULT_CONFIG, CONFIG_FILE, _KEY_SECTION\n')] |
"""Takes an unrolled StencilModel and converts it to a C++ AST.
The third stage in processing. Input must be processed with
StencilUnrollNeighborIter first to remove neighbor loops and
InputElementZeroOffset nodes. Done once per call.
"""
import ast
import asp.codegen.cpp_ast as cpp_ast
import asp.codegen.ast_tools as ast_tools
import stencil_model
from types import StringType, IntType, LongType, FloatType  # needed by VerifyOnlyCppNodes (Python 2 types)
from assert_utils import *
class StencilConvertAST(ast_tools.ConvertAST):
def __init__(self, model, input_grids, output_grid, inject_failure=None):
assert_has_type(model, stencil_model.StencilModel)
assert len(input_grids) == len(model.input_grids), 'Incorrect number of input grids'
self.model = model
self.input_grids = input_grids
self.output_grid = output_grid
self.output_grid_name = 'out_grid'
self.dim_vars = []
self.var_names = [self.output_grid_name]
self.next_fresh_var = 0
self.inject_failure = inject_failure
super(StencilConvertAST, self).__init__()
def run(self):
self.model = self.visit(self.model)
assert_has_type(self.model, cpp_ast.FunctionBody)
StencilConvertAST.VerifyOnlyCppNodes().visit(self.model)
return self.model
class VerifyOnlyCppNodes(ast_tools.NodeVisitorCustomNodes):
def visit(self, node):
for field, value in ast.iter_fields(node):
if type(value) in [StringType, IntType, LongType, FloatType]:
pass
elif isinstance(value, list):
for item in value:
if ast_tools.is_cpp_node(item):
self.visit(item)
elif ast_tools.is_cpp_node(value):
self.visit(value)
else:
assert False, 'Expected only codepy.cgen.Generable nodes and primitives but found %s' % value
# Visitors
def visit_StencilModel(self, node):
self.argdict = dict()
for i in range(len(node.input_grids)):
self.var_names.append(node.input_grids[i].name)
self.argdict[node.input_grids[i].name] = self.input_grids[i]
self.argdict[self.output_grid_name] = self.output_grid
assert node.border_kernel.body == [], 'Border kernels not yet implemented'
func_name = "kernel"
arg_names = [x.name for x in node.input_grids] + [self.output_grid_name]
args = [cpp_ast.Pointer(cpp_ast.Value("PyObject", x)) for x in arg_names]
body = cpp_ast.Block()
# generate the code to unpack arrays into C++ pointers and macros for accessing
# the arrays
body.extend([self.gen_array_macro_definition(x) for x in self.argdict])
body.extend(self.gen_array_unpack())
body.append(self.visit_interior_kernel(node.interior_kernel))
return cpp_ast.FunctionBody(cpp_ast.FunctionDeclaration(cpp_ast.Value("void", func_name), args),
body)
def visit_interior_kernel(self, node):
cur_node, ret_node = self.gen_loops(node)
body = cpp_ast.Block()
self.output_index_var = cpp_ast.CName(self.gen_fresh_var())
body.append(cpp_ast.Value("int", self.output_index_var))
body.append(cpp_ast.Assign(self.output_index_var,
self.gen_array_macro(
self.output_grid_name, [cpp_ast.CName(x) for x in self.dim_vars])))
replaced_body = None
for gridname in self.argdict.keys():
replaced_body = [ast_tools.ASTNodeReplacer(
ast.Name(gridname, None), ast.Name("_my_"+gridname, None)).visit(x) for x in node.body]
body.extend([self.visit(x) for x in replaced_body])
cur_node.body = body
return ret_node
def visit_OutputAssignment(self, node):
return cpp_ast.Assign(self.visit(stencil_model.OutputElement()), self.visit(node.value))
def visit_Constant(self, node):
return node.value
def visit_ScalarBinOp(self, node):
return super(StencilConvertAST, self).visit_BinOp(ast.BinOp(node.left, node.op, node.right))
def visit_OutputElement(self, node):
return cpp_ast.Subscript("_my_" + self.output_grid_name, self.output_index_var)
def visit_InputElement(self, node):
index = self.gen_array_macro(node.grid.name,
map(lambda x,y: cpp_ast.BinOp(cpp_ast.CName(x), "+", cpp_ast.CNumber(y)),
self.dim_vars,
node.offset_list))
return cpp_ast.Subscript("_my_" + node.grid.name, index)
def visit_InputElementExprIndex(self, node):
return cpp_ast.Subscript("_my_" + node.grid.name, self.visit(node.index))
def visit_MathFunction(self, node):
return cpp_ast.FunctionCall(cpp_ast.CName(node.name), params=map(self.visit, node.args))
# Helper functions
def gen_array_macro_definition(self, arg):
array = self.argdict[arg]
defname = "_"+arg+"_array_macro"
params = "(" + ','.join(["_d"+str(x) for x in xrange(array.dim)]) + ")"
calc = "(_d%d" % (array.dim-1)
for x in range(0,array.dim-1):
calc += "+(_d%s * %s)" % (str(x), str(array.data.strides[x]/array.data.itemsize))
calc += ")"
return cpp_ast.Define(defname+params, calc)
def gen_array_macro(self, arg, point):
name = "_%s_array_macro" % arg
return cpp_ast.Call(cpp_ast.CName(name), point)
def gen_array_unpack(self):
ret = [cpp_ast.Assign(cpp_ast.Pointer(cpp_ast.Value("npy_double", "_my_"+x)),
cpp_ast.TypeCast(cpp_ast.Pointer(cpp_ast.Value("npy_double", "")), cpp_ast.FunctionCall(cpp_ast.CName("PyArray_DATA"), params=[cpp_ast.CName(x)])))
for x in self.argdict.keys()]
return ret
def gen_loops(self, node):
dim = len(self.output_grid.shape)
ret_node = None
cur_node = None
def add_one(n):
if self.inject_failure == 'loop_off_by_one':
return cpp_ast.CNumber(n.num + 1)
else:
return n
for d in xrange(dim):
dim_var = self.gen_fresh_var()
self.dim_vars.append(dim_var)
initial = cpp_ast.CNumber(self.output_grid.ghost_depth)
end = cpp_ast.CNumber(self.output_grid.shape[d]-self.output_grid.ghost_depth-1)
increment = cpp_ast.CNumber(1)
if d == 0:
ret_node = cpp_ast.For(dim_var, add_one(initial), add_one(end), increment, cpp_ast.Block())
cur_node = ret_node
elif d == dim-2:
# add OpenMP parallel pragma to 2nd innermost loop
pragma = cpp_ast.Pragma("omp parallel for")
for_node = cpp_ast.For(dim_var, add_one(initial), add_one(end), increment, cpp_ast.Block())
cur_node.body = cpp_ast.Block(contents=[pragma, for_node])
cur_node = for_node
elif d == dim-1:
# add ivdep pragma to innermost node
pragma = cpp_ast.Pragma("ivdep")
for_node = cpp_ast.For(dim_var, add_one(initial), add_one(end), increment,
cpp_ast.Block())
cur_node.body = cpp_ast.Block(contents=[pragma, for_node])
cur_node = for_node
else:
cur_node.body = cpp_ast.For(dim_var, add_one(initial), add_one(end), increment, cpp_ast.Block())
cur_node = cur_node.body
return (cur_node, ret_node)
def gen_fresh_var(self):
while True:
self.next_fresh_var += 1
var = "x%d" % self.next_fresh_var
if var not in self.var_names:
return var
class StencilConvertASTCilk(StencilConvertAST):
class CilkFor(cpp_ast.For):
def intro_line(self):
return "cilk_for (%s; %s; %s += %s)" % (self.start, self.condition, self.loopvar, self.increment)
def gen_loops(self, node):
dim = len(self.output_grid.shape)
ret_node = None
cur_node = None
for d in xrange(dim):
dim_var = self.gen_fresh_var()
self.dim_vars.append(dim_var)
initial = cpp_ast.CNumber(self.output_grid.ghost_depth)
end = cpp_ast.CNumber(self.output_grid.shape[d]-self.output_grid.ghost_depth-1)
increment = cpp_ast.CNumber(1)
if d == 0:
# add_one() is local to StencilConvertAST.gen_loops, so use the loop bounds directly here
ret_node = cpp_ast.For(dim_var, initial, end, increment, cpp_ast.Block())
cur_node = ret_node
elif d == dim-2:
cur_node.body = StencilConvertASTCilk.CilkFor(dim_var, initial, end, increment, cpp_ast.Block())
cur_node = cur_node.body
else:
cur_node.body = cpp_ast.For(dim_var, initial, end, increment, cpp_ast.Block())
cur_node = cur_node.body
return (cur_node, ret_node)
| [
"stencil_model.OutputElement",
"asp.codegen.cpp_ast.Subscript",
"asp.codegen.cpp_ast.Define",
"ast.iter_fields",
"asp.codegen.cpp_ast.Value",
"asp.codegen.cpp_ast.Pragma",
"ast.Name",
"asp.codegen.cpp_ast.Block",
"asp.codegen.ast_tools.is_cpp_node",
"ast.BinOp",
"asp.codegen.cpp_ast.CNumber",
"asp.codegen.cpp_ast.CName"
] | [((2514, 2529), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (2527, 2529), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((3093, 3108), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (3106, 3108), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((4238, 4310), 'asp.codegen.cpp_ast.Subscript', 'cpp_ast.Subscript', (["('_my_' + self.output_grid_name)", 'self.output_index_var'], {}), "('_my_' + self.output_grid_name, self.output_index_var)\n", (4255, 4310), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((4647, 4696), 'asp.codegen.cpp_ast.Subscript', 'cpp_ast.Subscript', (["('_my_' + node.grid.name)", 'index'], {}), "('_my_' + node.grid.name, index)\n", (4664, 4696), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5405, 5443), 'asp.codegen.cpp_ast.Define', 'cpp_ast.Define', (['(defname + params)', 'calc'], {}), '(defname + params, calc)\n', (5419, 5443), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((1350, 1371), 'ast.iter_fields', 'ast.iter_fields', (['node'], {}), '(node)\n', (1365, 1371), False, 'import ast\n'), ((3206, 3249), 'asp.codegen.cpp_ast.Value', 'cpp_ast.Value', (['"""int"""', 'self.output_index_var'], {}), "('int', self.output_index_var)\n", (3219, 3249), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((4138, 4179), 'ast.BinOp', 'ast.BinOp', (['node.left', 'node.op', 'node.right'], {}), '(node.left, node.op, node.right)\n', (4147, 4179), False, 'import ast\n'), ((4906, 4930), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['node.name'], {}), '(node.name)\n', (4919, 4930), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5553, 5572), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['name'], {}), '(name)\n', (5566, 5572), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6369, 6414), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['self.output_grid.ghost_depth'], {}), '(self.output_grid.ghost_depth)\n', (6384, 6414), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6433, 6510), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['(self.output_grid.shape[d] - self.output_grid.ghost_depth - 1)'], {}), '(self.output_grid.shape[d] - self.output_grid.ghost_depth - 1)\n', (6448, 6510), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6531, 6549), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['(1)'], {}), '(1)\n', (6546, 6549), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((8389, 8434), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['self.output_grid.ghost_depth'], {}), '(self.output_grid.ghost_depth)\n', (8404, 8434), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((8453, 8530), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['(self.output_grid.shape[d] - self.output_grid.ghost_depth - 1)'], {}), '(self.output_grid.shape[d] - self.output_grid.ghost_depth - 1)\n', (8468, 8530), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((8551, 8569), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['(1)'], {}), '(1)\n', (8566, 8569), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((2448, 2476), 'asp.codegen.cpp_ast.Value', 'cpp_ast.Value', (['"""PyObject"""', 'x'], {}), "('PyObject', x)\n", (2461, 2476), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((2900, 2932), 'asp.codegen.cpp_ast.Value', 'cpp_ast.Value', (['"""void"""', 'func_name'], {}), "('void', func_name)\n", (2913, 2932), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((3921, 3950), 'stencil_model.OutputElement', 'stencil_model.OutputElement', ([], {}), '()\n', (3948, 3950), False, 'import stencil_model\n'), ((6160, 6186), 
'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['(n.num + 1)'], {}), '(n.num + 1)\n', (6175, 6186), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5661, 5700), 'asp.codegen.cpp_ast.Value', 'cpp_ast.Value', (['"""npy_double"""', "('_my_' + x)"], {}), "('npy_double', '_my_' + x)\n", (5674, 5700), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6664, 6679), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (6677, 6679), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6838, 6872), 'asp.codegen.cpp_ast.Pragma', 'cpp_ast.Pragma', (['"""omp parallel for"""'], {}), "('omp parallel for')\n", (6852, 6872), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((7013, 7055), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {'contents': '[pragma, for_node]'}), '(contents=[pragma, for_node])\n', (7026, 7055), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((8684, 8699), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (8697, 8699), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((1683, 1711), 'asp.codegen.ast_tools.is_cpp_node', 'ast_tools.is_cpp_node', (['value'], {}), '(value)\n', (1704, 1711), True, 'import asp.codegen.ast_tools as ast_tools\n'), ((3429, 3445), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['x'], {}), '(x)\n', (3442, 3445), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((4472, 4488), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['x'], {}), '(x)\n', (4485, 4488), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((4495, 4513), 'asp.codegen.cpp_ast.CNumber', 'cpp_ast.CNumber', (['y'], {}), '(y)\n', (4510, 4513), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5751, 5782), 'asp.codegen.cpp_ast.Value', 'cpp_ast.Value', (['"""npy_double"""', '""""""'], {}), "('npy_double', '')\n", (5764, 5782), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5806, 5835), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['"""PyArray_DATA"""'], {}), "('PyArray_DATA')\n", (5819, 5835), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((6964, 6979), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (6977, 6979), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((7199, 7222), 'asp.codegen.cpp_ast.Pragma', 'cpp_ast.Pragma', (['"""ivdep"""'], {}), "('ivdep')\n", (7213, 7222), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((7407, 7449), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {'contents': '[pragma, for_node]'}), '(contents=[pragma, for_node])\n', (7420, 7449), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((8880, 8895), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (8893, 8895), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((9052, 9067), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (9065, 9067), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((1588, 1615), 'asp.codegen.ast_tools.is_cpp_node', 'ast_tools.is_cpp_node', (['item'], {}), '(item)\n', (1609, 1615), True, 'import asp.codegen.ast_tools as ast_tools\n'), ((3632, 3656), 'ast.Name', 'ast.Name', (['gridname', 'None'], {}), '(gridname, None)\n', (3640, 3656), False, 'import ast\n'), ((3658, 3691), 'ast.Name', 'ast.Name', (["('_my_' + gridname)", 'None'], {}), "('_my_' + gridname, None)\n", (3666, 3691), False, 'import ast\n'), ((7358, 7373), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (7371, 7373), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((7600, 7615), 'asp.codegen.cpp_ast.Block', 'cpp_ast.Block', ([], {}), '()\n', (7613, 7615), True, 'import asp.codegen.cpp_ast as cpp_ast\n'), ((5845, 
5861), 'asp.codegen.cpp_ast.CName', 'cpp_ast.CName', (['x'], {}), '(x)\n', (5858, 5861), True, 'import asp.codegen.cpp_ast as cpp_ast\n')] |
"""utlity methods running on the underlying database.
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
import uuid
import django
def get_db_time(connection):
cursor = connection.cursor()
cursor.execute("SELECT statement_timestamp();")
return cursor.fetchone()[0]
def format_transaction(transaction_date=None, transaction_type=False):
# Generate a new transaction id from the (given or current) date and a random uuid
if transaction_date is None:
dt = django.utils.timezone.now()
else:
dt = transaction_date
uuid_transaction = str(uuid.uuid4().hex[:6])
transaction = '{0}id{1}'.format(str(dt.date()).replace('-', ''),
uuid_transaction)
if transaction_type:
return '-%s' % (transaction,)
else:
return transaction
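# Example (illustrative): on 2021-01-27 this returns something like '20210127id3f2a1b',
# or '-20210127id3f2a1b' when transaction_type is truthy.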
| [
"django.utils.timezone.now",
"uuid.uuid4"
] | [((702, 729), 'django.utils.timezone.now', 'django.utils.timezone.now', ([], {}), '()\n', (727, 729), False, 'import django\n'), ((796, 808), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (806, 808), False, 'import uuid\n')] |
import unittest
from EEGNAS_experiment import get_configurations, parse_args, set_params_by_dataset
from EEGNAS.global_vars import init_config
from EEGNAS import global_vars
from EEGNAS.utilities import NAS_utils
class TestModelGeneration(unittest.TestCase):
def setUp(self):
args = parse_args(['-e', 'tests', '-c', '../configurations/config.ini'])
init_config(args.config)
configs = get_configurations('tests')
assert(len(configs) == 1)
global_vars.set_config(configs[0])
set_params_by_dataset()
def test_perm_ensemble_fitness(self):
global_vars.set('pop_size', 10)
global_vars.set('ensemble_size', 2)
global_vars.set('ga_objective', 'accuracy')
global_vars.set('permanent_ensembles', True)
dummy_weighted_pop = [{'val_raw': [[1-(1/i), 0, 0, 1/i]], 'val_target': [3]} for i in range(1, 11)]
old_len = len(dummy_weighted_pop)
NAS_utils.permanent_ensemble_fitness(dummy_weighted_pop)
NAS_utils.sort_population(dummy_weighted_pop)
assert len(dummy_weighted_pop) == old_len
print(dummy_weighted_pop[-1])
| [
"EEGNAS_experiment.parse_args",
"EEGNAS.global_vars.init_config",
"EEGNAS.utilities.NAS_utils.sort_population",
"EEGNAS.utilities.NAS_utils.permanent_ensemble_fitness",
"EEGNAS_experiment.set_params_by_dataset",
"EEGNAS_experiment.get_configurations",
"EEGNAS.global_vars.set_config",
"EEGNAS.global_vars.set"
] | [((297, 362), 'EEGNAS_experiment.parse_args', 'parse_args', (["['-e', 'tests', '-c', '../configurations/config.ini']"], {}), "(['-e', 'tests', '-c', '../configurations/config.ini'])\n", (307, 362), False, 'from EEGNAS_experiment import get_configurations, parse_args, set_params_by_dataset\n'), ((371, 395), 'EEGNAS.global_vars.init_config', 'init_config', (['args.config'], {}), '(args.config)\n', (382, 395), False, 'from EEGNAS.global_vars import init_config\n'), ((414, 441), 'EEGNAS_experiment.get_configurations', 'get_configurations', (['"""tests"""'], {}), "('tests')\n", (432, 441), False, 'from EEGNAS_experiment import get_configurations, parse_args, set_params_by_dataset\n'), ((484, 518), 'EEGNAS.global_vars.set_config', 'global_vars.set_config', (['configs[0]'], {}), '(configs[0])\n', (506, 518), False, 'from EEGNAS import global_vars\n'), ((527, 550), 'EEGNAS_experiment.set_params_by_dataset', 'set_params_by_dataset', ([], {}), '()\n', (548, 550), False, 'from EEGNAS_experiment import get_configurations, parse_args, set_params_by_dataset\n'), ((602, 633), 'EEGNAS.global_vars.set', 'global_vars.set', (['"""pop_size"""', '(10)'], {}), "('pop_size', 10)\n", (617, 633), False, 'from EEGNAS import global_vars\n'), ((642, 677), 'EEGNAS.global_vars.set', 'global_vars.set', (['"""ensemble_size"""', '(2)'], {}), "('ensemble_size', 2)\n", (657, 677), False, 'from EEGNAS import global_vars\n'), ((686, 729), 'EEGNAS.global_vars.set', 'global_vars.set', (['"""ga_objective"""', '"""accuracy"""'], {}), "('ga_objective', 'accuracy')\n", (701, 729), False, 'from EEGNAS import global_vars\n'), ((738, 782), 'EEGNAS.global_vars.set', 'global_vars.set', (['"""permanent_ensembles"""', '(True)'], {}), "('permanent_ensembles', True)\n", (753, 782), False, 'from EEGNAS import global_vars\n'), ((941, 997), 'EEGNAS.utilities.NAS_utils.permanent_ensemble_fitness', 'NAS_utils.permanent_ensemble_fitness', (['dummy_weighted_pop'], {}), '(dummy_weighted_pop)\n', (977, 997), False, 'from EEGNAS.utilities import NAS_utils\n'), ((1006, 1051), 'EEGNAS.utilities.NAS_utils.sort_population', 'NAS_utils.sort_population', (['dummy_weighted_pop'], {}), '(dummy_weighted_pop)\n', (1031, 1051), False, 'from EEGNAS.utilities import NAS_utils\n')] |
"""
Helper functions for both fabfiles
"""
from fabric.api import runs_once, prompt, abort
from requests import get
from json import loads
def _download_gush_map(muni_name, topojson=False):
r = get('https://raw.githubusercontent.com/niryariv/israel_gushim/master/%s.%s' % (muni_name, 'topojson' if topojson else 'geojson'))
if r.status_code != 200:
abort('Failed to download gushim map')
try:
res = loads(r.text)
except:
abort('Gushim map is an invalid JSON file')
return res
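# Example (illustrative): _download_gush_map('jerusalem') fetches and parses
# jerusalem.geojson from the israel_gushim repository; pass topojson=True for the
# .topojson variant.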
| [
"json.loads",
"fabric.api.abort",
"requests.get"
] | [((201, 334), 'requests.get', 'get', (["('https://raw.githubusercontent.com/niryariv/israel_gushim/master/%s.%s' %\n (muni_name, 'topojson' if topojson else 'geojson'))"], {}), "('https://raw.githubusercontent.com/niryariv/israel_gushim/master/%s.%s' %\n (muni_name, 'topojson' if topojson else 'geojson'))\n", (204, 334), False, 'from requests import get\n'), ((368, 406), 'fabric.api.abort', 'abort', (['"""Failed to download gushim map"""'], {}), "('Failed to download gushim map')\n", (373, 406), False, 'from fabric.api import runs_once, prompt, abort\n'), ((435, 448), 'json.loads', 'loads', (['r.text'], {}), '(r.text)\n', (440, 448), False, 'from json import loads\n'), ((469, 512), 'fabric.api.abort', 'abort', (['"""Gushim map is an invalid JSON file"""'], {}), "('Gushim map is an invalid JSON file')\n", (474, 512), False, 'from fabric.api import runs_once, prompt, abort\n')] |
from enum import Enum
from typing import Dict, List, Optional, Set
from fastapi import Body, Cookie, FastAPI, Path, Query
from pydantic import BaseModel, Field, HttpUrl
# FastAPI object instance
app = FastAPI()
# ----------
# Root end point
@app.get("/")
async def root():
return {"message": "Hello World"}
# ----------
# Dynamic input
# localhost:8000/items/42/
@app.get("/items/{item_id}") # item_id from address path -> input args item_id
async def read_item(item_id: int): # auto type validation with type hints
return {"item_id": item_id}
# Static input
@app.get("/users/me")
async def read_user_me():
return {"user_id": "the current user"}
# Dynamic address str type
@app.get("/users/{user_id}")
async def read_user(user_id: str): # String type
return {"user_id": user_id}
class ModelName(str, Enum):
alexnet = "alexnet"
resnet = "resnet"
lenet = "lenet"
@app.get("/models/{model_name}")
async def get_model(
model_name: ModelName,
): # Use Enum to constraint available types and values
if model_name == ModelName.alexnet:
return {"model_name": model_name, "message": "Deep Learning FTW!"}
if model_name.value == "lenet":
return {"model_name": model_name, "message": "LeCNN all the images"}
return {"model_name": model_name, "message": "Have some residuals"}
@app.get(
"/files/{file_path:path}"
) # :path indicate match any path (i.e. /files//home/johndoe/myfiles.txt)
async def read_file(file_path: str):
return {"file_path": file_path}
# ----------
fake_items_db = [{"item_name": "Foo"}, {"item_name": "Bar"}, {"item_name": "Baz"}]
# localhost:8000/items2/?skip=0&limit=10
@app.get("/items2/")
async def items2(skip: int = 0, limit: int = 10):
return fake_items_db[skip : skip + limit]
@app.get("/items3/{item_id}")
async def items3(
item_id: str, q: Optional[str] = None
): # FastAPI knows optional because of `= None`, Optional is only for editor static check
if q:
return {"item_id": item_id, "q": q}
return {"item_id": item_id}
# localhost:8000/items4/foo?short=True
# localhost:8000/items4/foo?short=true
# localhost:8000/items4/foo?short=1 -> truthy
# localhost:8000/items4/foo?short=on -> truthy
# localhost:8000/items4/foo?short=yes -> truthy
@app.get("/items4/{item_id}")
async def item4(item_id: str, q: Optional[str] = None, short: bool = False):
item = {"item_id": item_id}
if q:
item.update({"q": q})
if not short:
item.update({"description": "long description"})
return item
# localhost:8000/items5/foo?needy=soneedy
@app.get("/items5/{item_id}")
async def items5(item_id: str, needy: str): # required parameters
item = {"item_id": item_id, "needy": needy}
return item
# localhost:8000/items6/foo?needy=soneedy&skip=0&limit=0
@app.get("/items6/{item_id}")
async def items6(
item_id: str, needy: str, skip: int = 0, limit: Optional[int] = None
): # needy = required str, skip = required int with default 0, limit = optional int
item = {"item_id": item_id, "needy": needy, "skip": skip, "limit": limit}
return item
# ----------
class Item(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
# Body
# {
# "name": "some name",
# "prince": 50.1
# }
# {
# "name": "some name",
# "prince": 50.1,
# "description": "some description",
# "tax": "100"
# }
@app.post("/items7/")
async def items7(item: Item): # expect fields from inside the Item class
return item
@app.post("/items8")
async def items8(item: Item):
item_dict = item.dict() # convert item json to dict first
if item.tax: # use dot notation or indexing to access
price_with_tax = item.price + item.tax
item_dict.update({"price_with_tax": price_with_tax})
return item_dict
@app.put("/items9/{item_id}")
async def items9(item_id: int, item: Item):
return {"item_id": item_id, **item.dict()} # Unroll dict
# localhost:8000/items10/100?q=foo
@app.put("/items10/{item_id}")
async def items10(item_id: int, item: Item, q: Optional[str] = None):
result = {"item_id": item_id, **item.dict()}
if q:
result.update({"q": q})
return result
# ----------
@app.get("/items11/")
async def items11(
q: Optional[str] = Query(None, min_length=3, max_length=50, regex="^fixedquery$")
): # User Query for additional validation, first param set default values,
# None means optional, min_length and max_length limits length for q,
# regex: ^ means start with following chars, fixedquery means exact value fixedquery,
# $ ends there, no more chars after fixedquery
results = {"items": [{"item_id": "Foo"}, {"item_id": "Bar"}]}
if q:
results.update({"q": q})
return results
@app.get("/items12/")
async def items12(
q: str = Query(..., min_length=3, max_length=50)
): # Query first param means default, ... means non-optional, q param is required
results = {"items": [{"item_id": "Foo"}, {"item_id": "Bar"}]}
if q:
results.update({"q": q})
return results
# localhost:8000/items13/?q=foo&q=bar&q=woot
@app.get("/items13/")
async def items13(
q: Optional[List[str]] = Query(None),
): # Optional List accepts multiple input by declaring q multiple times
query_items = {"q": q}
# {
# "q": [
# "foo",
# "bar",
# "woot"
# ]
# }
return query_items
@app.get("/items14/")
async def items14(
q: List[str] = Query(["foo", "bar", "woot"], title="Query string", min_length=3)
): # Required list, default values are declared inside Query
# Can also just use q: list = Query([])
query_items = {"q": q}
return query_items
# for example you want parameter to be item-query
# localhost:8000/items15/?item-query=foobaritem
# this way, Query will help to match item-query and assign correctly to q
@app.get("/items15/")
async def items15(q: Optional[str] = Query(None, alias="item-query")):
results = {"items": [{"item_id": "Foo"}, {"item_id": "Bar"}]}
if q:
results.update({"q": q})
return results
# Add deprecated args = True to indicate endpoint is deprecated
# Will showup in Swagger
@app.get("/items16/")
async def items16(q: Optional[str] = Query(None, deprecated=True)):
results = {"items": [{"item_id": "Foo"}, {"item_id": "Bar"}]}
if q:
results.update({"q": q})
return results
# ----------
@app.get("/items17/{item_id}")
async def items17(
item_id: int = Path(
..., title="The ID of item to get"
), # path parameter is always required, use ... as placeholder
q: Optional[str] = Query(
None, alias="item-query"
), # move q to first pos if it doesnt have default
):
results = {"item_id": item_id}
if q:
results.update({"q": q})
return results
# If order of args matter and q without default needs to be last, use * in the first pos
@app.get("/items18/{item_id}")
async def items18(*, item_id: int = Path(..., title="The ID"), q: str):
results = {"item_id": item_id}
if q:
results.update({"q": q})
return results
# Both Query and Path can use string/number validation
@app.get("/items19/{item_id}")
async def items19(
*,
item_id: int = Path(..., title="ID", ge=0, le=1000),
q: str,
size: float = Query(..., gt=0, lt=10),
):
results = {"item_id": item_id}
if q:
results.update({"q": q})
return results
# ----------
class Item2(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
@app.put("/items20/{item_id}")
async def items20(
*,
item_id: int = Path(..., title="ID", ge=0, le=1000),
q: Optional[str] = None,
item: Optional[Item2] = None, # can set body parameter as optional
):
results = {"item_id": item_id}
if q:
results.update({"q": q})
if item:
results.update({"item": item})
return results
class User(BaseModel):
username: str
full_name: Optional[str] = None
# Body
# {
# "item": {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2
# },
# "user": {
# "username": "dave",
# "full_name": "<NAME>"
# }
# }
@app.put("/items21/{item_id}")
async def items21(item_id: int, item: Item2, user: User):
results = {"item_id": item_id, "item": item, "user": user}
return results
# {
# "item": {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2
# },
# "user": {
# "username": "dave",
# "full_name": "<NAME>"
# },
# "importance": 5
# }
@app.put("/items22/{item_id}")
async def items22(
item_id: int, item: Item2, user: User, importance: int = Body(...)
): # use Body(...) for singular value body
results = {"item_id": item_id, "item": item, "user": user}
return results
@app.put("/items23/{item_id}")
async def items23(
*,
item_id: int,
item: Item2,
user: User,
importance: int = Body(..., gt=0), # Body also support validation
q: Optional[str] = None,
):
results = {"item_id": item_id, "item": item, "user": user, "importance": importance}
if q:
results.update({"q": q})
return results
# Expect body like this
# {
# "item": {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2
# }
# }
# instead of this
# {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2
# }
@app.put("/items24/{item_id}")
async def items24(item_id: int, item: Item2 = Body(..., embed=True)):
results = {"item_id": item_id, "item": item}
return results
# ----------
class Item3(BaseModel):
name: str
description: Optional[str] = Field( # field works the same as Query, Path and Body
None, title="item", max_length=300
)
price: float = Field(..., gt=0, description="price > 0")
tax: Optional[float] = None
@app.put("/items25/{item_id}")
async def items25(item_id: int, item: Item3 = Body(..., embed=True)):
results = {"item_id": item_id, "item": item}
return results
# ----------
class Item4(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
tags: list = [] # define list attribute or use the List typing for type hint
# tags: List[str] = []
# tags: Set[str] = set() for using set
@app.put("/items26/{item_id}")
async def items26(item_id: int, item: Item4):
results = {"item_id": item_id, "item": item}
return results
# ----------
class Image1(BaseModel):
# url: str
url: HttpUrl # use HttpUrl for URL validation, else use string above
name: str
class Item5(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
tags: Set[str] = set()
image: Optional[Image1] = None # Use Image1 type
# Body
# {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2,
# "tags": ["rock", "metal", "bar"],
# "image": {
# "url": "http://example.com/baz.jpg",
# "name": "The Foo live"
# }
# }
@app.put("/items27/{item_id}")
async def items27(item_id: int, item: Item5):
results = {"item_id": item_id, "item": item}
return results
# Body
# {
# "name": "Foo",
# "description": "The pretender",
# "price": 42.0,
# "tax": 3.2,
# "tags": [
# "rock",
# "metal",
# "bar"
# ],
# "images": [
# {
# "url": "http://example.com/baz.jpg",
# "name": "The Foo live"
# },
# {
# "url": "http://example.com/dave.jpg",
# "name": "The Baz"
# }
# ]
# }
class Item6(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
tags: Set[str] = set()
images: Optional[List[Image1]] = None
@app.put("/items28/{item_id}")
async def items28(item_id: int, item: Item6):
results = {"item_id": item_id, "item": item}
return results
class Offer1(BaseModel):
name: str
description: Optional[str] = None
price: float
items: List[Item6] # Deep nested models
@app.post("/offers1/")
async def offers1(offer: Offer1):
return offer
# [
# {
# "url": "http://example.com/baz.jpg",
# "name": "The Foo live"
# },
# {
# "url": "http://example.com/dave.jpg",
# "name": "The Baz"
# }
# ]
@app.post("/images1/multiple")
async def images1(images: List[Image1]): # JSON body with a list at the top level value
return images
# {
# 1: 1.1,
# 2: 2.2
# }
@app.post("/index-weights1")
async def index_weights1(weights: Dict[int, float]):
return weights
# ----------
class Item7(BaseModel):
name: str
description: Optional[str] = None
price: float
tax: Optional[float] = None
class Config: # keyword for pydantic
schema_extra = { # keyword for pydantic
"example": { # example will show up in JSON schema
"name": "Foo",
"description": "A very nice Item",
"price": 35.4,
"tax": 3.2,
}
}
@app.put("/items29/{item_id}")
async def items29(item_id: int, item: Item7):
results = {"item_id": item_id, "item": item}
return results
class Item8(BaseModel):
name: str = Field(..., example="Foo") # Example stated will pass to JSON schema
description: Optional[str] = Field(None, example="A very nice item")
price: float = Field(..., example=35.4)
tax: Optional[float] = Field(None, example=3.2)
@app.put("/items30/{item_id}")
async def items30(item_id: int, item: Item8):
results = {"item_id": item_id, "item": item}
return results
@app.put("/items31/{item_id}")
async def items31(
item_id: int,
item: Item7 = Body(
...,
example={ # declare examples straight in the Body
"name": "Foo",
"description": "A very nice item",
"price": 35.4,
"tax": 3.2,
},
),
):
results = {"item_id": item_id, "item": item}
return results
@app.put("/items32/{item_id}")
async def items32(
*,
item_id: int,
item: Item7 = Body(
...,
examples={ # multiple examples, shown as dropdown in Swagger UI
"normal": {
"summary": "A normal example",
"description": "A **normal** item works correctly",
"value": {
"name": "Foo",
"description": "A very nice item",
"price": 35.4,
"tax": 3.2,
},
},
"converted": {
"summary": "An example with converted data",
"description": "convert price to actual num",
"value": { # description and tax optional
"name": "Bar",
"price": "35.3",
},
},
"invalid": {
"summary": "Invalid data is rejected with an error",
"value": {
"name": "Baz",
"price": "thirty five point four",
},
},
},
),
):
results = {"item_id": item_id, "item": item}
return results
# ----------
@app.get("/items33/")
async def items33(ads_id: Optional[str] = Cookie(None)):
return {"ads_id": ads_id}
| [
"fastapi.FastAPI",
"pydantic.Field",
"fastapi.Body",
"fastapi.Query",
"fastapi.Path",
"fastapi.Cookie"
] | [((203, 212), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (210, 212), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((4300, 4362), 'fastapi.Query', 'Query', (['None'], {'min_length': '(3)', 'max_length': '(50)', 'regex': '"""^fixedquery$"""'}), "(None, min_length=3, max_length=50, regex='^fixedquery$')\n", (4305, 4362), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((4838, 4877), 'fastapi.Query', 'Query', (['...'], {'min_length': '(3)', 'max_length': '(50)'}), '(..., min_length=3, max_length=50)\n', (4843, 4877), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((5206, 5217), 'fastapi.Query', 'Query', (['None'], {}), '(None)\n', (5211, 5217), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((5512, 5577), 'fastapi.Query', 'Query', (["['foo', 'bar', 'woot']"], {'title': '"""Query string"""', 'min_length': '(3)'}), "(['foo', 'bar', 'woot'], title='Query string', min_length=3)\n", (5517, 5577), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((5967, 5998), 'fastapi.Query', 'Query', (['None'], {'alias': '"""item-query"""'}), "(None, alias='item-query')\n", (5972, 5998), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((6279, 6307), 'fastapi.Query', 'Query', (['None'], {'deprecated': '(True)'}), '(None, deprecated=True)\n', (6284, 6307), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((6522, 6562), 'fastapi.Path', 'Path', (['...'], {'title': '"""The ID of item to get"""'}), "(..., title='The ID of item to get')\n", (6526, 6562), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((6662, 6693), 'fastapi.Query', 'Query', (['None'], {'alias': '"""item-query"""'}), "(None, alias='item-query')\n", (6667, 6693), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((7016, 7041), 'fastapi.Path', 'Path', (['...'], {'title': '"""The ID"""'}), "(..., title='The ID')\n", (7020, 7041), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((7282, 7318), 'fastapi.Path', 'Path', (['...'], {'title': '"""ID"""', 'ge': '(0)', 'le': '(1000)'}), "(..., title='ID', ge=0, le=1000)\n", (7286, 7318), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((7350, 7373), 'fastapi.Query', 'Query', (['...'], {'gt': '(0)', 'lt': '(10)'}), '(..., gt=0, lt=10)\n', (7355, 7373), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((7693, 7729), 'fastapi.Path', 'Path', (['...'], {'title': '"""ID"""', 'ge': '(0)', 'le': '(1000)'}), "(..., title='ID', ge=0, le=1000)\n", (7697, 7729), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((8847, 8856), 'fastapi.Body', 'Body', (['...'], {}), '(...)\n', (8851, 8856), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((9115, 9130), 'fastapi.Body', 'Body', (['...'], {'gt': '(0)'}), '(..., gt=0)\n', (9119, 9130), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((9720, 9741), 'fastapi.Body', 'Body', (['...'], {'embed': '(True)'}), '(..., embed=True)\n', (9724, 9741), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((9898, 9939), 'pydantic.Field', 'Field', (['None'], {'title': '"""item"""', 'max_length': '(300)'}), "(None, title='item', max_length=300)\n", (9903, 9939), False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((10021, 10062), 'pydantic.Field', 'Field', (['...'], {'gt': '(0)', 'description': '"""price > 0"""'}), "(..., gt=0, description='price > 0')\n", (10026, 10062), 
False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((10174, 10195), 'fastapi.Body', 'Body', (['...'], {'embed': '(True)'}), '(..., embed=True)\n', (10178, 10195), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((13598, 13623), 'pydantic.Field', 'Field', (['...'], {'example': '"""Foo"""'}), "(..., example='Foo')\n", (13603, 13623), False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((13700, 13739), 'pydantic.Field', 'Field', (['None'], {'example': '"""A very nice item"""'}), "(None, example='A very nice item')\n", (13705, 13739), False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((13759, 13783), 'pydantic.Field', 'Field', (['...'], {'example': '(35.4)'}), '(..., example=35.4)\n', (13764, 13783), False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((13811, 13835), 'pydantic.Field', 'Field', (['None'], {'example': '(3.2)'}), '(None, example=3.2)\n', (13816, 13835), False, 'from pydantic import BaseModel, Field, HttpUrl\n'), ((14070, 14170), 'fastapi.Body', 'Body', (['...'], {'example': "{'name': 'Foo', 'description': 'A very nice item', 'price': 35.4, 'tax': 3.2}"}), "(..., example={'name': 'Foo', 'description': 'A very nice item',\n 'price': 35.4, 'tax': 3.2})\n", (14074, 14170), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((14457, 14964), 'fastapi.Body', 'Body', (['...'], {'examples': "{'normal': {'summary': 'A normal example', 'description':\n 'A **normal** item works correctly', 'value': {'name': 'Foo',\n 'description': 'A very nice item', 'price': 35.4, 'tax': 3.2}},\n 'converted': {'summary': 'An example with converted data',\n 'description': 'convert price to actual num', 'value': {'name': 'Bar',\n 'price': '35.3'}}, 'invalid': {'summary':\n 'Invalid data is rejected with an error', 'value': {'name': 'Baz',\n 'price': 'thirty five point four'}}}"}), "(..., examples={'normal': {'summary': 'A normal example', 'description':\n 'A **normal** item works correctly', 'value': {'name': 'Foo',\n 'description': 'A very nice item', 'price': 35.4, 'tax': 3.2}},\n 'converted': {'summary': 'An example with converted data',\n 'description': 'convert price to actual num', 'value': {'name': 'Bar',\n 'price': '35.3'}}, 'invalid': {'summary':\n 'Invalid data is rejected with an error', 'value': {'name': 'Baz',\n 'price': 'thirty five point four'}}})\n", (14461, 14964), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n'), ((15634, 15646), 'fastapi.Cookie', 'Cookie', (['None'], {}), '(None)\n', (15640, 15646), False, 'from fastapi import Body, Cookie, FastAPI, Path, Query\n')] |
# from pprint import pprint
from requests import get
from bs4 import BeautifulSoup
from requests.models import Response
# In Google: what is my user agent
HEADERS = {
"user-agent": ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/95.0.4638.54 Safari/537.36")
}
def get_porsche(city:str="kansascity", model: str="porsche%20944") -> Response:
"""GET current Craigslist porsche 944 postings"""
if model:
model = build_search(model)
url = f"https://{city}.craigslist.org/d/cars-trucks/search/cta?query={model}&sort=rel"
response = get(url, headers=HEADERS)
response.raise_for_status()
return response
def build_search(model: str) -> str:
"""Build search for craigslist"""
car = ""
search = model.split()
if len(search) <= 1:
pass
else:
for item in range(len(search) - 1):
car = car + search[item] + "%20"
return car + search[-1]
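# Example (illustrative): build_search("porsche 944 turbo") returns "porsche%20944%20turbo",
# i.e. the search terms joined with URL-encoded spaces for the Craigslist query string.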
def get_soup(response: Response) -> list:
"""Parse the reponse into a nested list"""
_cars = []
html_soup = BeautifulSoup(response.text, "html.parser")
posts = html_soup.find_all("li", class_= "result-row") # Find all porsche postings
for post in posts:
url = post.find("a", class_="result-title hdrlnk")['href']
_cars.append(
{
"datelisted": post.find("time", class_= "result-date")['datetime'],
"price": post.a.text.strip(),
"title": post.find("a", class_="result-title hdrlnk").text,
"url": url,
"location": url.split("https://")[1].split(".")[0], # Splits url at 'https://' and the 1st . after city
}
)
cars = remove_dupes(_cars)
return cars
def remove_dupes(car_list):
"""Remove duplicate dicts from list"""
x = set()
cars = []
for i in car_list:
if i['title'] not in x:
x.add(i['title'])
cars.append(i)
else:
continue
return cars
if __name__ == "__main__":
response = get_porsche()
x = get_soup(response)
| [
"bs4.BeautifulSoup",
"requests.get"
] | [((661, 686), 'requests.get', 'get', (['url'], {'headers': 'HEADERS'}), '(url, headers=HEADERS)\n', (664, 686), False, 'from requests import get\n'), ((1141, 1184), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.text', '"""html.parser"""'], {}), "(response.text, 'html.parser')\n", (1154, 1184), False, 'from bs4 import BeautifulSoup\n')] |
#!/usr/bin/env python3
"""Permute an LMDB database according to the given key map."""
import argparse
import contextlib
import sys
import lmdb
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('src_db_path', metavar='INPUT-DB',
help='the input database path')
parser.add_argument('tgt_db_path', metavar='OUTPUT-DB',
help='the output database path')
parser.add_argument('key_map_path', metavar='KEY-MAP',
help='a TSV file containing on each line a source key and a target key')
args = parser.parse_args()
with contextlib.ExitStack() as ctx:
key_map_file = ctx.enter_context(open(args.key_map_path))
src_db = ctx.enter_context(
lmdb.open(args.src_db_path, subdir=False, readonly=True, lock=False))
tgt_db = ctx.enter_context(
lmdb.open(args.tgt_db_path, subdir=False, readonly=False, lock=False,
map_size=2 * src_db.info()['map_size']))
src_txn = ctx.enter_context(src_db.begin(buffers=True))
tgt_txn = ctx.enter_context(tgt_db.begin(buffers=True, write=True))
total = 0
missing = 0
for line in key_map_file:
src_key, tgt_key = line.rstrip('\n').split('\t')
val = src_txn.get(src_key.encode())
if val is None:
missing += 1
continue
if not tgt_txn.put(tgt_key.encode(), val, overwrite=False):
raise RuntimeError('Duplicate key')
total += 1
print('Wrote {} / {} entries; {} keys missing'.format(
total, src_db.stat()['entries'], missing), file=sys.stderr)
if __name__ == '__main__':
main()
| [
"contextlib.ExitStack",
"lmdb.open",
"argparse.ArgumentParser"
] | [((171, 215), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (194, 215), False, 'import argparse\n'), ((645, 667), 'contextlib.ExitStack', 'contextlib.ExitStack', ([], {}), '()\n', (665, 667), False, 'import contextlib\n'), ((790, 858), 'lmdb.open', 'lmdb.open', (['args.src_db_path'], {'subdir': '(False)', 'readonly': '(True)', 'lock': '(False)'}), '(args.src_db_path, subdir=False, readonly=True, lock=False)\n', (799, 858), False, 'import lmdb\n')] |
## Extraction code based on: https://github.com/fabiocarrara/visual-sentiment-analysis/blob/main/predict.py
import os, sys, json
import argparse
import numpy as np
import torch
import torchvision.transforms as t
from torch.utils.data import Dataset, DataLoader
from torchvision.datasets.folder import default_loader
from tqdm import tqdm
from t4sa.alexnet import KitModel as AlexNet
from t4sa.vgg19 import KitModel as VGG19
class ImageListDataset (Dataset):
def __init__(self, img_names, root=None, transform=None):
        super(ImageListDataset, self).__init__()
self.list = img_names
self.root = root
self.transform = transform
def __getitem__(self, index):
path = self.list[index]
if self.root:
path = os.path.join(self.root, path)
x = default_loader(path)
if self.transform:
x = self.transform(x)
return x, self.list[index].strip('.png')
def __len__(self):
return len(self.list)
def main(args):
feats1, logits, im_names = [], [], []
data_dict = {}
def feature_hook(module, input, output):
        return feats1.extend(output.view(-1,output.shape[1]).data.cpu().numpy().tolist())
transform = t.Compose([
t.Resize((224, 224)),
t.ToTensor(),
t.Lambda(lambda x: x[[2,1,0], ...] * 255), # RGB -> BGR and [0,1] -> [0,255]
t.Normalize(mean=[116.8007, 121.2751, 130.4602], std=[1,1,1]), # mean subtraction
])
dloc = 'data/%s/images'%(args.dset)
image_list = os.listdir(dloc)
data = ImageListDataset(image_list, root=dloc, transform=transform)
dataloader = DataLoader(data, batch_size=args.batch_size, num_workers=2)
model = AlexNet if 'hybrid' in args.model else VGG19
model = model('t4sa/{}.pth'.format(args.model)).to('cuda')
model.eval()
model._modules.get('fc7_1').register_forward_hook(feature_hook)
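    # The forward hook fires once per batch and appends the flattened fc7
    # activations to feats1, one feature vector per input image.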
with torch.no_grad():
for x, im_nms in tqdm(dataloader):
p, logs = model(x.to('cuda')) # order is (NEG, NEU, POS)
logits.extend(logs.cpu().numpy().tolist())
im_names.extend(im_nms)
data_dict['feats_fc7'] = {name:feat for name,feat in zip(im_names, feats1)}
data_dict['logits'] = {name:feat for name,feat in zip(im_names, logits)}
if not os.path.exists('features/image/'):
os.makedirs('features/image/')
json.dump(data_dict, open('features/image/%s_t4sa_%s.json'%(args.dset, args.model), 'w'))
if __name__ == '__main__':
models = ('hybrid_finetuned_fc6+',
'hybrid_finetuned_all',
'vgg19_finetuned_fc6+',
'vgg19_finetuned_all')
parser = argparse.ArgumentParser(description='Extract Visual Sentiment Features')
parser.add_argument('-d', '--dset', default=None, help='Which dataset (clef_en | clef_ar | mediaeval | lesa)')
parser.add_argument('-m', '--model', type=str, choices=models, default='vgg19_finetuned_all', help='Pretrained model')
parser.add_argument('-b', '--batch-size', type=int, default=32, help='Batch size')
args = parser.parse_args()
main(args)
| [
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"os.makedirs",
"tqdm.tqdm",
"os.path.join",
"torchvision.transforms.Lambda",
"torchvision.datasets.folder.default_loader",
"torchvision.transforms.Normalize",
"torch.utils.data.DataLoader",
"torchvision.transforms.Resize",
"torch.no_grad",
"torchvision.transforms.ToTensor"
] | [((1581, 1597), 'os.listdir', 'os.listdir', (['dloc'], {}), '(dloc)\n', (1591, 1597), False, 'import os, sys, json\n'), ((1687, 1746), 'torch.utils.data.DataLoader', 'DataLoader', (['data'], {'batch_size': 'args.batch_size', 'num_workers': '(2)'}), '(data, batch_size=args.batch_size, num_workers=2)\n', (1697, 1746), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((2718, 2790), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Extract Visual Sentiment Features"""'}), "(description='Extract Visual Sentiment Features')\n", (2741, 2790), False, 'import argparse\n'), ((835, 855), 'torchvision.datasets.folder.default_loader', 'default_loader', (['path'], {}), '(path)\n', (849, 855), False, 'from torchvision.datasets.folder import default_loader\n'), ((1972, 1987), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1985, 1987), False, 'import torch\n'), ((2014, 2030), 'tqdm.tqdm', 'tqdm', (['dataloader'], {}), '(dataloader)\n', (2018, 2030), False, 'from tqdm import tqdm\n'), ((2364, 2397), 'os.path.exists', 'os.path.exists', (['"""features/image/"""'], {}), "('features/image/')\n", (2378, 2397), False, 'import os, sys, json\n'), ((2407, 2437), 'os.makedirs', 'os.makedirs', (['"""features/image/"""'], {}), "('features/image/')\n", (2418, 2437), False, 'import os, sys, json\n'), ((780, 809), 'os.path.join', 'os.path.join', (['self.root', 'path'], {}), '(self.root, path)\n', (792, 809), False, 'import os, sys, json\n'), ((1290, 1310), 'torchvision.transforms.Resize', 't.Resize', (['(224, 224)'], {}), '((224, 224))\n', (1298, 1310), True, 'import torchvision.transforms as t\n'), ((1320, 1332), 'torchvision.transforms.ToTensor', 't.ToTensor', ([], {}), '()\n', (1330, 1332), True, 'import torchvision.transforms as t\n'), ((1342, 1385), 'torchvision.transforms.Lambda', 't.Lambda', (['(lambda x: x[[2, 1, 0], ...] * 255)'], {}), '(lambda x: x[[2, 1, 0], ...] * 255)\n', (1350, 1385), True, 'import torchvision.transforms as t\n'), ((1428, 1491), 'torchvision.transforms.Normalize', 't.Normalize', ([], {'mean': '[116.8007, 121.2751, 130.4602]', 'std': '[1, 1, 1]'}), '(mean=[116.8007, 121.2751, 130.4602], std=[1, 1, 1])\n', (1439, 1491), True, 'import torchvision.transforms as t\n')] |
from constants import *
import subprocess
import ffmpeg
import gzip
import numpy as np
class ProcessWrite:
def __init__(self, filename, height, width, framerate):
_args = (
ffmpeg
.input('pipe:', format='rawvideo', pix_fmt='bgr24', s='{}x{}'.format(width, height),
framerate=str(framerate))
.output(filename, pix_fmt='yuv420p', vsync='0', framerate=str(SAVE_FRAMERATE)) # OpenCV uses the BGR format
.overwrite_output()
.compile()
)
self.p = subprocess.Popen(_args, stdin=subprocess.PIPE)
def write(self, _frame):
# TODO Resize here??
self.p.stdin.write(
_frame
.astype(np.uint8)
.tobytes()
)
def close(self):
self.p.stdin.close()
self.p.wait()
def __del__(self):
self.close()
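# Illustrative usage sketch (not part of the original module); assumes an
# OpenCV-style BGR frame array of shape (height, width, 3):
#   writer = ProcessWrite("out.mp4", height=480, width=640, framerate=30)
#   writer.write(frame)
#   writer.close()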
class NumpyWrite:
def __init__(self, filename, compression=True):
if compression:
self.file = gzip.GzipFile(filename, "w")
else:
            self.file = open(filename, "wb")  # keep a real binary file handle so np.save() and close() both work
self.data = []
print("Warning, output.NumpyWrite saves only when file is closed.")
def write(self, data):
self.data.append(data)
def close(self):
np.save(file=self.file, arr=self.data, allow_pickle=True)
self.file.close()
def __del__(self):
self.close()
| [
"subprocess.Popen",
"gzip.GzipFile",
"numpy.save"
] | [((551, 597), 'subprocess.Popen', 'subprocess.Popen', (['_args'], {'stdin': 'subprocess.PIPE'}), '(_args, stdin=subprocess.PIPE)\n', (567, 597), False, 'import subprocess\n'), ((1269, 1326), 'numpy.save', 'np.save', ([], {'file': 'self.file', 'arr': 'self.data', 'allow_pickle': '(True)'}), '(file=self.file, arr=self.data, allow_pickle=True)\n', (1276, 1326), True, 'import numpy as np\n'), ((1005, 1033), 'gzip.GzipFile', 'gzip.GzipFile', (['filename', '"""w"""'], {}), "(filename, 'w')\n", (1018, 1033), False, 'import gzip\n')] |
import decimal
import logging
import os
import smtplib
import time
import warnings
from datetime import datetime
import pandas as pd
import zmq
from binance.client import Client
from coinmarketcap import Market
from colorama import init, Fore, Back, Style
# noinspection PyUnresolvedReferences
from sentimentAnalyse import SentimentAnalyse
init(convert=True)
warnings.filterwarnings("ignore")
log_file = os.path.join(os.getcwd(), 'logs', str(time.strftime('%Y %m %d %H')) + ' activity.log')
logging.basicConfig(filename=log_file, level=logging.INFO,
format='%(asctime)s:%(levelname)s:%(message)s')
# TODO: read korean jgl 101 tips
# TODO: explain why this code is so good to client write on pad
# TODO: binance.products()
class Coin:
def __init__(self, symbol, mode, take_profit,
backend, client, sentiment_analyse,
interval_list, sma_fast_length, sma_slow_length, user_email, symbol_info):
# symbol info
self.symbolPair = symbol
self.base_currency_symbol = symbol_info['base_asset']
self.quote_currency_symbol = symbol_info['quote_asset']
# price, qty and take profit info for trade buy/sell settings
self.price = float()
self.quantity = float()
self.investment = float(symbol_info['investment'])
self.entry_price = float()
self.exit_price = float()
        self.min_step = float(symbol_info['min_step'])
self.round_factor = abs(decimal.Decimal(str(self.min_step)).as_tuple().exponent)
self.take_profit = take_profit / 100
self.in_trade = False
# 1 for automatic - 0 for semi-auto
self.mode = mode
self.local_time = time.strftime("%Y-%m-%d %H:%M:%S")
# binance client and sentiment class objects
self.client = client
self.sentiment_analyse = sentiment_analyse
self.backend = backend
self.gui_dict = dict()
# settings for technical indicator
self.interval_list = interval_list
self.sma_fast_length = sma_fast_length
self.sma_slow_length = sma_slow_length
# base currency and quote currency balance for checking trades can be done
try:
self.base_currency_balance = float(self.client.get_asset_balance(asset=self.base_currency_symbol)['free'])
self.quote_currency_balance = float(self.client.get_asset_balance(asset=self.quote_currency_symbol)['free'])
except:
self.base_currency_balance = 0.0
self.quote_currency_balance = 0.0
# sma market position to keep track of status of technical indicator for the coin
self.sma_market_position = int()
# track of market sentiment
self.sentiment = float()
self.sentiment_list = list()
# email settings
self.bot_email = '<EMAIL>'
self.bot_password = '<PASSWORD>'
self.user_email = user_email
# contains the time, close price for all the intervals of the coin data
self.data = list(pd.DataFrame())
self._init_historic_data()
self._init_sma()
def _init_historic_data(self):
# create pandas data frame for each interval in the interval list and append it to self.data list
for interval in self.interval_list:
# convert the 12 element list from historical klines to a pandas data frame
temp_df = pd.DataFrame(data=self.client.get_historical_klines(self.symbolPair,
interval,
self.__day_count_required(interval))
)
# pick only the time and close price columns
temp_df = temp_df[[0, 4]]
# rename the columns from numeric to strings
temp_df.columns = ['Time', 'Close_price']
temp_df.Time = temp_df.Time.apply(lambda x: self.__binance_time_to_pandas_time(x))
self.data.append(temp_df)
def _init_sma(self):
""" mutates the data frame by adding/replacing the sma_fast and sma_slow columns"""
for data_frame in self.data:
data_frame['sma_fast'] = data_frame.Close_price.rolling(self.sma_fast_length).mean().astype('float64')
data_frame['sma_slow'] = data_frame.Close_price.rolling(self.sma_slow_length).mean().astype('float64')
def __day_count_required(self, interval):
"""
Inputs : interval -> string which tells us about the interval i.e. minutes(m), hour(h)...
sma_slow, sma_fast -> tells us about how much data we need to look for
This function computes the amount of days required in order to compute the latest sma_slow candle,
this function operates in terms of X day ago UTC, we have to compute X such that it will give us
enough candles for at least 1 data point of sma_slow
update: it will work on the biggest interval from the interval list - because giving the time of biggest
interval will always make sure, it works with the smaller time frames
"""
day_time_minutes = 1440
day_count = 1
minutes_required = 1
if interval[-1] == 'm':
minutes_required = int(interval[:-1])
elif interval[-1] == 'h':
minutes_required = int(interval[:-1]) * 60
elif interval[-1] == 'd':
minutes_required = int(interval[:-1]) * 60 * 24
elif interval[-1] == 'w':
minutes_required = int(interval[:-1]) * 60 * 24 * 7
elif interval[-1] == 'M':
minutes_required = int(interval[:-1]) * 60 * 24 * 30
while 1:
time_required = day_count * day_time_minutes / minutes_required
if time_required >= self.sma_slow_length:
break
day_count += 1
return str(day_count) + " day ago UTC"
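    # Worked example (illustrative values): with interval='1h' and sma_slow_length=50,
    # one day holds 24 one-hour candles, so day_count grows to 3 (72 candles >= 50)
    # and the method returns "3 day ago UTC".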
@staticmethod
def __binance_time_to_pandas_time(gt):
""" Converts binance time from milliseconds in a datetime - time stamp
Then converts from python time to pandas datetime
"""
return pd.to_datetime(datetime.fromtimestamp(gt / 1000))
def monitor(self):
# print("round factor check: ", self.round_factor)
# state variables
self.local_time = time.strftime("%Y-%m-%d %H:%M:%S")
if self.investment:
print('{} - monitoring {}'.format(self.local_time, self.symbolPair))
try:
self.price = float(
[pair['price'] for pair in self.client.get_all_tickers() if pair['symbol'] == self.symbolPair][0])
except:
logging.critical(
'binance rate limit reached - could not retrieve quote, base, and current price balances')
print('binance rate limit reached')
logging.info('\nSymbol name: ' + str(self.symbolPair)
+ '\ncurrent price: ' + str(self.price)
+ '\ncurrent budget allocation: ' + str(self.investment)
+ '\nbase currency: ' + str(self.base_currency_symbol) + '- base currency balance: ' + str(
self.base_currency_balance)
+ '\nquote currency: ' + str(self.quote_currency_symbol) + '- quote currency balance: ' + str(
self.quote_currency_balance))
for idx, (interval, data_frame) in enumerate(zip(self.interval_list, self.data)):
# acquire latest candle from binance api, append it is a row at the end of self.data
try:
latest_candle = self.client.get_klines(symbol=self.symbolPair, interval=interval, limit=1)
except:
print("Binance rate limit reached for the day.")
logging.critical('binance rate limit reached most probably - could not retrieve latest candle')
return
latest_time = self.__binance_time_to_pandas_time(latest_candle[0][0])
latest_price = latest_candle[0][4]
latest_row = pd.Series(data=[latest_time, latest_price, 0, 0], index=data_frame.columns)
# check to see if the latest candle is adding any new data
if data_frame.Time.max() != latest_time:
# append latest row to existing data frame
self.data[idx] = data_frame.append(latest_row, ignore_index=True)
# recalculate sma for latest candle
self._init_sma()
if self.in_trade is False:
self._get_sma_market_position()
self.sentiment = self.sentiment_analyse.get_sentiment()
self.sentiment_list.append(self.sentiment)
logging.info('market position: {}'.format(self.sma_market_position))
logging.info('market sentiment: {}'.format(self.sentiment))
# change this to abs() when working with sell orders also
if self.sma_market_position and self.sentiment > 0.15:
try:
self.base_currency_balance = float(
self.client.get_asset_balance(asset=self.base_currency_symbol)['free'])
self.quote_currency_balance = float(
self.client.get_asset_balance(asset=self.quote_currency_symbol)['free'])
except:
self.base_currency_balance = 0.0
self.quote_currency_balance = 0.0
if self.quote_currency_balance > self.investment and self.quote_currency_balance > 0:
self._buy_order()
self.sma_market_position = 0
return
if self.in_trade:
logging.info('trade was placed earlier, waiting for take profit to sell...')
logging.info('current market price: {} '.format(self.price))
logging.info('entry price at trade: {}'.format(self.entry_price))
logging.info('req to sell: {}'.format(self.exit_price))
if self.price >= round(self.exit_price, 6):
try:
self.base_currency_balance = float(
self.client.get_asset_balance(asset=self.base_currency_symbol)['free'])
self.quote_currency_balance = float(
self.client.get_asset_balance(asset=self.quote_currency_symbol)['free'])
except:
self.base_currency_balance = 0.0
self.quote_currency_balance = 0.0
self._sell_order()
self.sma_market_position = 0
return
self.sma_market_position = 0
return
def _get_sma_market_position(self):
""" function compares sma_fast and sma_slow across all intervals in the self.data and sets market position"""
self.gui_dict['data1'] = (self.data[0].loc[self.data[0].index[-1], 'sma_fast']) \
> (self.data[0].loc[self.data[0].index[-1], 'sma_slow'])
self.gui_dict['data2'] = (self.data[1].loc[self.data[1].index[-1], 'sma_fast']) \
> (self.data[1].loc[self.data[1].index[-1], 'sma_slow'])
self.gui_dict['data3'] = (self.data[2].loc[self.data[2].index[-1], 'sma_fast']) \
> (self.data[2].loc[self.data[2].index[-1], 'sma_slow'])
logging.info('SMA-data 1 state: {} \n SMA-data 2 state: {} \n SMA-data 3 state: {}'.format(
self.gui_dict['data1'], self.gui_dict['data2'], self.gui_dict['data3']
))
if all(data_frame.loc[data_frame.index[-1], 'sma_fast'] > data_frame.loc[data_frame.index[-1], 'sma_slow']
for data_frame in self.data):
self.sma_market_position = 1
elif all(data_frame.loc[data_frame.index[-1], 'sma_fast'] < data_frame.loc[data_frame.index[-1], 'sma_slow']
for data_frame in self.data):
self.sma_market_position = -1
else:
self.sma_market_position = 0
def _buy_order(self):
# checks the sma_market_position and if it is positive or negative - places a market order for buy/sell
# resets the market position intended at the end of the cycle
if self.sma_market_position == 1 and self.sentiment > 0.15:
self.__calculate_qty()
# consume all of the investment in 1 market order
if self.mode:
try:
self.client.order_market_buy(symbol=self.symbolPair, quantity=self.quantity)
self.in_trade = True
self.entry_price = self.price
self.exit_price = round(self.entry_price + (self.entry_price * self.take_profit), 6)
print(Back.WHITE + Fore.RED
+ '{} - placed buy order at market\nentry price: {}\nexit target: {}\ntake profit %: {}'
.format(self.local_time, self.entry_price, self.exit_price, self.take_profit * 100))
self.__send_email_notification()
logging.critical('placing buy order at market for {}'.format(self.symbolPair))
except Exception as e:
logging.critical('buy order: {}'.format(e))
print(e)
else:
self.__send_email_notification()
self.in_trade = True
self.entry_price = self.price
print(Style.RESET_ALL)
def _sell_order(self):
print('placing sell order at market')
logging.critical('placing sell order at market for {}'.format(self.symbolPair))
logging.critical('base currency balance: {}'.format(self.base_currency_balance))
self.sma_market_position = -1
if self.mode:
try:
self.client.order_market_sell(symbol=self.symbolPair,
quantity=round(self.base_currency_balance - self.min_step,
abs(self.round_factor)))
self.in_trade = False
print(
'{}{}{} placed sell order on binance for symbol: {}\nentry price: {}\nexit price: {}\nprofit: {}'
.format(Back.WHITE,
Fore.GREEN,
self.local_time,
self.symbolPair,
self.entry_price,
self.exit_price,
round(self.exit_price - self.entry_price, 6)))
self.__send_email_notification()
self.entry_price = 0
except Exception as e:
logging.critical('sell order {}'.format(e))
print(e)
print(Style.RESET_ALL)
else:
self.__send_email_notification()
self.in_trade = False
self.entry_price = 0
def __calculate_qty(self):
self.quantity = round(self.investment / self.price, self.round_factor)
logging.critical('quantity calculated: {}'.format(self.quantity))
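        # Illustrative example (values assumed): with investment=100 USDT,
        # price=0.25 and round_factor=1 this yields quantity=400.0.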
def __send_email_notification(self, special_message=''):
try:
if not special_message:
email_text = str(self.local_time)\
+ 'Placed order on binance \n' \
+ 'Symbol: ' + str(self.symbolPair) + '\n' \
+ 'market position ( 1 = Buy, -1 = Sell): ' + str(self.sma_market_position) + '\n' \
+ 'Quantity: ' + str(self.quantity) + '\n' \
+ 'Entry price: ' + str(self.entry_price) + '\n' \
+ 'Exit price: ' + str(self.exit_price) + '\n' \
+ 'Investment: ' + str(self.investment) + '\n' \
+ 'market sentiment: ' + str(self.sentiment)
else:
email_text = special_message
server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
server.ehlo()
server.login(self.bot_email, self.bot_password)
server.sendmail(self.bot_email, self.user_email, email_text)
server.close()
print('email sent')
logging.critical('email sent')
except Exception as e:
logging.critical('{}'.format(e))
class CCTP:
def __init__(self):
"""
relative to quote currency:
[-len(quote_currency):] will return quote currency
[:-len(quote_currency)] will return base currency
relative to base currency:
[len(base_currency):] will return will return quote currency
[:len(base_currency)] will return will return base currency
"""
# zmq for communication with frontend
self.zmq_context = zmq.Context()
self.backend = self.zmq_context.socket(zmq.REP)
self.backend.bind('tcp://*:17000')
# initialization parameters from input
self.input_dict = self.backend.recv_pyobj()
# binance for communication with binance website
self.public_key = self.input_dict['public']
self.private_key = self.input_dict['private']
self.binance_client = Client(self.public_key, self.private_key)
# sma lengths and intervals for technical indicators
self.sma_fast_len = self.input_dict['sma_fast_len']
self.sma_slow_len = self.input_dict['sma_slow_len']
self.interval_list = self.input_dict['interval_list']
# user email id for informing about trade details
self.user_email_id = self.input_dict['email']
# 1 for automatic - 0 for semi auto
self.mode = self.input_dict['mode']
self.take_profit = self.input_dict['take_profit']
# all USDT pairs - 20 - symbol_count_1 is for internal use only - to speedup load times
self.symbol_count_1 = int()
self.quote_currency_1 = 'USDT'
self.base_currency_1 = str()
self.symbol_count_2 = int(self.input_dict['symbol_count'])
self.quote_currency_2 = self.input_dict['variable_quote']
self.base_currency_2 = str()
# all TUSD/XXX pairs 3 markets - symbol_count_3 is for internal use only
self.symbol_count_3 = int()
self.quote_currency_3 = str()
self.base_currency_3 = 'TUSD'
self.init_dataframe = pd.DataFrame()
# contains the symbol to full name mappings
self.symbol_to_fullname_dict = {}
# contains symbol details from binance
self.symbol_info_dict = {}
self.coin_list = []
        print('getting data from binance and coinbase... (one time loading) (approx 60 seconds - 140 seconds)')
        logging.info('getting data from binance and coinbase... (one time loading) (approx 60 seconds - 140 seconds)')
self._get_full_name_data()
self._get_binance_data()
if self.binance_client.get_system_status()['status'] == 0:
self.backend.send_pyobj(0)
else:
self.backend.send_pyobj(1)
logging.critical('Binance down - 1')
exit()
# send this dict through zmq to the GUI - the GUI will update the keys and resend back
self.backend.recv_pyobj()
# # iterate this dict on the GUI to get the names of all pairs involved
self.backend.send_pyobj(self.init_dataframe)
# get back updated dataframe with investments
self.init_dataframe = self.backend.recv_pyobj()
print('Total currencies we will be dealing with')
logging.info('Total currencies we will be dealing with')
with pd.option_context('display.max_rows', None, 'display.max_columns', None):
print(Back.BLUE)
print(self.init_dataframe.reset_index())
print(Style.RESET_ALL)
        print('getting historical data from binance for technical analysis... (one time loading) (approx 100 seconds)')
        logging.info(
            'getting historical data from binance for technical analysis... (one time loading) (approx 100 seconds)')
for symbol, row in self.init_dataframe.iterrows():
logging.info(str(symbol))
try:
coin_object = Coin(symbol=symbol,
backend=self.backend, client=self.binance_client,
mode=self.mode, take_profit=self.take_profit,
sentiment_analyse=SentimentAnalyse(row[0]),
interval_list=self.interval_list,
sma_fast_length=self.sma_fast_len, sma_slow_length=self.sma_slow_len,
user_email=self.user_email_id, symbol_info=row
)
self.coin_list.append(coin_object)
except:
print(symbol, ' not available currently.')
logging.debug('something wrong with {}'.format(symbol))
# send back reply once the data collection is complete
self.backend.send_pyobj('')
while 1:
start_time = time.time()
for coin_object in self.coin_list:
coin_object.monitor()
end_time = time.time() - start_time
print('\n')
time.sleep(max(0, 60 - end_time))
def _get_binance_data(self):
self.init_dataframe = pd.DataFrame(self.binance_client.get_ticker(), columns=['symbol', 'quoteVolume'])
self.init_dataframe.quoteVolume = pd.to_numeric(self.init_dataframe.quoteVolume)
# Adding name and investment columns
self.init_dataframe['name'] = ''
self.init_dataframe['investment'] = float()
self.init_dataframe['base_asset'] = ''
self.init_dataframe['quote_asset'] = ''
self.init_dataframe['base_asset_balance'] = float()
self.init_dataframe['quote_asset_balance'] = float()
self.init_dataframe['min_investment'] = float()
self.init_dataframe['min_qty'] = float()
self.init_dataframe['min_step'] = float()
# filtering selected pairs for trade out of all 398 cryptocurrencies
# filter for all USDT pairs
condition1 = self.init_dataframe.symbol.str[-len(self.quote_currency_1):] == self.quote_currency_1
condition1 = self.init_dataframe.loc[condition1, :]
condition1 = condition1.loc[condition1.quoteVolume > 1, :]
# condition1 = condition1.nlargest(self.symbol_count_1, 'quoteVolume')
# filter all BNB pairs excluding TUSD as base currency and pick top 10 by 24H volume
condition2 = (self.init_dataframe.symbol.str[-len(self.quote_currency_2):] == self.quote_currency_2) & \
(self.init_dataframe.symbol.str[:-len(self.quote_currency_2)] != self.base_currency_3)
condition2 = self.init_dataframe.loc[condition2, :]
condition2 = condition2.nlargest(self.symbol_count_2, 'quoteVolume')
# pick all pairs with base currency as TUSD
condition3 = (self.init_dataframe.symbol.str[:len(self.base_currency_3)] == self.base_currency_3) & \
(self.init_dataframe.symbol.str[len(self.base_currency_3):] != 'USDT')
condition3 = self.init_dataframe.loc[condition3, :]
# condition3 = condition3.nlargest(self.symbol_count_3, 'quoteVolume')
self.init_dataframe = pd.concat([condition1, condition2, condition3])
# make index as name of the symbol to make it behave like a dict-ish
self.init_dataframe = self.init_dataframe[['symbol', 'name', 'investment', 'base_asset', 'base_asset_balance',
'quote_asset', 'quote_asset_balance',
'min_investment',
'min_qty', 'min_step']]
self.init_dataframe = self.init_dataframe.drop_duplicates(subset=['symbol'], keep=False)
self.__get_symbol_info_data()
self.init_dataframe.base_asset = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['base_asset']
)
self.init_dataframe.base_asset_balance = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['base_asset_balance']
)
self.init_dataframe.name = self.init_dataframe.base_asset.apply(
lambda x: self._get_symbol_to_full_name(x)
)
self.init_dataframe.min_investment = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['min_investment']
)
self.init_dataframe.min_qty = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['min_qty']
)
self.init_dataframe.quote_asset = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['quote_asset']
)
self.init_dataframe.quote_asset_balance = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['quote_asset_balance']
)
self.init_dataframe.min_step = self.init_dataframe.symbol.apply(
lambda x: self.symbol_info_dict[x]['min_step']
)
self.init_dataframe.set_index('symbol', inplace=True)
def _get_full_name_data(self):
""" returns a dict which converts crypto symbol to full name from coin market cap """
cmp = Market()
cmp_data = cmp.listings()
for details in cmp_data['data']:
self.symbol_to_fullname_dict[details['symbol']] = details['website_slug']
self.symbol_to_fullname_dict['BCC'] = 'Bitcoin Cash'
self.symbol_to_fullname_dict['IOTA'] = 'IOTA'
def _get_symbol_to_full_name(self, symbol):
try:
return self.symbol_to_fullname_dict[symbol]
except:
return symbol
def __get_symbol_info_data(self):
for index, row in self.init_dataframe.iterrows():
pair = row[0]
symbol_info = self.binance_client.get_symbol_info(pair)
base_asset = symbol_info['baseAsset']
quote_asset = symbol_info['quoteAsset']
min_investment = float(symbol_info['filters'][2]['minNotional'])
min_qty = float(symbol_info['filters'][1]['minQty'])
min_step = float(symbol_info['filters'][1]['stepSize'])
base_asset_balance = float(self.binance_client.get_asset_balance(asset=base_asset)['free'])
quote_asset_balance = float(self.binance_client.get_asset_balance(asset=quote_asset)['free'])
self.symbol_info_dict[pair] = {'base_asset': base_asset, 'quote_asset': quote_asset,
'min_investment': min_investment, 'min_qty': min_qty, 'min_step': min_step,
'base_asset_balance': base_asset_balance,
'quote_asset_balance': quote_asset_balance}
try:
CCTP()
except Exception as bigE:
logging.exception('program crashed {}'.format(bigE))
| [
"smtplib.SMTP_SSL",
"zmq.Context",
"pandas.option_context",
"logging.info",
"colorama.init",
"binance.client.Client",
"logging.critical",
"pandas.DataFrame",
"sentimentAnalyse.SentimentAnalyse",
"coinmarketcap.Market",
"time.time",
"warnings.filterwarnings",
"logging.basicConfig",
"pandas.Series",
"datetime.datetime.fromtimestamp",
"time.strftime",
"os.getcwd",
"pandas.to_numeric",
"pandas.concat"
] | [((341, 359), 'colorama.init', 'init', ([], {'convert': '(True)'}), '(convert=True)\n', (345, 359), False, 'from colorama import init, Fore, Back, Style\n'), ((361, 394), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (384, 394), False, 'import warnings\n'), ((493, 604), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': 'log_file', 'level': 'logging.INFO', 'format': '"""%(asctime)s:%(levelname)s:%(message)s"""'}), "(filename=log_file, level=logging.INFO, format=\n '%(asctime)s:%(levelname)s:%(message)s')\n", (512, 604), False, 'import logging\n'), ((419, 430), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (428, 430), False, 'import os\n'), ((1701, 1735), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (1714, 1735), False, 'import time\n'), ((6428, 6462), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (6441, 6462), False, 'import time\n'), ((17214, 17227), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (17225, 17227), False, 'import zmq\n'), ((17621, 17662), 'binance.client.Client', 'Client', (['self.public_key', 'self.private_key'], {}), '(self.public_key, self.private_key)\n', (17627, 17662), False, 'from binance.client import Client\n'), ((18769, 18783), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (18781, 18783), True, 'import pandas as pd\n'), ((19106, 19224), 'logging.info', 'logging.info', (['"""getting data from binance and coinbase... (one time loading) (apox 60 seconds - 140 seconds)"""'], {}), "(\n 'getting data from binance and coinbase... (one time loading) (apox 60 seconds - 140 seconds)'\n )\n", (19118, 19224), False, 'import logging\n'), ((19952, 20008), 'logging.info', 'logging.info', (['"""Total currencies we will be dealing with"""'], {}), "('Total currencies we will be dealing with')\n", (19964, 20008), False, 'import logging\n'), ((20340, 20466), 'logging.info', 'logging.info', (['"""getting historical data from binance for technical analysis... (one time loading) (apox 100 seconds)"""'], {}), "(\n 'getting historical data from binance for technical analysis... 
(one time loading) (apox 100 seconds)'\n )\n", (20352, 20466), False, 'import logging\n'), ((21924, 21970), 'pandas.to_numeric', 'pd.to_numeric', (['self.init_dataframe.quoteVolume'], {}), '(self.init_dataframe.quoteVolume)\n', (21937, 21970), True, 'import pandas as pd\n'), ((23786, 23833), 'pandas.concat', 'pd.concat', (['[condition1, condition2, condition3]'], {}), '([condition1, condition2, condition3])\n', (23795, 23833), True, 'import pandas as pd\n'), ((25802, 25810), 'coinmarketcap.Market', 'Market', ([], {}), '()\n', (25808, 25810), False, 'from coinmarketcap import Market\n'), ((444, 472), 'time.strftime', 'time.strftime', (['"""%Y %m %d %H"""'], {}), "('%Y %m %d %H')\n", (457, 472), False, 'import time\n'), ((3039, 3053), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3051, 3053), True, 'import pandas as pd\n'), ((6257, 6290), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(gt / 1000)'], {}), '(gt / 1000)\n', (6279, 6290), False, 'from datetime import datetime\n'), ((16374, 16413), 'smtplib.SMTP_SSL', 'smtplib.SMTP_SSL', (['"""smtp.gmail.com"""', '(465)'], {}), "('smtp.gmail.com', 465)\n", (16390, 16413), False, 'import smtplib\n'), ((16644, 16674), 'logging.critical', 'logging.critical', (['"""email sent"""'], {}), "('email sent')\n", (16660, 16674), False, 'import logging\n'), ((19455, 19491), 'logging.critical', 'logging.critical', (['"""Binance down - 1"""'], {}), "('Binance down - 1')\n", (19471, 19491), False, 'import logging\n'), ((20022, 20094), 'pandas.option_context', 'pd.option_context', (['"""display.max_rows"""', 'None', '"""display.max_columns"""', 'None'], {}), "('display.max_rows', None, 'display.max_columns', None)\n", (20039, 20094), True, 'import pandas as pd\n'), ((21518, 21529), 'time.time', 'time.time', ([], {}), '()\n', (21527, 21529), False, 'import time\n'), ((8233, 8308), 'pandas.Series', 'pd.Series', ([], {'data': '[latest_time, latest_price, 0, 0]', 'index': 'data_frame.columns'}), '(data=[latest_time, latest_price, 0, 0], index=data_frame.columns)\n', (8242, 8308), True, 'import pandas as pd\n'), ((9985, 10061), 'logging.info', 'logging.info', (['"""trade was placed earlier, waiting for take profit to sell..."""'], {}), "('trade was placed earlier, waiting for take profit to sell...')\n", (9997, 10061), False, 'import logging\n'), ((21640, 21651), 'time.time', 'time.time', ([], {}), '()\n', (21649, 21651), False, 'import time\n'), ((6781, 6898), 'logging.critical', 'logging.critical', (['"""binance rate limit reached - could not retrieve quote, base, and current price balances"""'], {}), "(\n 'binance rate limit reached - could not retrieve quote, base, and current price balances'\n )\n", (6797, 6898), False, 'import logging\n'), ((7944, 8049), 'logging.critical', 'logging.critical', (['"""binance rate limit reached most probably - could not retrieve latest candle"""'], {}), "(\n 'binance rate limit reached most probably - could not retrieve latest candle'\n )\n", (7960, 8049), False, 'import logging\n'), ((20853, 20877), 'sentimentAnalyse.SentimentAnalyse', 'SentimentAnalyse', (['row[0]'], {}), '(row[0])\n', (20869, 20877), False, 'from sentimentAnalyse import SentimentAnalyse\n')] |
# !/usr/bin/env python3
import math
import logging
import csv
import time
from planet import Direction
logger = logging.getLogger('Odometry')
class Odometry:
def __init__(self, robo):
"""
Initializes odometry module
"""
self.robot = robo
self.distance_per_tick = (5.5 * math.pi) / self.robot.m_left.count_per_rot
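        # Wheel circumference (5.5 cm diameter times pi) divided by the motor's
        # encoder counts per rotation gives the distance travelled per tick.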
self.wheelbase = 15 # min(12.3) max(17.4)
self.dataList = []
self.rot = 0
self.pos = [0,0]
self.direction = Direction.NORTH
self.firstNode = True
self.oldNode = [1, 1]
self.currentNode = [1, 1]
self.fromDirection = Direction.NORTH
self.toDirection = Direction.NORTH
def addData(self, left, right):
"""
adds left and right motor position to dataList
:param int: left motor position
:param int: right motor position
"""
d = [left, right]
self.dataList.append(d)
def calc(self, color):
self.oldNode = self.currentNode
# first node was set by mothership
if self.firstNode:
# self.robot.comm.set_testplanet(self.robot.comm._planetName)
self.robot.comm.send_ready()
time.sleep(3)
return
prev = self.dataList[0]
gamma = self.rot
delta_x = 0
delta_y = 0
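        # Dead-reckoning over the buffered encoder samples: alpha is the change
        # of heading between two samples, s the chord length driven, and
        # (dx, dy) its projection relative to the current heading gamma.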
for el in self.dataList:
dist_left = self.motorPosToDist(prev[0], el[0])
dist_right = self.motorPosToDist(prev[1], el[1])
alpha = (dist_right - dist_left) / self.wheelbase
beta = alpha / 2
if alpha != 0:
s = ((dist_right + dist_left) / alpha) * math.sin(beta)
else:
s = dist_left
dx = math.sin(gamma + beta) * s
dy = math.cos(gamma + beta) * s
gamma -= alpha
delta_x += dx
delta_y += dy
prev = el
self.pos[0] += delta_x
self.pos[1] += delta_y
self.dataList.clear()
self.rot = gamma
self.rot = self.normAngleRad(self.rot)
self.rot = self.roundRotation()
self.direction = self.angleToDirection(self.rot)
self.pos[0], self.pos[1] = self.roundPos()
x, y = self.getNodeCoord()
self.currentNode = [x, y]
# send path to mothership
e = self.currentNode.copy()
self.toDirection = self.oppositeDirection(self.direction)
endNode = [e[0], e[1], self.toDirection]
s = self.oldNode.copy()
startNode = [s[0], s[1], self.fromDirection]
logger.debug('Start Node: ')
logger.debug(startNode)
logger.debug('End Node: ')
logger.debug(endNode)
self.robot.comm.sendPath(startNode, endNode, "free")
def radToDeg(self, rad):
"""
converts an angle from radians to degrees
:param float: angle (rad)
:return float: angle (deg)
"""
deg = math.degrees(rad)
return deg
def degToRad(self, deg):
"""
converts an angle from degrees to radians
:param float: angle (deg)
:return float: angle(rad)
"""
rad = math.radians(deg)
return rad
def addOffset(self, offset):
"""
        adds an offset (in degrees) to the current rotation
:param float: angle in degrees
"""
self.rot += self.degToRad(offset)
self.rot = self.normAngleRad(self.rot)
self.direction = self.angleToDirection(self.rot)
def normAngleRad(self, angle):
"""
        normalizes an angle to the range [0, 2*pi)
        :param float: angle
        :return float: normalized angle
"""
angle = angle % (2*math.pi)
return angle
def normAngleDeg(self, angle):
"""
        normalizes an angle to the range [0, 360)
        :param float: angle
        :return float: normalized angle
"""
#while(angle <= -180):
# angle += 360
#while(angle > 180):
# angle -= 360
angle = angle % 360
return angle
def currentDirection(self):
"""
returns the current direction
:return Direction
"""
return self.angleToDirection(self.rot)
def angleToDirection(self, angle):
"""
returns a directions from an angle
:param float: angle
:return Direction
"""
angle = angle % (2*math.pi)
if angle <= (math.pi * 1/4) or angle > (math.pi * 7/4):
return Direction.NORTH
elif angle >= (math.pi * 1/4) and angle < (math.pi * 3/4):
return Direction.EAST
elif angle >= (math.pi * 3/4) and angle < (math.pi * 5/4):
return Direction.SOUTH
elif angle >= (math.pi * 5/4) and angle < (math.pi * 7/4):
return Direction.WEST
def directionToAngle(self, dir: Direction):
if dir == Direction.NORTH:
return 0
elif dir == Direction.EAST:
return 90
elif dir == Direction.SOUTH:
return 180
elif dir == Direction.WEST:
return 270
def directionToRadian(self, dir: Direction):
if dir == Direction.NORTH:
return 0
elif dir == Direction.EAST:
return (1/2 * math.pi)
elif dir == Direction.SOUTH:
return math.pi
elif dir == Direction.WEST:
return (6/4 * math.pi)
def motorPosToDist(self, start, end):
"""
converts two motor positions to a distance
:param int: start
:param int: end
:return float: distance
"""
dist = (end-start) * self.distance_per_tick
return dist
def roundPos(self):
"""
rounds the current position to n*50
:return int, int
"""
return (round(self.pos[0]/50) * 50), (round(self.pos[1]/50) * 50)
def getNodeCoord(self):
"""
returns the current position in node coordinates
return int, int
"""
posX, posY = self.roundPos()
return (posX//50), (posY//50)
def roundRotation(self):
"""
rounds the current rotation
:return float
"""
deg = self.radToDeg(self.rot)
deg = round(deg / 90) * 90
return self.degToRad(deg)
def updateRobo(self, posX, posY, direction):
self.pos[0] = posX * 50
self.pos[1] = posY * 50
self.direction = self.oppositeDirection(Direction(direction))
self.toDirection = self.direction
self.currentNode = [posX, posY]
self.rot = self.directionToRadian(self.direction)
logger.debug('Updated Robo: ' + str(self.pos[0]) + '/' + str(self.pos[1]) + ' ' + str(self.direction))
def setupRobo(self, posX, posY, startDirection):
self.pos[0] = posX * 50
self.pos[1] = posY * 50
self.direction = Direction(startDirection)
self.oldNode = [posX, posY]
self.currentNode = [posX, posY]
logger.debug('Robot was set up...')
def oppositeDirection(self, dir: Direction):
if dir == Direction.NORTH:
return Direction.SOUTH
elif dir == Direction.SOUTH:
return Direction.NORTH
elif dir == Direction.WEST:
return Direction.EAST
elif dir == Direction.EAST:
return Direction.WEST | [
"logging.getLogger",
"math.degrees",
"time.sleep",
"math.radians",
"math.cos",
"planet.Direction",
"math.sin"
] | [((114, 143), 'logging.getLogger', 'logging.getLogger', (['"""Odometry"""'], {}), "('Odometry')\n", (131, 143), False, 'import logging\n'), ((3052, 3069), 'math.degrees', 'math.degrees', (['rad'], {}), '(rad)\n', (3064, 3069), False, 'import math\n'), ((3275, 3292), 'math.radians', 'math.radians', (['deg'], {}), '(deg)\n', (3287, 3292), False, 'import math\n'), ((6955, 6980), 'planet.Direction', 'Direction', (['startDirection'], {}), '(startDirection)\n', (6964, 6980), False, 'from planet import Direction\n'), ((1239, 1252), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1249, 1252), False, 'import time\n'), ((6538, 6558), 'planet.Direction', 'Direction', (['direction'], {}), '(direction)\n', (6547, 6558), False, 'from planet import Direction\n'), ((1816, 1838), 'math.sin', 'math.sin', (['(gamma + beta)'], {}), '(gamma + beta)\n', (1824, 1838), False, 'import math\n'), ((1860, 1882), 'math.cos', 'math.cos', (['(gamma + beta)'], {}), '(gamma + beta)\n', (1868, 1882), False, 'import math\n'), ((1703, 1717), 'math.sin', 'math.sin', (['beta'], {}), '(beta)\n', (1711, 1717), False, 'import math\n')] |
from flask import request
from app.models import User, UserType
from flask_restful import Resource
from app.requests.users import PostRequest, PutRequest
from app.middlewares.validation import validate
from app.middlewares.auth import user_auth, admin_auth
from app.utils import decoded_qs
class UserResource(Resource):
@admin_auth
def get(self, user_id):
# exists? ...
user = User.query.get(user_id)
if not user:
return {
'success': False,
'message': 'User not found.',
}, 404
fields = decoded_qs()
if fields and fields.get('fields') is not None:
fields = fields.get('fields').split(',')
return {
'success': True,
'message': 'User successfully retrieved.',
'user': user.to_dict(fields=fields)
}
@admin_auth
@validate(PutRequest)
def put(self, user_id):
# check exists? ...
user = User.query.get(user_id)
if not user:
return {
'success': False,
'message': 'User not found',
}, 404
fields = decoded_qs()
if fields and fields.get('fields') is not None:
fields = fields.get('fields').split(',')
role = request.json.get('role')
if role and role == UserType.SUPER_ADMIN:
return {
'success': False,
'message': 'Only one super admin is allowed',
}, 400
# now update...
user.update(request.json)
return {
'success': True,
'message': 'User successfully updated.',
'user': user.to_dict(fields=fields)
}
@admin_auth
def delete(self, user_id):
# exists? ...
user = User.query.get(user_id)
if not user:
return {
'success': False,
'message': 'User not found.',
}, 404
if user.is_super_admin():
return {
'success': False,
'message': 'You cannot delete this users account.',
}, 401
user.delete()
return {
'success': True,
'message': 'User successfully deleted.',
}
class UserListResource(Resource):
@admin_auth
def get(self):
resp = User.paginate(
filters=decoded_qs(),
name='users'
)
resp['message'] = 'Successfully retrieved users.'
resp['success'] = True
return resp
@admin_auth
@validate(PostRequest)
def post(self):
user = User.create(request.json)
return {
'success': True,
'message': 'Successfully saved user.',
'user': user.to_dict()
}, 201
| [
"app.utils.decoded_qs",
"app.middlewares.validation.validate",
"app.models.User.create",
"flask.request.json.get",
"app.models.User.query.get"
] | [((891, 911), 'app.middlewares.validation.validate', 'validate', (['PutRequest'], {}), '(PutRequest)\n', (899, 911), False, 'from app.middlewares.validation import validate\n'), ((2604, 2625), 'app.middlewares.validation.validate', 'validate', (['PostRequest'], {}), '(PostRequest)\n', (2612, 2625), False, 'from app.middlewares.validation import validate\n'), ((403, 426), 'app.models.User.query.get', 'User.query.get', (['user_id'], {}), '(user_id)\n', (417, 426), False, 'from app.models import User, UserType\n'), ((587, 599), 'app.utils.decoded_qs', 'decoded_qs', ([], {}), '()\n', (597, 599), False, 'from app.utils import decoded_qs\n'), ((983, 1006), 'app.models.User.query.get', 'User.query.get', (['user_id'], {}), '(user_id)\n', (997, 1006), False, 'from app.models import User, UserType\n'), ((1165, 1177), 'app.utils.decoded_qs', 'decoded_qs', ([], {}), '()\n', (1175, 1177), False, 'from app.utils import decoded_qs\n'), ((1304, 1328), 'flask.request.json.get', 'request.json.get', (['"""role"""'], {}), "('role')\n", (1320, 1328), False, 'from flask import request\n'), ((1829, 1852), 'app.models.User.query.get', 'User.query.get', (['user_id'], {}), '(user_id)\n', (1843, 1852), False, 'from app.models import User, UserType\n'), ((2661, 2686), 'app.models.User.create', 'User.create', (['request.json'], {}), '(request.json)\n', (2672, 2686), False, 'from app.models import User, UserType\n'), ((2424, 2436), 'app.utils.decoded_qs', 'decoded_qs', ([], {}), '()\n', (2434, 2436), False, 'from app.utils import decoded_qs\n')] |
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
)
from antarest.study.storage.rawstudy.model.filesystem.folder_node import (
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.root.desktop import (
Desktop,
)
from antarest.study.storage.rawstudy.model.filesystem.root.input.input import (
Input,
)
from antarest.study.storage.rawstudy.model.filesystem.root.layers.layers import (
Layers,
)
from antarest.study.storage.rawstudy.model.filesystem.root.logs import Logs
from antarest.study.storage.rawstudy.model.filesystem.root.output.output import (
Output,
)
from antarest.study.storage.rawstudy.model.filesystem.root.settings.settings import (
Settings,
)
from antarest.study.storage.rawstudy.model.filesystem.root.study_antares import (
StudyAntares,
)
from antarest.study.storage.rawstudy.model.filesystem.root.user import User
class FileStudyTree(FolderNode):
"""
Top level node of antares tree structure
"""
def build(self) -> TREE:
children: TREE = {
"Desktop": Desktop(
self.context, self.config.next_file("Desktop.ini")
),
"study": StudyAntares(
self.context, self.config.next_file("study.antares")
),
"settings": Settings(
self.context, self.config.next_file("settings")
),
"layers": Layers(self.context, self.config.next_file("layers")),
"logs": Logs(self.context, self.config.next_file("logs")),
"input": Input(self.context, self.config.next_file("input")),
"user": User(self.context, self.config.next_file("user")),
}
if self.config.outputs:
output_config = self.config.next_file("output")
output_config.path = self.config.output_path or output_config.path
children["output"] = Output(self.context, output_config)
return children
| [
"antarest.study.storage.rawstudy.model.filesystem.root.output.output.Output"
] | [((2007, 2042), 'antarest.study.storage.rawstudy.model.filesystem.root.output.output.Output', 'Output', (['self.context', 'output_config'], {}), '(self.context, output_config)\n', (2013, 2042), False, 'from antarest.study.storage.rawstudy.model.filesystem.root.output.output import Output\n')] |
import asyncio
import navilog
class TaskScheduler:
def __init__(self, bot):
"""Componente composto de um dicionário de rotinas, capaz de agendar uma rotina para ser executada de acordo com seu intervalo.
Args:
bot (NaviBot): A instância do bot em questão.
"""
self._tasks = {}
self._bot = bot
def schedule(self, task, key=None, append=False):
"""Recebe uma tarefa para ser agendada, ou seja, rodar em um loop a cada intervalo de tempo determinado.
Args:
task (NaviRoutine): A rotina a ser rodada constantemente.
key (str, optional): Chave que identifica qual o conjunto de rotinas, caso omitida, será utilizado task.name.
append (bool, optional): Permitir mais de única rotina em um conjunto de rotinas.
"""
# Se a chave não for informada, utilizar o próprio nome da tarefa
if key is None:
key = task.name
if not key in self._tasks:
self._tasks[key] = [task]
else:
			# When append is false, only a single task is kept per key
if len(self._tasks[key]) == 0 or append:
self._tasks[key].append(task)
else:
self._bot.log.write(f"A tarefa '{task.name}' foi solicitada, porém já existe", logtype=navilog.WARNING)
return
task.running_task = asyncio.get_running_loop().create_task(self._loopTask(task, key))
async def _loopTask(self, task, key):
"""Procedimento utilizado para continuar executando em loop a tarefa/rotina.
Args:
task (NaviRoutine): A rotina a ser rodada constantemente.
key (str): Chave pertencente.
"""
try:
segundos = task.get_timespan_seconds()
while task.enabled:
task.running = True
await asyncio.sleep(segundos - task.get_timespent())
if task.enabled:
await task.run(self._bot)
if task.get_timespent() >= segundos:
self._bot.log.write(f"Perdido um ciclo de execução da tarefa '{task.name}', timespent={task.get_timespent():.3f}, timespan={segundos}s", logtype=navilog.WARNING)
task.running = False
except asyncio.CancelledError:
self._bot.log.write(f"Cancelado a tarefa '{task.name}'", logtype=navilog.WARNING)
task.running_task = None
finally:
			# Try to remove it from the routine set.
self.cancel(task, key)
def cancel(self, task, key=None):
"""Pede o cancelamento da tarefa, caso esteja em execução e também retira do conjunto de tarefas pertencente.
Args:
task (NaviRoutine): A rotina a ser cancelada.
key (str, optional): A chave que representa o conjunto, caso omitida, será utilizado por padrão task.name.
"""
if key is None:
key = task.name
if key in self._tasks.keys():
			# If it is already disabled, it is only a matter of time until it gets cancelled; do nothing.
if task.running_task != None and task.enabled:
try:
task.running_task.cancel()
except asyncio.CancelledError:
self._bot.log.write(f"Ignorando cancelamento da tarefa '{task.name}' pois a mesma já foi cancelada", logtype=navilog.WARNING)
try:
self._tasks[key].remove(task)
except ValueError:
				# Not in the list
pass
def get(self, key):
"""Retorna a lista de tarefas presente em uma chave.
Args:
key (str): A chave presentando o conjunto.
Returns:
list(NaviRoutine), None: Uma lista de rotinas, caso não exista nenhuma na determinada chave, retorna None.
"""
try:
return self._tasks[key]
except KeyError:
return None
def get_all_keys(self):
"""Retorna todas as chaves existentes.
Returns:
list(str): Lista de chaves presentes.
"""
return self._tasks.keys() | [
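# Illustrative usage sketch (not part of the original module); assumes an
# already constructed NaviBot instance `bot` and a NaviRoutine `routine`:
#   scheduler = TaskScheduler(bot)
#   scheduler.schedule(routine)   # starts looping the routine at its interval
#   scheduler.cancel(routine)     # stops it and drops it from its routine set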
"asyncio.get_running_loop"
] | [((1282, 1308), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (1306, 1308), False, 'import asyncio\n')] |
import pandas as pd
from tl.exceptions import RequiredInputParameterMissingException, UnsupportTypeError
from tl.file_formats_validator import FFV
def get_kg_links(score_column, file_path=None, df=None, label_column='label', top_k=5, k_rows=False):
if file_path is None and df is None:
raise RequiredInputParameterMissingException(
'One of the input parameters is required: {} or {}'.format("file_path", "df"))
if score_column is None:
raise RequiredInputParameterMissingException(
'One of the input parameters is required: {}'.format('score_column'))
if file_path:
df = pd.read_csv(file_path, dtype=object)
df[score_column].fillna(0.0, inplace=True)
df.fillna("", inplace=True)
df = df.astype(dtype={score_column: "float64"})
ffv = FFV()
if not (ffv.is_candidates_file(df)):
raise UnsupportTypeError("The input file is not a candidate file!")
topk_df = df.groupby(['column', 'row']).apply(lambda x: x.sort_values([score_column], ascending=False)) \
.reset_index(drop=True)
is_gt_present = 'evaluation_label' in df.columns
final_list = []
grouped_obj = topk_df.groupby(['column', 'row'])
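    # Each (column, row) group holds the candidates of one table cell; keep the
    # top-k by score and, if a ground-truth candidate exists but ranks below
    # top-k, make room for it so it is never dropped from the output.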
for key, grouped in grouped_obj:
grouped['rank'] = list(grouped[score_column].rank(method='first', ascending=False).astype(int))
grouped.drop_duplicates(subset='kg_id', inplace=True)
new_top_k = top_k
gt_rank = -1
if is_gt_present:
gt_rank_values = grouped[grouped['evaluation_label'].astype(int) == 1]['rank'].values
if len(gt_rank_values) > 0:
gt_rank = gt_rank_values[0]
if gt_rank > top_k:
new_top_k -= 1
if not (k_rows):
_ = {}
kg_ids = list(grouped['kg_id'])[:new_top_k]
kg_labels = list(grouped['kg_labels'])[:new_top_k]
kg_descriptions = list(grouped['kg_descriptions'])[:new_top_k]
kg_aliases = list(grouped['kg_aliases'])[:new_top_k]
scores = [str(score) for score in list(grouped[score_column])[:new_top_k]]
if gt_rank > top_k:
                kg_ids.append(list(grouped['kg_id'])[gt_rank])
                kg_labels.append(list(grouped['kg_labels'])[gt_rank])
                kg_descriptions.append(list(grouped['kg_descriptions'])[gt_rank])
                kg_aliases.append(list(grouped['kg_aliases'])[gt_rank])
scores.append(str(list(grouped[score_column])[gt_rank]))
_['column'] = key[0]
_['row'] = key[1]
_['label'] = grouped[label_column].unique()[0]
_['kg_id'] = '|'.join(kg_ids)
_['kg_labels'] = '|'.join(kg_labels)
_['kg_descriptions'] = '|'.join(kg_descriptions)
_['kg_aliases'] = '|'.join(kg_aliases)
_['ranking_score'] = '|'.join(scores)
final_list.append(_)
else:
if gt_rank > top_k:
topk_df_row = pd.concat([grouped.head(new_top_k), grouped[grouped['rank'] == gt_rank]])
else:
topk_df_row = grouped.head(new_top_k)
final_list.extend(topk_df_row.to_dict(orient='records'))
odf = pd.DataFrame(final_list)
return odf
| [
"pandas.DataFrame",
"tl.exceptions.UnsupportTypeError",
"tl.file_formats_validator.FFV",
"pandas.read_csv"
] | [((813, 818), 'tl.file_formats_validator.FFV', 'FFV', ([], {}), '()\n', (816, 818), False, 'from tl.file_formats_validator import FFV\n'), ((3225, 3249), 'pandas.DataFrame', 'pd.DataFrame', (['final_list'], {}), '(final_list)\n', (3237, 3249), True, 'import pandas as pd\n'), ((635, 671), 'pandas.read_csv', 'pd.read_csv', (['file_path'], {'dtype': 'object'}), '(file_path, dtype=object)\n', (646, 671), True, 'import pandas as pd\n'), ((874, 935), 'tl.exceptions.UnsupportTypeError', 'UnsupportTypeError', (['"""The input file is not a candidate file!"""'], {}), "('The input file is not a candidate file!')\n", (892, 935), False, 'from tl.exceptions import RequiredInputParameterMissingException, UnsupportTypeError\n')] |
"""
This file is borrowed directly from allennlp
"""
import logging
logger = logging.getLogger(__name__)
class ConfigurationError(Exception):
"""
The exception is raised by any mv object when it's misconfigured
"""
def __init__(self, message):
super(ConfigurationError, self).__init__()
self.message = message
def __str__(self):
return repr(self.message)
def log_pytorch_version_info():
import torch
logger.info("Pytorch version is: {}".format(torch.__version__))
def check_dimensions_match(dim_1: int,
dim_2: int,
dim_1_name: str,
dim_2_name: str) -> None:
if dim_1 != dim_2:
raise ConfigurationError("{} must match {}, but got {} and {} instead".format(dim_1_name, dim_2_name,
dim_1, dim_2))
| [
"logging.getLogger"
] | [((79, 106), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (96, 106), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
import scrapy
class LoginSpider(scrapy.Spider):
name = 'login'
allowed_domains = ['webscraping.com']
start_urls = ['http://example.webscraping.com/places/default/user/login']
def parse(self, response):
return scrapy.FormRequest.from_response(
response,
formdata={
'email': '<EMAIL>',
'password': '<PASSWORD>'
},
callback=self.after_login
)
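    # FormRequest.from_response pre-fills every field found in the page's login
    # form and only overrides the ones listed in formdata, so hidden inputs
    # such as CSRF tokens are submitted unchanged.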
def after_login(self, response):
# check login succeed before going on
if response.css('div#pagination').get():
self.logger.info("Login Success!")
return
| [
"scrapy.FormRequest.from_response"
] | [((264, 394), 'scrapy.FormRequest.from_response', 'scrapy.FormRequest.from_response', (['response'], {'formdata': "{'email': '<EMAIL>', 'password': '<PASSWORD>'}", 'callback': 'self.after_login'}), "(response, formdata={'email': '<EMAIL>',\n 'password': '<PASSWORD>'}, callback=self.after_login)\n", (296, 394), False, 'import scrapy\n')] |
import glob
import logging
import os
import shutil
import subprocess
import sys
import unittest
import time
from os import chdir as cd
from os import mkdir
TESTDIR='/tmp/git-bzr-test'
BZRBRANCHNAME='bzrtest'
BZRBRANCH='%s/%s' % (TESTDIR, BZRBRANCHNAME)
ROOTDIR=os.path.dirname(os.path.dirname(__file__))
VENDOR=os.path.join(ROOTDIR, 'vendor')
GITBZR=os.path.join(ROOTDIR, 'git-bzr')
PYFASTIMPORT=os.path.join(VENDOR, 'python-fastimport')
PLUGINDIR=os.path.join(VENDOR, 'plugins')
BZRFASTIMPORT=os.path.join(PLUGINDIR, 'fastimport')
BZRFASTIMPORT_STABLE=os.path.join(VENDOR, 'fastimport_stable')
BZRFASTIMPORT_STABLE_TARBALL=os.path.join(VENDOR, 'bzr-fastimport-0.10.0')
BZRFASTIMPORT_HEAD=os.path.join(VENDOR, 'fastimport_head')
BZRPATH = os.path.join(VENDOR, 'bzr-%s')
BZR = os.path.join(VENDOR, 'bzr')
VERSIONS = [
('2.2', '2.2.0'),
('2.2', '2.2.1'),
('2.2', '2.2.2'),
('2.2', '2.2.3'),
('2.2', '2.2.4'),
('2.3', '2.3.0'),
('2.3', '2.3.1')
]
# Set a timestamp at load time so that we can memoize our setup step
TIMESTAMP = time.time()
# From python 2.7
def check_output(*popenargs, **kwargs):
r"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> check_output(["ls", "-l", "/dev/null"])
'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use stderr=STDOUT.
>>> check_output(["/bin/sh", "-c",
... "ls -l non_existent_file ; exit 0"],
... stderr=STDOUT)
'ls: non_existent_file: No such file or directory\n'
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
logging.debug(' '.join(popenargs[0]))
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
def bzr(*args):
return check_output(['bzr'] + list(args))
def git(*args):
return check_output(['git'] + list(args))
def gitbzr(*args):
return check_output([GITBZR] + list(args))
def rmdir(path):
try:
shutil.rmtree(path)
except Exception:
pass
class SetupVendorOnly(object):
BZR = BZRPATH % '2.3.1'
BZRFASTIMPORT = BZRFASTIMPORT_STABLE
def setup_vendor(self):
logging.getLogger().setLevel(logging.INFO)
self._ensure_checkouts()
self._symlink_plugin()
def _symlink_plugin(self):
try:
os.unlink(BZRFASTIMPORT)
except Exception:
pass
os.symlink(self.BZRFASTIMPORT, BZRFASTIMPORT)
def _symlink_bzr(self, force=None):
try:
os.unlink(BZR)
except Exception:
pass
path = force and force or self.BZR
os.symlink(path, BZR)
def _ensure_checkouts(self):
exec_path = ('PATH' in os.environ
and os.environ['PATH']
or '')
if not exec_path.startswith(BZR):
os.environ['PATH'] = '%s:%s' % (BZR, exec_path)
download_url = 'http://launchpad.net/bzr/%s/%s/+download/bzr-%s.tar.gz'
tarball = 'bzr-%s.tar.gz'
for v in VERSIONS:
if not os.path.exists(BZRPATH % v[1]):
logging.info('Downloading %s', download_url % (v[0], v[1], v[1]))
cd(VENDOR)
check_output(['curl', '-O', '-L',
download_url % (v[0], v[1], v[1])
])
check_output(['tar', '-xzf', tarball % v[1]])
# we need a functional bzr on our path to get anything else
self._symlink_bzr(BZRPATH % '2.3.1')
bzr_head = BZRPATH % 'head'
if not os.path.exists(bzr_head):
logging.info('Getting HEAD of bzr')
cd(VENDOR)
bzr('branch', 'lp:bzr', BZRPATH % 'head')
if not os.path.exists(PYFASTIMPORT):
logging.info('Getting a HEAD of python-fastimport')
cd(VENDOR)
bzr('branch', 'lp:python-fastimport')
if not os.path.exists(PLUGINDIR):
os.mkdir(PLUGINDIR)
if not os.path.exists(BZRFASTIMPORT_STABLE):
logging.info('Getting revision 307 of bzr-fastimport')
cd(VENDOR)
bzr('branch', 'lp:bzr-fastimport', '-r', '307', BZRFASTIMPORT_STABLE)
if not os.path.exists(BZRFASTIMPORT_HEAD):
logging.info('Getting HEAD of bzr-fastimport')
cd(VENDOR)
bzr('branch', 'lp:bzr-fastimport', BZRFASTIMPORT_HEAD)
if not os.path.exists(BZRFASTIMPORT_STABLE_TARBALL):
logging.info('Downloading bzr-fastimport version 0.10')
cd(VENDOR)
check_output(['curl', '-O', '-L',
'http://launchpad.net/bzr-fastimport/trunk/'
'0.10.0/+download/bzr-fastimport-0.10.0.tar.gz'
])
check_output(['tar', '-xzf', 'bzr-fastimport-0.10.0.tar.gz'])
python_path = ('PYTHONPATH' in os.environ
and os.environ['PYTHONPATH']
or '')
if not python_path.startswith(PYFASTIMPORT):
os.environ['PYTHONPATH'] = '%s:%s' % (PYFASTIMPORT, BZR)
os.environ['BZR_PLUGIN_PATH'] = PLUGINDIR
os.environ['BZR_PDB'] = '1'
class GitBzrTest(SetupVendorOnly, unittest.TestCase):
BZR = BZRPATH % '2.3.1'
BZRFASTIMPORT = BZRFASTIMPORT_STABLE
def setUp(self):
#SetupVendorOnly.setUp(self)
self._ensure_checkouts()
self._symlink_plugin()
self._setup_bzr_branches()
def tearDown(self):
pass
def _setup_bzr_branches(self):
memo = '%s_%s_%s' % (TESTDIR, self.__class__.__name__, TIMESTAMP)
if os.path.exists(memo):
rmdir(TESTDIR)
shutil.copytree(memo, TESTDIR)
else:
# make a bzr branch to interact with
rmdir(TESTDIR)
mkdir(TESTDIR)
cd(TESTDIR)
bzr('init', BZRBRANCH)
cd(BZRBRANCH)
open('touch.txt', 'w').write('touch')
bzr('add', '-v', 'touch.txt')
bzr('commit', '-v', '-m', 'touch test')
open('touch2.txt', 'w').write('touch2')
bzr('add', 'touch2.txt')
bzr('commit', '-m', 'touch2 test')
bzr('tag', 'some_tag')
# make another branch to test import later
cd(TESTDIR)
bzr('branch', BZRBRANCH, '%s_imported' % BZRBRANCH)
# make a default clone
cd(TESTDIR)
gitbzr('clone', BZRBRANCH, '%s_cloned' % BZRBRANCHNAME)
# clear old memos and copy it to our memo
old_memo_glob = '%s_%s_*' % (TESTDIR, self.__class__.__name__)
old_memos = glob.iglob(old_memo_glob)
for path in old_memos:
shutil.rmtree(path)
shutil.copytree(TESTDIR, memo)
def test_all(self):
"""Test most of the functionality.
    This test is a bit large; it is ported directly from a shell script.
"""
# TEST: clone with git-bzr-ng
# it should guess the name correctly but notice that the directory already
    # exists and fail
cd(TESTDIR)
self.assertRaises(subprocess.CalledProcessError,
gitbzr, 'clone', BZRBRANCH)
# TEST: clone it again with a better name
gitbzr('clone', BZRBRANCH, '%s_git' % BZRBRANCHNAME)
# Check for the branches we want
cd('%s_git' % BZRBRANCH)
branches = git('branch', '-a')
if 'bzr/master' not in branches:
self.fail('no bzr/master branch')
if '* master' not in branches:
self.fail('not on master branch')
# Check for files we expect
self.assertEqual('touch', open('touch.txt').read())
# push to a new branch
git('checkout', '-b', 'pushed')
open('touch2.txt', 'w').write('touch3')
git('add', 'touch2.txt')
git('commit', '-m', 'touch3 test')
gitbzr('push', '%s_pushed' % BZRBRANCH)
# do it again
open('touch2.txt', 'w').write('touch4')
git('add', 'touch2.txt')
git('commit', '-m', 'touch4 test')
gitbzr('push')
# update the bzr branch and sync the changes
# that bzr repo is not a working tree repo so we need to branch it in bzr
# and then push the changes back
cd(TESTDIR)
bzr('branch', '%s_pushed' % BZRBRANCH, '%s_branched' % BZRBRANCH)
cd('%s_branched' % BZRBRANCH)
open('touch2.txt', 'w').write('touch5')
bzr('commit', '-m', 'touch5')
bzr('push', '%s_pushed' % BZRBRANCH)
cd('%s_git' % BZRBRANCH)
gitbzr('sync')
# try to push again from git, should fail because we have not merged the
# changes
self.assertEquals('touch4', open('touch2.txt').read())
self.assertRaises(subprocess.CalledProcessError, gitbzr, 'push')
# this one should fail since there is nothing to commit
git('pull', '.', '--', 'bzr/pushed')
self.assertEquals('touch5', open('touch2.txt').read())
self.assertRaises(subprocess.CalledProcessError, gitbzr, 'push')
# edit a file and try to push
open('touch2.txt', 'w').write('touch6')
git('add', 'touch2.txt')
git('commit', '-m', 'touch6')
gitbzr('push')
# pull in our bzr branch and make sure we get the change
cd('%s_branched' % BZRBRANCH)
bzr('pull')
self.assertEquals('touch6', open('touch2.txt').read())
# TEST: import another branch and pull changes from `pushed`
cd('%s_git' % BZRBRANCH)
gitbzr('import', '%s_imported' % BZRBRANCH, 'imported')
git('checkout', 'imported')
git('pull', '.', '--', 'pushed')
gitbzr('push')
def test_push_relative_path(self):
cd('%s_cloned' % BZRBRANCH)
open('touch2.txt', 'w').write('CLONED')
git('add', 'touch2.txt')
git('commit', '-m', 'touched touch2')
# push back to previous bzr branch
gitbzr('push', '../%s' % BZRBRANCHNAME)
self.assertEqual('CLONED', open('%s/touch2.txt' % BZRBRANCH).read())
open('touch2.txt', 'w').write('CLONED2')
git('add', 'touch2.txt')
git('commit', '-m', 'touched2 touch2')
gitbzr('push')
self.assertEqual('CLONED2', open('%s/touch2.txt' % BZRBRANCH).read())
# push to a new repo
gitbzr('push', '../%s_new' % BZRBRANCHNAME)
cd('%s_new' % BZRBRANCH)
bzr('checkout', '.')
self.assertEqual('CLONED2', open('%s_new/touch2.txt' % BZRBRANCH).read())
def test_import_no_url(self):
self.assertRaises(subprocess.CalledProcessError, gitbzr, 'import')
def test_import_strip_tags(self):
# assert that the imported repo has our tag
cd(TESTDIR)
cd('%s_cloned' % BZRBRANCHNAME)
rv = git('tag')
self.assert_('some_tag' in rv)
# add an invalid tag and make sure it doesn't get imported
cd('%s_imported' % BZRBRANCH)
bzr('tag', 'some~invalid!tag')
cd(TESTDIR)
cd('%s_cloned' % BZRBRANCHNAME)
# the first try should fail due to an invalid tag
self.assertRaises(subprocess.CalledProcessError,
gitbzr,
'import',
'%s_imported' % BZRBRANCH,
'import_fail')
gitbzr('import', '--strip_tags', '%s_imported' % BZRBRANCH, 'import_win')
rv = git('tag')
self.assert_('some~invalid!tag' not in rv)
# test that clone supports the flag also
cd(TESTDIR)
self.assertRaises(subprocess.CalledProcessError,
gitbzr, 'clone', '%s_imported' % BZRBRANCH, 'import_fail')
gitbzr('clone', '--strip_tags', '%s_imported' % BZRBRANCH, 'import_win')
def test_gitbzr_init_master(self):
# make a new git repo
INITGIT = os.path.join(TESTDIR, 'init_master_git')
INITBZR = os.path.join(TESTDIR, 'init_master_bzr')
cd(TESTDIR)
git('init', INITGIT)
cd(INITGIT)
open('touch.txt', 'w').write('touch')
git('add', 'touch.txt')
git('commit', '-a', '-m', 'touch1')
gitbzr('init')
gitbzr('push', INITBZR)
cd(TESTDIR)
bzr('branch', INITBZR, '%s_working' % INITBZR)
cd('%s_working' % INITBZR)
self.assertEquals('touch', open('touch.txt').read())
def test_gitbzr_init_branch(self):
# make a new git repo
INITGIT = os.path.join(TESTDIR, 'init_branch_git')
INITBZR = os.path.join(TESTDIR, 'init_branch_bzr')
cd(TESTDIR)
git('init', INITGIT)
cd(INITGIT)
open('touch.txt', 'w').write('touch')
git('add', 'touch.txt')
git('commit', '-a', '-m', 'touch1')
git('checkout', '-b', 'new_branch')
open('touch.txt', 'w').write('touch2')
git('commit', '-a', '-m', 'touch2')
gitbzr('init')
gitbzr('push', INITBZR)
cd(TESTDIR)
bzr('branch', INITBZR, '%s_working' % INITBZR)
cd('%s_working' % INITBZR)
self.assertEquals('touch2', open('touch.txt').read())
class GitBzrHeadTest(GitBzrTest):
BZRFASTIMPORT = BZRFASTIMPORT_HEAD
class GitBzrHeadHeadTest(GitBzrTest):
BZR = BZRPATH % 'head'
BZRFASTIMPORT = BZRFASTIMPORT_HEAD
class GitBzrStableTarballTest(GitBzrTest):
BZRFASTIMPORT = BZRFASTIMPORT_STABLE_TARBALL
class GitBzrStable_2_2_0(GitBzrStableTarballTest):
BZR = BZRPATH % '2.2.0'
class GitBzrStable_2_2_1(GitBzrStableTarballTest):
BZR = BZRPATH % '2.2.1'
class GitBzrStable_2_2_2(GitBzrStableTarballTest):
BZR = BZRPATH % '2.2.2'
class GitBzrStable_2_2_3(GitBzrStableTarballTest):
BZR = BZRPATH % '2.2.3'
class GitBzrStable_2_2_4(GitBzrStableTarballTest):
BZR = BZRPATH % '2.2.4'
class GitBzrStable_2_3_0(GitBzrStableTarballTest):
BZR = BZRPATH % '2.3.0'
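# Note (added summary, not part of the original file): the GitBzrStable_* and GitBzrHead*
# subclasses exist to re-run the whole GitBzrTest battery while varying the BZR and
# BZRFASTIMPORT class attributes, i.e. against different pinned bzr releases and
# bzr-fastimport checkouts downloaded by _ensure_checkouts().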
| [
"logging.getLogger",
"os.path.exists",
"glob.iglob",
"subprocess.Popen",
"subprocess.CalledProcessError",
"os.path.join",
"os.symlink",
"logging.info",
"os.chdir",
"os.path.dirname",
"shutil.copytree",
"os.unlink",
"os.mkdir",
"shutil.rmtree",
"time.time"
] | [((314, 345), 'os.path.join', 'os.path.join', (['ROOTDIR', '"""vendor"""'], {}), "(ROOTDIR, 'vendor')\n", (326, 345), False, 'import os\n'), ((353, 385), 'os.path.join', 'os.path.join', (['ROOTDIR', '"""git-bzr"""'], {}), "(ROOTDIR, 'git-bzr')\n", (365, 385), False, 'import os\n'), ((399, 440), 'os.path.join', 'os.path.join', (['VENDOR', '"""python-fastimport"""'], {}), "(VENDOR, 'python-fastimport')\n", (411, 440), False, 'import os\n'), ((451, 482), 'os.path.join', 'os.path.join', (['VENDOR', '"""plugins"""'], {}), "(VENDOR, 'plugins')\n", (463, 482), False, 'import os\n'), ((497, 534), 'os.path.join', 'os.path.join', (['PLUGINDIR', '"""fastimport"""'], {}), "(PLUGINDIR, 'fastimport')\n", (509, 534), False, 'import os\n'), ((556, 597), 'os.path.join', 'os.path.join', (['VENDOR', '"""fastimport_stable"""'], {}), "(VENDOR, 'fastimport_stable')\n", (568, 597), False, 'import os\n'), ((627, 672), 'os.path.join', 'os.path.join', (['VENDOR', '"""bzr-fastimport-0.10.0"""'], {}), "(VENDOR, 'bzr-fastimport-0.10.0')\n", (639, 672), False, 'import os\n'), ((692, 731), 'os.path.join', 'os.path.join', (['VENDOR', '"""fastimport_head"""'], {}), "(VENDOR, 'fastimport_head')\n", (704, 731), False, 'import os\n'), ((742, 772), 'os.path.join', 'os.path.join', (['VENDOR', '"""bzr-%s"""'], {}), "(VENDOR, 'bzr-%s')\n", (754, 772), False, 'import os\n'), ((779, 806), 'os.path.join', 'os.path.join', (['VENDOR', '"""bzr"""'], {}), "(VENDOR, 'bzr')\n", (791, 806), False, 'import os\n'), ((1063, 1074), 'time.time', 'time.time', ([], {}), '()\n', (1072, 1074), False, 'import time\n'), ((280, 305), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (295, 305), False, 'import os\n'), ((2041, 2103), 'subprocess.Popen', 'subprocess.Popen', (['*popenargs'], {'stdout': 'subprocess.PIPE'}), '(*popenargs, stdout=subprocess.PIPE, **kwargs)\n', (2057, 2103), False, 'import subprocess\n'), ((2274, 2317), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['retcode', 'cmd'], {}), '(retcode, cmd)\n', (2303, 2317), False, 'import subprocess\n'), ((2554, 2573), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2567, 2573), False, 'import shutil\n'), ((2938, 2983), 'os.symlink', 'os.symlink', (['self.BZRFASTIMPORT', 'BZRFASTIMPORT'], {}), '(self.BZRFASTIMPORT, BZRFASTIMPORT)\n', (2948, 2983), False, 'import os\n'), ((3129, 3150), 'os.symlink', 'os.symlink', (['path', 'BZR'], {}), '(path, BZR)\n', (3139, 3150), False, 'import os\n'), ((5831, 5851), 'os.path.exists', 'os.path.exists', (['memo'], {}), '(memo)\n', (5845, 5851), False, 'import os\n'), ((7123, 7134), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (7125, 7134), True, 'from os import chdir as cd\n'), ((7384, 7408), 'os.chdir', 'cd', (["('%s_git' % BZRBRANCH)"], {}), "('%s_git' % BZRBRANCH)\n", (7386, 7408), True, 'from os import chdir as cd\n'), ((8224, 8235), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (8226, 8235), True, 'from os import chdir as cd\n'), ((8310, 8339), 'os.chdir', 'cd', (["('%s_branched' % BZRBRANCH)"], {}), "('%s_branched' % BZRBRANCH)\n", (8312, 8339), True, 'from os import chdir as cd\n'), ((8463, 8487), 'os.chdir', 'cd', (["('%s_git' % BZRBRANCH)"], {}), "('%s_git' % BZRBRANCH)\n", (8465, 8487), True, 'from os import chdir as cd\n'), ((9184, 9213), 'os.chdir', 'cd', (["('%s_branched' % BZRBRANCH)"], {}), "('%s_branched' % BZRBRANCH)\n", (9186, 9213), True, 'from os import chdir as cd\n'), ((9359, 9383), 'os.chdir', 'cd', (["('%s_git' % BZRBRANCH)"], {}), "('%s_git' % BZRBRANCH)\n", 
(9361, 9383), True, 'from os import chdir as cd\n'), ((9574, 9601), 'os.chdir', 'cd', (["('%s_cloned' % BZRBRANCH)"], {}), "('%s_cloned' % BZRBRANCH)\n", (9576, 9601), True, 'from os import chdir as cd\n'), ((10163, 10187), 'os.chdir', 'cd', (["('%s_new' % BZRBRANCH)"], {}), "('%s_new' % BZRBRANCH)\n", (10165, 10187), True, 'from os import chdir as cd\n'), ((10484, 10495), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (10486, 10495), True, 'from os import chdir as cd\n'), ((10500, 10531), 'os.chdir', 'cd', (["('%s_cloned' % BZRBRANCHNAME)"], {}), "('%s_cloned' % BZRBRANCHNAME)\n", (10502, 10531), True, 'from os import chdir as cd\n'), ((10655, 10684), 'os.chdir', 'cd', (["('%s_imported' % BZRBRANCH)"], {}), "('%s_imported' % BZRBRANCH)\n", (10657, 10684), True, 'from os import chdir as cd\n'), ((10724, 10735), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (10726, 10735), True, 'from os import chdir as cd\n'), ((10740, 10771), 'os.chdir', 'cd', (["('%s_cloned' % BZRBRANCHNAME)"], {}), "('%s_cloned' % BZRBRANCHNAME)\n", (10742, 10771), True, 'from os import chdir as cd\n'), ((11223, 11234), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (11225, 11234), True, 'from os import chdir as cd\n'), ((11524, 11564), 'os.path.join', 'os.path.join', (['TESTDIR', '"""init_master_git"""'], {}), "(TESTDIR, 'init_master_git')\n", (11536, 11564), False, 'import os\n'), ((11579, 11619), 'os.path.join', 'os.path.join', (['TESTDIR', '"""init_master_bzr"""'], {}), "(TESTDIR, 'init_master_bzr')\n", (11591, 11619), False, 'import os\n'), ((11624, 11635), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (11626, 11635), True, 'from os import chdir as cd\n'), ((11665, 11676), 'os.chdir', 'cd', (['INITGIT'], {}), '(INITGIT)\n', (11667, 11676), True, 'from os import chdir as cd\n'), ((11838, 11849), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (11840, 11849), True, 'from os import chdir as cd\n'), ((11905, 11931), 'os.chdir', 'cd', (["('%s_working' % INITBZR)"], {}), "('%s_working' % INITBZR)\n", (11907, 11931), True, 'from os import chdir as cd\n'), ((12067, 12107), 'os.path.join', 'os.path.join', (['TESTDIR', '"""init_branch_git"""'], {}), "(TESTDIR, 'init_branch_git')\n", (12079, 12107), False, 'import os\n'), ((12122, 12162), 'os.path.join', 'os.path.join', (['TESTDIR', '"""init_branch_bzr"""'], {}), "(TESTDIR, 'init_branch_bzr')\n", (12134, 12162), False, 'import os\n'), ((12167, 12178), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (12169, 12178), True, 'from os import chdir as cd\n'), ((12208, 12219), 'os.chdir', 'cd', (['INITGIT'], {}), '(INITGIT)\n', (12210, 12219), True, 'from os import chdir as cd\n'), ((12504, 12515), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (12506, 12515), True, 'from os import chdir as cd\n'), ((12571, 12597), 'os.chdir', 'cd', (["('%s_working' % INITBZR)"], {}), "('%s_working' % INITBZR)\n", (12573, 12597), True, 'from os import chdir as cd\n'), ((2876, 2900), 'os.unlink', 'os.unlink', (['BZRFASTIMPORT'], {}), '(BZRFASTIMPORT)\n', (2885, 2900), False, 'import os\n'), ((3038, 3052), 'os.unlink', 'os.unlink', (['BZR'], {}), '(BZR)\n', (3047, 3052), False, 'import os\n'), ((3972, 3996), 'os.path.exists', 'os.path.exists', (['bzr_head'], {}), '(bzr_head)\n', (3986, 3996), False, 'import os\n'), ((4004, 4039), 'logging.info', 'logging.info', (['"""Getting HEAD of bzr"""'], {}), "('Getting HEAD of bzr')\n", (4016, 4039), False, 'import logging\n'), ((4046, 4056), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (4048, 4056), True, 'from os import 
chdir as cd\n'), ((4117, 4145), 'os.path.exists', 'os.path.exists', (['PYFASTIMPORT'], {}), '(PYFASTIMPORT)\n', (4131, 4145), False, 'import os\n'), ((4153, 4204), 'logging.info', 'logging.info', (['"""Getting a HEAD of python-fastimport"""'], {}), "('Getting a HEAD of python-fastimport')\n", (4165, 4204), False, 'import logging\n'), ((4211, 4221), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (4213, 4221), True, 'from os import chdir as cd\n'), ((4278, 4303), 'os.path.exists', 'os.path.exists', (['PLUGINDIR'], {}), '(PLUGINDIR)\n', (4292, 4303), False, 'import os\n'), ((4311, 4330), 'os.mkdir', 'os.mkdir', (['PLUGINDIR'], {}), '(PLUGINDIR)\n', (4319, 4330), False, 'import os\n'), ((4343, 4379), 'os.path.exists', 'os.path.exists', (['BZRFASTIMPORT_STABLE'], {}), '(BZRFASTIMPORT_STABLE)\n', (4357, 4379), False, 'import os\n'), ((4387, 4441), 'logging.info', 'logging.info', (['"""Getting revision 307 of bzr-fastimport"""'], {}), "('Getting revision 307 of bzr-fastimport')\n", (4399, 4441), False, 'import logging\n'), ((4448, 4458), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (4450, 4458), True, 'from os import chdir as cd\n'), ((4547, 4581), 'os.path.exists', 'os.path.exists', (['BZRFASTIMPORT_HEAD'], {}), '(BZRFASTIMPORT_HEAD)\n', (4561, 4581), False, 'import os\n'), ((4589, 4635), 'logging.info', 'logging.info', (['"""Getting HEAD of bzr-fastimport"""'], {}), "('Getting HEAD of bzr-fastimport')\n", (4601, 4635), False, 'import logging\n'), ((4642, 4652), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (4644, 4652), True, 'from os import chdir as cd\n'), ((4726, 4770), 'os.path.exists', 'os.path.exists', (['BZRFASTIMPORT_STABLE_TARBALL'], {}), '(BZRFASTIMPORT_STABLE_TARBALL)\n', (4740, 4770), False, 'import os\n'), ((4778, 4833), 'logging.info', 'logging.info', (['"""Downloading bzr-fastimport version 0.10"""'], {}), "('Downloading bzr-fastimport version 0.10')\n", (4790, 4833), False, 'import logging\n'), ((4840, 4850), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (4842, 4850), True, 'from os import chdir as cd\n'), ((5880, 5910), 'shutil.copytree', 'shutil.copytree', (['memo', 'TESTDIR'], {}), '(memo, TESTDIR)\n', (5895, 5910), False, 'import shutil\n'), ((5991, 6005), 'os.mkdir', 'mkdir', (['TESTDIR'], {}), '(TESTDIR)\n', (5996, 6005), False, 'from os import mkdir\n'), ((6012, 6023), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (6014, 6023), True, 'from os import chdir as cd\n'), ((6059, 6072), 'os.chdir', 'cd', (['BZRBRANCH'], {}), '(BZRBRANCH)\n', (6061, 6072), True, 'from os import chdir as cd\n'), ((6402, 6413), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (6404, 6413), True, 'from os import chdir as cd\n'), ((6508, 6519), 'os.chdir', 'cd', (['TESTDIR'], {}), '(TESTDIR)\n', (6510, 6519), True, 'from os import chdir as cd\n'), ((6718, 6743), 'glob.iglob', 'glob.iglob', (['old_memo_glob'], {}), '(old_memo_glob)\n', (6728, 6743), False, 'import glob\n'), ((6807, 6837), 'shutil.copytree', 'shutil.copytree', (['TESTDIR', 'memo'], {}), '(TESTDIR, memo)\n', (6822, 6837), False, 'import shutil\n'), ((2732, 2751), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (2749, 2751), False, 'import logging\n'), ((3520, 3550), 'os.path.exists', 'os.path.exists', (['(BZRPATH % v[1])'], {}), '(BZRPATH % v[1])\n', (3534, 3550), False, 'import os\n'), ((3560, 3625), 'logging.info', 'logging.info', (['"""Downloading %s"""', '(download_url % (v[0], v[1], v[1]))'], {}), "('Downloading %s', download_url % (v[0], v[1], v[1]))\n", (3572, 3625), False, 'import 
logging\n'), ((3634, 3644), 'os.chdir', 'cd', (['VENDOR'], {}), '(VENDOR)\n', (3636, 3644), True, 'from os import chdir as cd\n'), ((6781, 6800), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (6794, 6800), False, 'import shutil\n')] |
import calendar
from datetime import datetime
from typing import List, Optional, Tuple, Union
from discord import Colour, Embed
from discord.ext.commands import Context
from discord.utils import escape_markdown
import emojis
import pss_assert
import pss_core as core
import pss_entity as entity
from pss_exception import Error
import pss_fleet as fleet
import pss_login as login
import pss_lookups as lookups
import pss_sprites as sprites
import pss_tournament as tourney
import pss_user as user
import settings
from typehints import EntitiesData, EntityInfo
import utils
# ---------- Constants ----------
ALLOWED_DIVISION_LETTERS: List[str] = sorted([letter for letter in lookups.DIVISION_CHAR_TO_DESIGN_ID.keys() if letter != '-'])
DIVISION_DESIGN_BASE_PATH: str = 'DivisionService/ListAllDivisionDesigns2'
DIVISION_DESIGN_DESCRIPTION_PROPERTY_NAME: str = 'DivisionName'
DIVISION_DESIGN_KEY_NAME: str = 'DivisionDesignId'
STARS_BASE_PATH: str = 'AllianceService/ListAlliancesWithDivision'
TOP_FLEETS_BASE_PATH: str = 'AllianceService/ListAlliancesByRanking?skip=0&take='
# ---------- Top fleets info ----------
async def get_top_fleets(ctx: Context, take: int = 100, as_embed: bool = settings.USE_EMBEDS) -> Union[List[Embed], List[str]]:
tourney_running = tourney.is_tourney_running()
divisions_designs_data = await divisions_designs_retriever.get_data_dict3()
fleets_divisions_max_ranks = [int(fleet_division_design_info['MaxRank']) for fleet_division_design_info in __get_fleet_division_designs(divisions_designs_data).values()]
raw_data = await core.get_data_from_path(TOP_FLEETS_BASE_PATH + str(take))
data = utils.convert.xmltree_to_dict3(raw_data)
if data:
title = f'Top {take} fleets'
prepared_data = __prepare_top_fleets(data)
body_lines = __create_body_lines_top_fleets(prepared_data, tourney_running, fleets_divisions_max_ranks)
if tourney_running:
footer = f'Properties displayed: Ranking. Fleet name (Trophy count {emojis.trophy} Member count {emojis.members} Star count {emojis.star})'
else:
footer = f'Properties displayed: Ranking. Fleet name (Trophy count {emojis.trophy} Member count {emojis.members})'
if as_embed:
colour = utils.discord.get_bot_member_colour(ctx.bot, ctx.guild)
return __create_top_embeds(title, body_lines, colour, footer)
else:
result = [
f'**{title}**',
*body_lines,
footer,
]
return result
else:
        raise Error(f'An unknown error occurred while retrieving the top fleets. Please contact the bot\'s author!')
def __create_body_lines_top_fleets(prepared_data: List[Tuple[int, str, str, str, str]], tourney_running: bool, fleets_divisions_max_ranks: List[int]) -> List[str]:
if tourney_running:
result = [
f'**{position}.** {fleet_name} ({trophies} {emojis.trophy} {number_of_approved_members} {emojis.members} {stars} {emojis.star})'
for position, fleet_name, trophies, stars, number_of_approved_members
in prepared_data
]
else:
result = [
f'**{position}.** {fleet_name} ({trophies} {emojis.trophy} {number_of_approved_members} {emojis.members})'
for position, fleet_name, trophies, _, number_of_approved_members
in prepared_data
]
for rank in sorted(fleets_divisions_max_ranks, reverse=True):
if rank < len(result):
result.insert(rank, utils.discord.ZERO_WIDTH_SPACE)
return result
def __prepare_top_fleets(fleets_data: EntitiesData) -> List[Tuple[int, str, str, str, str]]:
"""
Returns:
List[
Tuple[
fleet rank (int),
fleet name (str),
fleet trophies (str),
fleet stars (str),
number of approved members (str)
]
]
"""
result = [
(
position,
escape_markdown(fleet_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME]),
fleet_info['Trophy'],
fleet_info['Score'],
fleet_info['NumberOfApprovedMembers']
) for position, fleet_info in enumerate(fleets_data.values(), start=1)
]
return result
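# Illustrative shape of the value returned by __prepare_top_fleets (all values are
# made-up examples; trophies, stars and member counts stay strings as in the raw data):
#
#     [
#         (1, 'Example Fleet', '45678', '1234', '100'),
#         (2, 'Another Fleet', '44321', '1101', '98'),
#     ]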
# ---------- Top captains info ----------
async def get_top_captains(ctx: Context, take: int = 100, as_embed: bool = settings.USE_EMBEDS) -> Union[List[Embed], List[str]]:
skip = 0
data = await __get_top_captains_data(skip, take)
if data:
title = f'Top {take} captains'
prepared_data = __prepare_top_captains(data, skip, take)
body_lines = __create_body_lines_top_captains(prepared_data)
footer = f'Properties displayed: Ranking. Player name (Fleet name) - Trophies {emojis.trophy}'
if as_embed:
colour = utils.discord.get_bot_member_colour(ctx.bot, ctx.guild)
result = __create_top_embeds(title, body_lines, colour, footer)
else:
result = [
f'**{title}**',
*body_lines,
footer,
]
return result
else:
        raise Error(f'An unknown error occurred while retrieving the top captains. Please contact the bot\'s author!')
def __create_body_lines_top_captains(prepared_data: List[Tuple[int, str, str, str]]) -> List[str]:
result = [
f'**{position}.** {user_name} ({fleet_name}) - {trophies} {emojis.trophy}'
for position, user_name, fleet_name, trophies
in prepared_data
]
return result
async def __get_top_captains_data(skip: int, take: int) -> EntitiesData:
path = await __get_top_captains_path(skip, take)
raw_data = await core.get_data_from_path(path)
data = utils.convert.xmltree_to_dict3(raw_data)
return data
async def __get_top_captains_path(skip: int, take: int) -> str:
skip += 1
access_token = await login.DEVICES.get_access_token()
result = f'LadderService/ListUsersByRanking?accessToken={access_token}&from={skip}&to={take}'
return result
def __prepare_top_captains(users_data: EntitiesData, skip: int, take: int) -> List[Tuple]:
start = skip + 1
end = skip + take
result = [
(
position,
escape_markdown(user_info[user.USER_DESCRIPTION_PROPERTY_NAME]),
escape_markdown(user_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME]),
user_info['Trophy']
)
for position, user_info
in enumerate(users_data.values(), start=start)
if position >= start and position <= end
]
return result
# ---------- Stars info ----------
async def get_division_stars(ctx: Context, division: str = None, fleet_data: dict = None, retrieved_date: datetime = None, as_embed: bool = settings.USE_EMBEDS) -> Union[List[Embed], List[str]]:
if division:
pss_assert.valid_parameter_value(division, 'division', min_length=1, allowed_values=ALLOWED_DIVISION_LETTERS)
if division == '-':
division = None
else:
division = None
if fleet_data is None or retrieved_date is None:
fleet_infos = await get_alliances_with_division()
else:
fleet_infos = fleet_data
divisions_designs_infos = await divisions_designs_retriever.get_data_dict3()
divisions = {}
if division:
division_design_id = lookups.DIVISION_CHAR_TO_DESIGN_ID[division.upper()]
divisions[division_design_id] = [fleet_info for fleet_info in fleet_infos.values() if fleet_info[DIVISION_DESIGN_KEY_NAME] == division_design_id]
else:
for division_design_id in lookups.DIVISION_DESIGN_ID_TO_CHAR.keys():
if division_design_id != '0':
divisions[division_design_id] = [fleet_info for fleet_info in fleet_infos.values() if fleet_info[DIVISION_DESIGN_KEY_NAME] == division_design_id]
if divisions:
divisions_texts = []
for division_design_id, fleet_infos in divisions.items():
divisions_texts.append((division_design_id, __get_division_stars_as_text(fleet_infos)))
result = []
footer = f'Properties displayed: Rank. Stars (Difference to next) Fleet name (Total trophies {emojis.trophy}, Member count {emojis.members})'
historic_data_note = utils.datetime.get_historic_data_note(retrieved_date)
if historic_data_note:
if as_embed:
footer += f'\n\n{historic_data_note}'
else:
footer += f'\n{historic_data_note}'
colour = utils.discord.get_bot_member_colour(ctx.bot, ctx.guild)
for division_design_id, division_text in divisions_texts:
if as_embed:
division_title = get_division_title(division_design_id, divisions_designs_infos, False, retrieved_date)
thumbnail_url = await sprites.get_download_sprite_link(divisions_designs_infos[division_design_id]['BackgroundSpriteId'])
embed_bodies = utils.discord.create_posts_from_lines(division_text, utils.discord.MAXIMUM_CHARACTERS_EMBED_DESCRIPTION)
for i, embed_body in enumerate(embed_bodies):
thumbnail_url = thumbnail_url if i == 0 else None
embed = utils.discord.create_embed(division_title, description=embed_body, footer=footer, thumbnail_url=thumbnail_url, colour=colour)
result.append(embed)
else:
division_title = get_division_title(division_design_id, divisions_designs_infos, True, retrieved_date)
result.append(division_title)
result.extend(division_text)
result.append(utils.discord.ZERO_WIDTH_SPACE)
if not as_embed:
result = result[:-1]
if footer:
result.append(f'```{footer}```')
return result
else:
        raise Error(f'An unknown error occurred while retrieving division info. Please contact the bot\'s author!')
def __get_division_stars_as_text(fleet_infos: List[EntityInfo]) -> List[str]:
lines = []
fleet_infos = entity.sort_entities_by(fleet_infos, [('Score', int, True)])
fleet_infos_count = len(fleet_infos)
for i, fleet_info in enumerate(fleet_infos, start=1):
fleet_name = escape_markdown(fleet_info['AllianceName'])
additional_info: List[Tuple[str, str]] = []
trophies = fleet_info.get('Trophy')
if trophies:
additional_info.append((trophies, emojis.trophy))
member_count = fleet_info.get('NumberOfMembers')
if member_count:
additional_info.append((str(member_count), emojis.members))
stars = fleet_info['Score']
if i < fleet_infos_count:
difference = int(stars) - int(fleet_infos[i]['Score'])
else:
difference = 0
if additional_info:
additional_str = f' ({" ".join([" ".join(info) for info in additional_info])})'
else:
additional_str = ''
lines.append(f'**{i:d}.** {stars} (+{difference}) {emojis.star} {fleet_name}{additional_str}')
return lines
def get_division_title(division_design_id: str, divisions_designs_infos: EntitiesData, include_markdown: bool, retrieved_date: datetime) -> str:
title = divisions_designs_infos[division_design_id][DIVISION_DESIGN_DESCRIPTION_PROPERTY_NAME]
if retrieved_date:
is_monthly_data = (retrieved_date + utils.datetime.ONE_DAY).month != retrieved_date.month
if is_monthly_data:
title = f'{title} - {calendar.month_abbr[retrieved_date.month]} {retrieved_date.year}'
else:
title = f'{title} - {calendar.month_abbr[retrieved_date.month]} {retrieved_date.day}, {retrieved_date.year}'
if include_markdown:
return f'__**{title}**__'
else:
return title
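# Illustrative outputs of get_division_title (division name and dates are made up):
# a retrieved_date falling on the last day of a month collapses to a monthly label,
# any other date keeps the exact day, e.g.
#
#     'Division A - Mar 2021'      for retrieved_date 2021-03-31
#     'Division A - Mar 17, 2021'  for retrieved_date 2021-03-17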
# ---------- Helper functions ----------
def filter_targets(user_infos: List[EntityInfo], division_design_id: str, last_month_user_data: EntitiesData, current_fleet_data: EntitiesData = {}, min_star_value: int = None, max_star_value: int = None, min_trophies_value: int = None, max_trophies_value: int = None, max_highest_trophies: int = None) -> List[EntityInfo]:
result = []
for user_info in user_infos:
current_division_design_id = current_fleet_data.get(user_info.get(fleet.FLEET_KEY_NAME), {}).get(DIVISION_DESIGN_KEY_NAME)
user_division_design_id = user_info.get('Alliance', {}).get(DIVISION_DESIGN_KEY_NAME, '0')
alliance_division_design_id = current_division_design_id or user_division_design_id
trophies = int(user_info.get('Trophy', 0))
highest_trophies = int(user_info.get('HighestTrophy', 0))
division_matches = division_design_id == alliance_division_design_id
if division_matches and (not min_trophies_value or trophies >= min_trophies_value) and (not max_trophies_value or trophies <= max_trophies_value) and (not max_highest_trophies or highest_trophies <= max_highest_trophies):
star_value, _ = user.get_star_value_from_user_info(user_info, star_count=user_info.get('AllianceScore'))
if (not min_star_value or star_value >= min_star_value) and (not max_star_value or star_value <= max_star_value):
user_id = user_info[user.USER_KEY_NAME]
user_info['StarValue'] = star_value or 0
user_info['LastMonthStarValue'] = last_month_user_data.get(user_id, {}).get('AllianceScore') or '-'
result.append(user_info)
result = sorted(result, key=lambda user_info: (user_info.get('StarValue', 0), int(user_info.get('AllianceScore', 0)), int(user_info.get('Trophy', 0))), reverse=True)
return result
async def get_alliances_with_division() -> EntitiesData:
data = await core.get_data_from_path(STARS_BASE_PATH)
fleet_infos = utils.convert.xmltree_to_dict3(data)
return fleet_infos
def get_targets_parameters(star_value: str = None, trophies: str = None, highest_trophies: int = None) -> Tuple[List[str], Optional[int], Optional[int], Optional[int], Optional[int], Optional[int]]:
star_values = [int(value) for value in (star_value or '').split('-') if value]
trophies_values = [int(value) for value in (trophies or '').split('-') if value]
criteria_lines = []
if star_values and len(star_values) > 2:
raise ValueError('Only 1 minimum and 1 maximum star value may be specified.')
min_star_value, max_star_value = None, None
if star_values:
min_star_value = min(star_values)
if len(star_values) > 1:
max_star_value = max(star_values)
criteria_lines.append(f'Star value: {min_star_value} - {max_star_value}')
else:
max_star_value = None
criteria_lines.append(f'Minimum star value: {min_star_value}')
if trophies_values and len(trophies_values) > 2:
raise ValueError('Only 1 minimum and 1 maximum trophy count may be specified.')
min_trophies_value, max_trophies_value = None, None
if trophies_values:
max_trophies_value = max(trophies_values)
if len(trophies_values) > 1:
min_trophies_value = min(trophies_values)
criteria_lines.append(f'Trophy count: {min_trophies_value} - {max_trophies_value}')
else:
min_trophies_value = None
criteria_lines.append(f'Maximum trophy count: {max_trophies_value}')
if highest_trophies is not None:
if highest_trophies < 0:
raise ValueError('The highest trophy count must not be negative.')
elif any(value > highest_trophies for value in trophies_values):
raise ValueError('The highest trophy count for a player must not be lower than any current trophy count value.')
criteria_lines.append(f'Maximum highest trophy count: {highest_trophies}')
return criteria_lines, min_star_value, max_star_value, min_trophies_value, max_trophies_value, highest_trophies
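# Illustrative call (made-up values): star_value and trophies accept single values or
# "min-max" ranges encoded as strings, e.g.
#
#     criteria_lines, *_ = get_targets_parameters(star_value='5-30', trophies='3000',
#                                                 highest_trophies=5500)
#     # criteria_lines -> ['Star value: 5 - 30',
#     #                    'Maximum trophy count: 3000',
#     #                    'Maximum highest trophy count: 5500']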
def is_valid_division_letter(div_letter: str) -> bool:
if div_letter is None:
result = True
else:
result = div_letter.lower() in [letter.lower() for letter in ALLOWED_DIVISION_LETTERS]
return result
def make_target_output_lines(user_infos: List[EntityInfo], include_fleet_name: bool = True) -> Tuple[str, List[str]]:
footer = f'Properties displayed: Star value (Current, Last month\'s star count) {emojis.star} Trophies (Max Trophies) {emojis.trophy} Player name'
if include_fleet_name:
footer += ' (Fleet name)'
result = []
for user_rank, user_info in enumerate(user_infos, 1):
player_star_value = user_info.get('StarValue', 0)
stars = int(user_info.get('AllianceScore', 0))
user_name = escape_markdown(user_info.get(user.USER_DESCRIPTION_PROPERTY_NAME, ''))
fleet_name = escape_markdown(user_info.get('Alliance', {}).get(fleet.FLEET_DESCRIPTION_PROPERTY_NAME, ''))
trophies = int(user_info.get('Trophy', 0))
highest_trophies = int(user_info.get('HighestTrophy', 0)) or '-'
last_month_stars = user_info.get('LastMonthStarValue', '-')
line = f'**{user_rank}.** {player_star_value} ({stars}, {last_month_stars}) {emojis.star} {trophies} ({highest_trophies}) {emojis.trophy} {user_name}'
if include_fleet_name:
line += f' ({fleet_name})'
if user_rank > 1 or not result:
result.append(line)
else:
result[-1] += f'\n{line}'
return footer, result
def __create_top_embeds(title: str, body_lines: List[str], colour: Colour, footer: str) -> List[Embed]:
bodies = utils.discord.create_posts_from_lines(body_lines, utils.discord.MAXIMUM_CHARACTERS_EMBED_DESCRIPTION)
result = []
for body in bodies:
result.append(utils.discord.create_embed(title, description=body, colour=colour, footer=footer))
return result
def __get_fleet_division_designs(divisions_designs_data: EntitiesData) -> EntitiesData:
result = {key: value for key, value in divisions_designs_data.items() if value.get('DivisionType') == 'Fleet'}
return result
# ---------- Initialization ----------
divisions_designs_retriever: entity.EntityRetriever = entity.EntityRetriever(
DIVISION_DESIGN_BASE_PATH,
DIVISION_DESIGN_KEY_NAME,
DIVISION_DESIGN_DESCRIPTION_PROPERTY_NAME,
cache_name='DivisionDesigns'
) | [
"pss_entity.EntityRetriever",
"utils.convert.xmltree_to_dict3",
"utils.datetime.get_historic_data_note",
"pss_entity.sort_entities_by",
"utils.discord.get_bot_member_colour",
"discord.utils.escape_markdown",
"utils.discord.create_embed",
"pss_login.DEVICES.get_access_token",
"pss_lookups.DIVISION_CHAR_TO_DESIGN_ID.keys",
"pss_lookups.DIVISION_DESIGN_ID_TO_CHAR.keys",
"pss_assert.valid_parameter_value",
"pss_core.get_data_from_path",
"pss_sprites.get_download_sprite_link",
"utils.discord.create_posts_from_lines",
"pss_exception.Error",
"pss_tournament.is_tourney_running"
] | [((18187, 18339), 'pss_entity.EntityRetriever', 'entity.EntityRetriever', (['DIVISION_DESIGN_BASE_PATH', 'DIVISION_DESIGN_KEY_NAME', 'DIVISION_DESIGN_DESCRIPTION_PROPERTY_NAME'], {'cache_name': '"""DivisionDesigns"""'}), "(DIVISION_DESIGN_BASE_PATH, DIVISION_DESIGN_KEY_NAME,\n DIVISION_DESIGN_DESCRIPTION_PROPERTY_NAME, cache_name='DivisionDesigns')\n", (18209, 18339), True, 'import pss_entity as entity\n'), ((1277, 1305), 'pss_tournament.is_tourney_running', 'tourney.is_tourney_running', ([], {}), '()\n', (1303, 1305), True, 'import pss_tournament as tourney\n'), ((1650, 1690), 'utils.convert.xmltree_to_dict3', 'utils.convert.xmltree_to_dict3', (['raw_data'], {}), '(raw_data)\n', (1680, 1690), False, 'import utils\n'), ((5769, 5809), 'utils.convert.xmltree_to_dict3', 'utils.convert.xmltree_to_dict3', (['raw_data'], {}), '(raw_data)\n', (5799, 5809), False, 'import utils\n'), ((10098, 10158), 'pss_entity.sort_entities_by', 'entity.sort_entities_by', (['fleet_infos', "[('Score', int, True)]"], {}), "(fleet_infos, [('Score', int, True)])\n", (10121, 10158), True, 'import pss_entity as entity\n'), ((13842, 13878), 'utils.convert.xmltree_to_dict3', 'utils.convert.xmltree_to_dict3', (['data'], {}), '(data)\n', (13872, 13878), False, 'import utils\n'), ((17601, 17707), 'utils.discord.create_posts_from_lines', 'utils.discord.create_posts_from_lines', (['body_lines', 'utils.discord.MAXIMUM_CHARACTERS_EMBED_DESCRIPTION'], {}), '(body_lines, utils.discord.\n MAXIMUM_CHARACTERS_EMBED_DESCRIPTION)\n', (17638, 17707), False, 'import utils\n'), ((2584, 2694), 'pss_exception.Error', 'Error', (['f"""An unknown error occured while retrieving the top fleets. Please contact the bot\'s author!"""'], {}), '(\n f"An unknown error occured while retrieving the top fleets. Please contact the bot\'s author!"\n )\n', (2589, 2694), False, 'from pss_exception import Error\n'), ((5173, 5285), 'pss_exception.Error', 'Error', (['f"""An unknown error occured while retrieving the top captains. Please contact the bot\'s author!"""'], {}), '(\n f"An unknown error occured while retrieving the top captains. Please contact the bot\'s author!"\n )\n', (5178, 5285), False, 'from pss_exception import Error\n'), ((5728, 5757), 'pss_core.get_data_from_path', 'core.get_data_from_path', (['path'], {}), '(path)\n', (5751, 5757), True, 'import pss_core as core\n'), ((5931, 5963), 'pss_login.DEVICES.get_access_token', 'login.DEVICES.get_access_token', ([], {}), '()\n', (5961, 5963), True, 'import pss_login as login\n'), ((6882, 6995), 'pss_assert.valid_parameter_value', 'pss_assert.valid_parameter_value', (['division', '"""division"""'], {'min_length': '(1)', 'allowed_values': 'ALLOWED_DIVISION_LETTERS'}), "(division, 'division', min_length=1,\n allowed_values=ALLOWED_DIVISION_LETTERS)\n", (6914, 6995), False, 'import pss_assert\n'), ((7636, 7677), 'pss_lookups.DIVISION_DESIGN_ID_TO_CHAR.keys', 'lookups.DIVISION_DESIGN_ID_TO_CHAR.keys', ([], {}), '()\n', (7675, 7677), True, 'import pss_lookups as lookups\n'), ((8297, 8350), 'utils.datetime.get_historic_data_note', 'utils.datetime.get_historic_data_note', (['retrieved_date'], {}), '(retrieved_date)\n', (8334, 8350), False, 'import utils\n'), ((8548, 8603), 'utils.discord.get_bot_member_colour', 'utils.discord.get_bot_member_colour', (['ctx.bot', 'ctx.guild'], {}), '(ctx.bot, ctx.guild)\n', (8583, 8603), False, 'import utils\n'), ((9884, 9993), 'pss_exception.Error', 'Error', (['f"""An unknown error occured while retrieving division info. 
Please contact the bot\'s author!"""'], {}), '(\n f"An unknown error occured while retrieving division info. Please contact the bot\'s author!"\n )\n', (9889, 9993), False, 'from pss_exception import Error\n'), ((10279, 10322), 'discord.utils.escape_markdown', 'escape_markdown', (["fleet_info['AllianceName']"], {}), "(fleet_info['AllianceName'])\n", (10294, 10322), False, 'from discord.utils import escape_markdown\n'), ((13783, 13823), 'pss_core.get_data_from_path', 'core.get_data_from_path', (['STARS_BASE_PATH'], {}), '(STARS_BASE_PATH)\n', (13806, 13823), True, 'import pss_core as core\n'), ((678, 719), 'pss_lookups.DIVISION_CHAR_TO_DESIGN_ID.keys', 'lookups.DIVISION_CHAR_TO_DESIGN_ID.keys', ([], {}), '()\n', (717, 719), True, 'import pss_lookups as lookups\n'), ((2268, 2323), 'utils.discord.get_bot_member_colour', 'utils.discord.get_bot_member_colour', (['ctx.bot', 'ctx.guild'], {}), '(ctx.bot, ctx.guild)\n', (2303, 2323), False, 'import utils\n'), ((3995, 4061), 'discord.utils.escape_markdown', 'escape_markdown', (['fleet_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME]'], {}), '(fleet_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME])\n', (4010, 4061), False, 'from discord.utils import escape_markdown\n'), ((4859, 4914), 'utils.discord.get_bot_member_colour', 'utils.discord.get_bot_member_colour', (['ctx.bot', 'ctx.guild'], {}), '(ctx.bot, ctx.guild)\n', (4894, 4914), False, 'import utils\n'), ((6275, 6338), 'discord.utils.escape_markdown', 'escape_markdown', (['user_info[user.USER_DESCRIPTION_PROPERTY_NAME]'], {}), '(user_info[user.USER_DESCRIPTION_PROPERTY_NAME])\n', (6290, 6338), False, 'from discord.utils import escape_markdown\n'), ((6352, 6417), 'discord.utils.escape_markdown', 'escape_markdown', (['user_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME]'], {}), '(user_info[fleet.FLEET_DESCRIPTION_PROPERTY_NAME])\n', (6367, 6417), False, 'from discord.utils import escape_markdown\n'), ((17765, 17851), 'utils.discord.create_embed', 'utils.discord.create_embed', (['title'], {'description': 'body', 'colour': 'colour', 'footer': 'footer'}), '(title, description=body, colour=colour, footer=\n footer)\n', (17791, 17851), False, 'import utils\n'), ((8984, 9093), 'utils.discord.create_posts_from_lines', 'utils.discord.create_posts_from_lines', (['division_text', 'utils.discord.MAXIMUM_CHARACTERS_EMBED_DESCRIPTION'], {}), '(division_text, utils.discord.\n MAXIMUM_CHARACTERS_EMBED_DESCRIPTION)\n', (9021, 9093), False, 'import utils\n'), ((8853, 8957), 'pss_sprites.get_download_sprite_link', 'sprites.get_download_sprite_link', (["divisions_designs_infos[division_design_id]['BackgroundSpriteId']"], {}), "(divisions_designs_infos[division_design_id\n ]['BackgroundSpriteId'])\n", (8885, 8957), True, 'import pss_sprites as sprites\n'), ((9249, 9379), 'utils.discord.create_embed', 'utils.discord.create_embed', (['division_title'], {'description': 'embed_body', 'footer': 'footer', 'thumbnail_url': 'thumbnail_url', 'colour': 'colour'}), '(division_title, description=embed_body, footer=\n footer, thumbnail_url=thumbnail_url, colour=colour)\n', (9275, 9379), False, 'import utils\n')] |
import os
import sys
import numpy as np
import skimage
import argparse
import onnxruntime
import fine_grained_segmentation
from fine_grained_segmentation import model
from fine_grained_segmentation import visualize
from fine_grained_segmentation import utils
# list of fashion class names
CLASS_NAMES = ['BG', 'shirt, blouse', 'top, t-shirt, sweatshirt', 'sweater',
'cardigan', 'jacket', 'vest', 'pants', 'shorts', 'skirt', 'coat',
'dress', 'jumpsuit', 'cape', 'glasses', 'hat',
'headband, head covering, hair accessory', 'tie', 'glove', 'watch',
'belt', 'leg warmer', 'tights, stockings', 'sock', 'shoe',
'bag, wallet', 'scarf', 'umbrella', 'hood', 'collar', 'lapel',
'epaulette', 'sleeve', 'pocket', 'neckline', 'buckle', 'zipper',
'applique', 'bead', 'bow', 'flower', 'fringe', 'ribbon', 'rivet',
'ruffle', 'sequin', 'tassel']
LIB_DIR = fine_grained_segmentation.__path__[0]
def generate_image(images, molded_images, windows, results):
results_final = []
for i, image in enumerate(images):
final_rois, final_class_ids, final_scores, final_masks = \
model.unmold_detections(results[0][i], results[3][i], # detections[i], mrcnn_mask[i]
image.shape, molded_images[i].shape,
windows[i])
results_final.append({
"rois": final_rois,
"class_ids": final_class_ids,
"scores": final_scores,
"masks": final_masks,
})
r = results_final[i]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
CLASS_NAMES, r['scores'])
return results_final
def detect(filename):
BATCH_SIZE = 1
ONNX_WEIGHTS_URL = r"https://github.com/vinny-palumbo/fine_grained_segmentation/releases/download/v0.1-alpha/mrcnn.onnx"
# get onnx weights
model_file_name = os.path.join(LIB_DIR, 'mrcnn.onnx')
# download onnx weights if it doesn't exist
if not os.path.exists(model_file_name):
utils.download_file(ONNX_WEIGHTS_URL, model_file_name)
# create onnx runtime session
session = onnxruntime.InferenceSession(model_file_name)
# get image
print("* Running detection on:", filename)
image = skimage.io.imread(filename)[:,:,:3]
image = utils.modify_white_pixels(image)
images = [image]
# preprocessing
molded_images, image_metas, windows = model.mold_inputs(images)
anchors = model.get_anchors(molded_images[0].shape)
anchors = np.broadcast_to(anchors, (BATCH_SIZE,) + anchors.shape)
# run inference
results = \
session.run(None, {"input_image": molded_images.astype(np.float32),
"input_anchors": anchors,
"input_image_meta": image_metas.astype(np.float32)})
# postprocessing
    results_final = generate_image(images, molded_images, windows, results)
    return results_final
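# Illustrative entry point (an assumption, not part of the original snippet):
#
#     if __name__ == '__main__':
#         parser = argparse.ArgumentParser(description='Fashion segmentation demo')
#         parser.add_argument('image', help='path to an RGB image file')
#         args = parser.parse_args()
#         detect(args.image)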
| [
"fine_grained_segmentation.model.unmold_detections",
"os.path.exists",
"fine_grained_segmentation.utils.download_file",
"onnxruntime.InferenceSession",
"os.path.join",
"fine_grained_segmentation.visualize.display_instances",
"skimage.io.imread",
"fine_grained_segmentation.model.mold_inputs",
"fine_grained_segmentation.utils.modify_white_pixels",
"fine_grained_segmentation.model.get_anchors",
"numpy.broadcast_to"
] | [((2035, 2070), 'os.path.join', 'os.path.join', (['LIB_DIR', '"""mrcnn.onnx"""'], {}), "(LIB_DIR, 'mrcnn.onnx')\n", (2047, 2070), False, 'import os\n'), ((2284, 2329), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['model_file_name'], {}), '(model_file_name)\n', (2312, 2329), False, 'import onnxruntime\n'), ((2454, 2486), 'fine_grained_segmentation.utils.modify_white_pixels', 'utils.modify_white_pixels', (['image'], {}), '(image)\n', (2479, 2486), False, 'from fine_grained_segmentation import utils\n'), ((2575, 2600), 'fine_grained_segmentation.model.mold_inputs', 'model.mold_inputs', (['images'], {}), '(images)\n', (2592, 2600), False, 'from fine_grained_segmentation import model\n'), ((2615, 2656), 'fine_grained_segmentation.model.get_anchors', 'model.get_anchors', (['molded_images[0].shape'], {}), '(molded_images[0].shape)\n', (2632, 2656), False, 'from fine_grained_segmentation import model\n'), ((2671, 2726), 'numpy.broadcast_to', 'np.broadcast_to', (['anchors', '((BATCH_SIZE,) + anchors.shape)'], {}), '(anchors, (BATCH_SIZE,) + anchors.shape)\n', (2686, 2726), True, 'import numpy as np\n'), ((1224, 1330), 'fine_grained_segmentation.model.unmold_detections', 'model.unmold_detections', (['results[0][i]', 'results[3][i]', 'image.shape', 'molded_images[i].shape', 'windows[i]'], {}), '(results[0][i], results[3][i], image.shape,\n molded_images[i].shape, windows[i])\n', (1247, 1330), False, 'from fine_grained_segmentation import model\n'), ((1651, 1754), 'fine_grained_segmentation.visualize.display_instances', 'visualize.display_instances', (['image', "r['rois']", "r['masks']", "r['class_ids']", 'CLASS_NAMES', "r['scores']"], {}), "(image, r['rois'], r['masks'], r['class_ids'],\n CLASS_NAMES, r['scores'])\n", (1678, 1754), False, 'from fine_grained_segmentation import visualize\n'), ((2135, 2166), 'os.path.exists', 'os.path.exists', (['model_file_name'], {}), '(model_file_name)\n', (2149, 2166), False, 'import os\n'), ((2176, 2230), 'fine_grained_segmentation.utils.download_file', 'utils.download_file', (['ONNX_WEIGHTS_URL', 'model_file_name'], {}), '(ONNX_WEIGHTS_URL, model_file_name)\n', (2195, 2230), False, 'from fine_grained_segmentation import utils\n'), ((2406, 2433), 'skimage.io.imread', 'skimage.io.imread', (['filename'], {}), '(filename)\n', (2423, 2433), False, 'import skimage\n')] |
import os
import time
import torch
import argparse
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
from SceneRecognitionCNN import SceneRecognitionCNN
from Libs.Datasets.Places365Dataset import Places365Dataset
from Libs.Utils import utils
import numpy as np
import yaml
import pickle
"""
Evaluation file to obtain all the necessary Scene Recognition
statistics for the attribution method.
Fully developed by <NAME>
"""
parser = argparse.ArgumentParser(description='Semantic-Aware Scene Recognition Evaluation')
parser.add_argument('--ConfigPath', metavar='DIR', help='Configuration file path', required=True)
def evaluationDataLoader(dataloader, model, set):
batch_time = utils.AverageMeter()
losses = utils.AverageMeter()
top1 = utils.AverageMeter()
top2 = utils.AverageMeter()
top5 = utils.AverageMeter()
ClassTPs_Top1 = torch.zeros(1, len(classes), dtype=torch.uint8).cuda()
ClassTPs_Top2 = torch.zeros(1, len(classes), dtype=torch.uint8).cuda()
ClassTPs_Top5 = torch.zeros(1, len(classes), dtype=torch.uint8).cuda()
Predictions = list()
SceneGTLabels = list()
# Extract batch size
batch_size = CONFIG['VALIDATION']['BATCH_SIZE']['TEST']
    # Enable the extraction of semantic occlusions
sem_oclusions = CONFIG['VALIDATION']['SEM_OCCLUSIONS']
SampleCounter = 1
# Start data time
data_time_start = time.time()
with torch.no_grad():
for i, (mini_batch) in enumerate(dataloader):
start_time = time.time()
if USE_CUDA:
RGB_image = mini_batch['Image'].cuda()
semantic_mask = mini_batch['Semantic'].cuda()
sceneLabelGT = mini_batch['Scene Index'].cuda()
# Model Forward
outputSceneLabel = model(RGB_image)
# Get predictions
batch_predictions = utils.obtainPredictedClasses(outputSceneLabel)
# Save Predictions and Ground-Truth
Predictions.extend(batch_predictions.tolist())
SceneGTLabels.extend(sceneLabelGT.cpu().numpy().tolist())
# Analyze Semantic Occlusions
if sem_oclusions:
# Mean ImageNet value
mean = [0.485, 0.456, 0.406]
# Get Top1 Semantic Labels
semantic_mask = semantic_mask[:, 2, :, :]
# Intermediate array to save results
results_array = np.zeros([CONFIG['DATASET']['N_CLASSES_SEM'], CONFIG['DATASET']['N_CLASSES_SCENE'], batch_size])
# Original Scene Prediction goes in the first row of the matrix
results_array[0, :, :] = np.transpose(torch.nn.functional.softmax(outputSceneLabel, dim=1).cpu().numpy())
# There is no semantic label = 0 (or it is not-annotated label) so we start in label = 1
for j in range(1, CONFIG['DATASET']['N_CLASSES_SEM']):
RGB_image_occluded = RGB_image.clone()
# Select areas of the image corresponding to semantic class j
indices = (semantic_mask == j)
                    # Change the RGB images at those pixels. The original paper sets the RGB values to the mean ImageNet distribution value
R = RGB_image_occluded[:, 0, :, :]
G = RGB_image_occluded[:, 1, :, :]
B = RGB_image_occluded[:, 2, :, :]
R[indices] = mean[0]
G[indices] = mean[1]
B[indices] = mean[2]
R = torch.unsqueeze(R, dim=1)
G = torch.unsqueeze(G, dim=1)
B = torch.unsqueeze(B, dim=1)
# Reconstruct again the images
RGB_image_occluded = torch.cat((R, G, B), dim=1)
                    # Obtain new predictions with the occluded RGB images
outputSceneLabel_occluded = model(RGB_image_occluded)
                    # Save the corresponding results
results_array[j, :, :] = np.transpose(torch.nn.functional.softmax(outputSceneLabel_occluded, dim=1).cpu().numpy())
for k in range(batch_size):
                    # Save each matrix in an independent file
# sio.savemat('Occlusion Matrices Results/Mat Files/RGB_matrix_pred_' + str(SampleCounter).zfill(5) + '.mat',
# {'image_' + str(SampleCounter): results_array[:, :, k]})
with open(os.path.join(ResultPathMatrices, 'RGB_matrix_pred_' + str(SampleCounter).zfill(5) + '.pkl'), 'wb') as filehandle:
# store the data as binary data stream
pickle.dump(results_array[:, :, k], filehandle)
SampleCounter += 1
# Compute class accuracy
ClassTPs = utils.getclassAccuracy(outputSceneLabel, sceneLabelGT, len(classes), topk=(1, 2, 5))
ClassTPs_Top1 += ClassTPs[0]
ClassTPs_Top2 += ClassTPs[1]
ClassTPs_Top5 += ClassTPs[2]
# Compute Loss
loss = model.loss(outputSceneLabel, sceneLabelGT)
# Measure Top1, Top2 and Top5 accuracy
prec1, prec2, prec5 = utils.accuracy(outputSceneLabel.data, sceneLabelGT, topk=(1, 2, 5))
# Update values
losses.update(loss.item(), batch_size)
top1.update(prec1.item(), batch_size)
top2.update(prec2.item(), batch_size)
top5.update(prec5.item(), batch_size)
# Measure batch elapsed time
batch_time.update(time.time() - start_time)
# Print information
if i % CONFIG['VALIDATION']['PRINT_FREQ'] == 0:
print('Testing {} set batch: [{}/{}] '
'Batch Time {batch_time.val:.3f} (avg: {batch_time.avg:.3f}) '
'Loss {loss.val:.3f} (avg: {loss.avg:.3f}) '
'Prec@1 {top1.val:.3f} (avg: {top1.avg:.3f}) '
'Prec@2 {top2.val:.3f} (avg: {top2.avg:.3f}) '
'Prec@5 {top5.val:.3f} (avg: {top5.avg:.3f})'.
format(set, i, len(dataloader), set, batch_time=batch_time, loss=losses,
top1=top1, top2=top2, top5=top5))
ClassTPDic = {'Top1': ClassTPs_Top1.cpu().numpy(),
'Top2': ClassTPs_Top2.cpu().numpy(), 'Top5': ClassTPs_Top5.cpu().numpy()}
print('Elapsed time for {} set evaluation {time:.3f} seconds'.format(set, time=time.time() - data_time_start))
print("")
# Save predictions and Scene GT in pickle files
with open(os.path.join(ResultPath, set + '_Predictions.pkl'), 'wb') as filehandle:
# store the data as binary data stream
pickle.dump(Predictions, filehandle)
with open(os.path.join(ResultPath, set + 'SceneGTLabels.pkl'), 'wb') as filehandle:
# store the data as binary data stream
pickle.dump(SceneGTLabels, filehandle)
return top1.avg, top2.avg, top5.avg, losses.avg, ClassTPDic
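# Note on evaluationDataLoader outputs (a summary of the code above, not new behaviour):
# besides the returned accuracy metrics, the function pickles one
# [N_CLASSES_SEM x N_CLASSES_SCENE] prediction matrix per validation image into
# ResultPathMatrices when SEM_OCCLUSIONS is enabled, and it always writes
# <set>_Predictions.pkl and <set>SceneGTLabels.pkl with the per-image predictions and
# ground-truth scene labels.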
global USE_CUDA, classes, CONFIG
# ----------------------------- #
# Configuration #
# ----------------------------- #
# Decode CONFIG file information
args = parser.parse_args()
CONFIG = yaml.safe_load(open(args.ConfigPath, 'r'))
USE_CUDA = torch.cuda.is_available()
print('-' * 65)
print("Evaluation starting...")
print('-' * 65)
ResultPath = os.path.join(CONFIG['RESULTS']['OUTPUT_DIR'], CONFIG['DATASET']['NAME'])
ResultPathMatrices = os.path.join(ResultPath, 'Occlusion Matrices')
if not os.path.isdir(ResultPathMatrices):
os.makedirs(ResultPathMatrices)
# ----------------------------- #
# Model #
# ----------------------------- #
print('Evaluating Scene Recognition model.')
print('Selected Scene Recognition architecture: ' + CONFIG['MODEL']['ARCH'])
model = SceneRecognitionCNN(arch=CONFIG['MODEL']['ARCH'], scene_classes=CONFIG['DATASET']['N_CLASSES_SCENE'])
# Load the trained model
completePath = CONFIG['MODEL']['PATH'] + CONFIG['MODEL']['NAME'] + '.pth.tar'
if os.path.isfile(completePath):
print("Loading model {} from path {}...".format(CONFIG['MODEL']['NAME'], completePath))
checkpoint = torch.load(completePath)
best_prec1 = checkpoint['best_prec1']
model.load_state_dict(checkpoint['state_dict'])
print("Loaded model {} from path {}.".format(CONFIG['MODEL']['NAME'], completePath))
print(" Epochs {}".format(checkpoint['epoch']))
print(" Single crop reported precision {}".format(best_prec1))
else:
print("No checkpoint found at '{}'. Check configuration file MODEL field".format(completePath))
quit()
    # Move model to GPU and set it to evaluation mode
if USE_CUDA:
model.cuda()
cudnn.benchmark = USE_CUDA
model.eval()
# Model Parameters
model_parameters = filter(lambda p: p.requires_grad, model.parameters())
params = sum([np.prod(p.size()) for p in model_parameters])
# ----------------------------- #
# Dataset #
# ----------------------------- #
print('-' * 65)
print('Loading dataset {}...'.format(CONFIG['DATASET']['NAME']))
traindir = os.path.join(CONFIG['DATASET']['ROOT'], CONFIG['DATASET']['NAME'])
valdir = os.path.join(CONFIG['DATASET']['ROOT'], CONFIG['DATASET']['NAME'])
val_dataset = Places365Dataset(valdir, "val")
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=CONFIG['VALIDATION']['BATCH_SIZE']['TEST'],
shuffle=False, num_workers=CONFIG['DATALOADER']['NUM_WORKERS'], pin_memory=True)
classes = val_dataset.classes
# Get Histogram of class samples
ValHist = utils.getHistogramOfClasses(val_loader, classes)
# ----------------------------- #
# Printing Info #
# ----------------------------- #
# Print dataset information
print('Dataset loaded!')
print('Dataset Information:')
print('Validation set. Size {}. Batch size {}. Nbatches {}'
.format(len(val_loader) * CONFIG['VALIDATION']['BATCH_SIZE']['TEST'], CONFIG['VALIDATION']['BATCH_SIZE']['TEST'], len(val_loader)))
print('Number of scenes: {}' .format(len(classes)))
print('-' * 65)
print('Computing histogram of scene classes...')
print('-' * 65)
print('Number of params: {}'. format(params))
print('-' * 65)
print('GPU in use: {} with {} memory'.format(torch.cuda.get_device_name(0), torch.cuda.max_memory_allocated(0)))
print('-' * 65)
# ----------------------------- #
# Evaluation #
# ----------------------------- #
print('Evaluating dataset ...')
# Evaluate model on validation set
val_top1, val_top2, val_top5, val_loss, val_ClassTPDic = evaluationDataLoader(val_loader, model, set='Validation')
# Save Validation Class Accuracy
val_ClassAcc_top1 = (val_ClassTPDic['Top1'] / (ValHist + 0.0001)) * 100
# Print complete evaluation information
print('-' * 65)
print('Evaluation statistics:')
print('Validation results: Loss {val_loss:.3f}, Prec@1 {top1:.3f}, Prec@2 {top2:.3f}, Prec@5 {top5:.3f}, '
'Mean Class Accuracy {MCA:.3f}'.format(val_loss=val_loss, top1=val_top1, top2=val_top2, top5=val_top5, MCA=np.mean(val_ClassAcc_top1)))
| [
"torch.cuda.is_available",
"torch.nn.functional.softmax",
"numpy.mean",
"argparse.ArgumentParser",
"torch.unsqueeze",
"Libs.Utils.utils.obtainPredictedClasses",
"os.path.isdir",
"Libs.Utils.utils.getHistogramOfClasses",
"os.path.isfile",
"Libs.Utils.utils.accuracy",
"torch.cuda.max_memory_allocated",
"time.time",
"torch.cat",
"Libs.Datasets.Places365Dataset.Places365Dataset",
"torch.cuda.get_device_name",
"pickle.dump",
"os.makedirs",
"torch.load",
"os.path.join",
"SceneRecognitionCNN.SceneRecognitionCNN",
"Libs.Utils.utils.AverageMeter",
"numpy.zeros",
"torch.utils.data.DataLoader",
"torch.no_grad"
] | [((492, 579), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Semantic-Aware Scene Recognition Evaluation"""'}), "(description=\n 'Semantic-Aware Scene Recognition Evaluation')\n", (515, 579), False, 'import argparse\n'), ((7407, 7432), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7430, 7432), False, 'import torch\n'), ((7512, 7584), 'os.path.join', 'os.path.join', (["CONFIG['RESULTS']['OUTPUT_DIR']", "CONFIG['DATASET']['NAME']"], {}), "(CONFIG['RESULTS']['OUTPUT_DIR'], CONFIG['DATASET']['NAME'])\n", (7524, 7584), False, 'import os\n'), ((7606, 7652), 'os.path.join', 'os.path.join', (['ResultPath', '"""Occlusion Matrices"""'], {}), "(ResultPath, 'Occlusion Matrices')\n", (7618, 7652), False, 'import os\n'), ((7966, 8072), 'SceneRecognitionCNN.SceneRecognitionCNN', 'SceneRecognitionCNN', ([], {'arch': "CONFIG['MODEL']['ARCH']", 'scene_classes': "CONFIG['DATASET']['N_CLASSES_SCENE']"}), "(arch=CONFIG['MODEL']['ARCH'], scene_classes=CONFIG[\n 'DATASET']['N_CLASSES_SCENE'])\n", (7985, 8072), False, 'from SceneRecognitionCNN import SceneRecognitionCNN\n'), ((8175, 8203), 'os.path.isfile', 'os.path.isfile', (['completePath'], {}), '(completePath)\n', (8189, 8203), False, 'import os\n'), ((9237, 9303), 'os.path.join', 'os.path.join', (["CONFIG['DATASET']['ROOT']", "CONFIG['DATASET']['NAME']"], {}), "(CONFIG['DATASET']['ROOT'], CONFIG['DATASET']['NAME'])\n", (9249, 9303), False, 'import os\n'), ((9313, 9379), 'os.path.join', 'os.path.join', (["CONFIG['DATASET']['ROOT']", "CONFIG['DATASET']['NAME']"], {}), "(CONFIG['DATASET']['ROOT'], CONFIG['DATASET']['NAME'])\n", (9325, 9379), False, 'import os\n'), ((9395, 9426), 'Libs.Datasets.Places365Dataset.Places365Dataset', 'Places365Dataset', (['valdir', '"""val"""'], {}), "(valdir, 'val')\n", (9411, 9426), False, 'from Libs.Datasets.Places365Dataset import Places365Dataset\n'), ((9440, 9626), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['val_dataset'], {'batch_size': "CONFIG['VALIDATION']['BATCH_SIZE']['TEST']", 'shuffle': '(False)', 'num_workers': "CONFIG['DATALOADER']['NUM_WORKERS']", 'pin_memory': '(True)'}), "(val_dataset, batch_size=CONFIG['VALIDATION'][\n 'BATCH_SIZE']['TEST'], shuffle=False, num_workers=CONFIG['DATALOADER'][\n 'NUM_WORKERS'], pin_memory=True)\n", (9467, 9626), False, 'import torch\n'), ((9733, 9781), 'Libs.Utils.utils.getHistogramOfClasses', 'utils.getHistogramOfClasses', (['val_loader', 'classes'], {}), '(val_loader, classes)\n', (9760, 9781), False, 'from Libs.Utils import utils\n'), ((742, 762), 'Libs.Utils.utils.AverageMeter', 'utils.AverageMeter', ([], {}), '()\n', (760, 762), False, 'from Libs.Utils import utils\n'), ((776, 796), 'Libs.Utils.utils.AverageMeter', 'utils.AverageMeter', ([], {}), '()\n', (794, 796), False, 'from Libs.Utils import utils\n'), ((808, 828), 'Libs.Utils.utils.AverageMeter', 'utils.AverageMeter', ([], {}), '()\n', (826, 828), False, 'from Libs.Utils import utils\n'), ((840, 860), 'Libs.Utils.utils.AverageMeter', 'utils.AverageMeter', ([], {}), '()\n', (858, 860), False, 'from Libs.Utils import utils\n'), ((872, 892), 'Libs.Utils.utils.AverageMeter', 'utils.AverageMeter', ([], {}), '()\n', (890, 892), False, 'from Libs.Utils import utils\n'), ((1433, 1444), 'time.time', 'time.time', ([], {}), '()\n', (1442, 1444), False, 'import time\n'), ((7661, 7694), 'os.path.isdir', 'os.path.isdir', (['ResultPathMatrices'], {}), '(ResultPathMatrices)\n', (7674, 7694), False, 'import os\n'), ((7700, 7731), 'os.makedirs', 
'os.makedirs', (['ResultPathMatrices'], {}), '(ResultPathMatrices)\n', (7711, 7731), False, 'import os\n'), ((8314, 8338), 'torch.load', 'torch.load', (['completePath'], {}), '(completePath)\n', (8324, 8338), False, 'import torch\n'), ((1455, 1470), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1468, 1470), False, 'import torch\n'), ((10408, 10437), 'torch.cuda.get_device_name', 'torch.cuda.get_device_name', (['(0)'], {}), '(0)\n', (10434, 10437), False, 'import torch\n'), ((10439, 10473), 'torch.cuda.max_memory_allocated', 'torch.cuda.max_memory_allocated', (['(0)'], {}), '(0)\n', (10470, 10473), False, 'import torch\n'), ((1551, 1562), 'time.time', 'time.time', ([], {}), '()\n', (1560, 1562), False, 'import time\n'), ((1909, 1955), 'Libs.Utils.utils.obtainPredictedClasses', 'utils.obtainPredictedClasses', (['outputSceneLabel'], {}), '(outputSceneLabel)\n', (1937, 1955), False, 'from Libs.Utils import utils\n'), ((5278, 5345), 'Libs.Utils.utils.accuracy', 'utils.accuracy', (['outputSceneLabel.data', 'sceneLabelGT'], {'topk': '(1, 2, 5)'}), '(outputSceneLabel.data, sceneLabelGT, topk=(1, 2, 5))\n', (5292, 5345), False, 'from Libs.Utils import utils\n'), ((6844, 6880), 'pickle.dump', 'pickle.dump', (['Predictions', 'filehandle'], {}), '(Predictions, filehandle)\n', (6855, 6880), False, 'import pickle\n'), ((7037, 7075), 'pickle.dump', 'pickle.dump', (['SceneGTLabels', 'filehandle'], {}), '(SceneGTLabels, filehandle)\n', (7048, 7075), False, 'import pickle\n'), ((11195, 11221), 'numpy.mean', 'np.mean', (['val_ClassAcc_top1'], {}), '(val_ClassAcc_top1)\n', (11202, 11221), True, 'import numpy as np\n'), ((2478, 2579), 'numpy.zeros', 'np.zeros', (["[CONFIG['DATASET']['N_CLASSES_SEM'], CONFIG['DATASET']['N_CLASSES_SCENE'],\n batch_size]"], {}), "([CONFIG['DATASET']['N_CLASSES_SEM'], CONFIG['DATASET'][\n 'N_CLASSES_SCENE'], batch_size])\n", (2486, 2579), True, 'import numpy as np\n'), ((6708, 6758), 'os.path.join', 'os.path.join', (['ResultPath', "(set + '_Predictions.pkl')"], {}), "(ResultPath, set + '_Predictions.pkl')\n", (6720, 6758), False, 'import os\n'), ((6900, 6951), 'os.path.join', 'os.path.join', (['ResultPath', "(set + 'SceneGTLabels.pkl')"], {}), "(ResultPath, set + 'SceneGTLabels.pkl')\n", (6912, 6951), False, 'import os\n'), ((3597, 3622), 'torch.unsqueeze', 'torch.unsqueeze', (['R'], {'dim': '(1)'}), '(R, dim=1)\n', (3612, 3622), False, 'import torch\n'), ((3647, 3672), 'torch.unsqueeze', 'torch.unsqueeze', (['G'], {'dim': '(1)'}), '(G, dim=1)\n', (3662, 3672), False, 'import torch\n'), ((3697, 3722), 'torch.unsqueeze', 'torch.unsqueeze', (['B'], {'dim': '(1)'}), '(B, dim=1)\n', (3712, 3722), False, 'import torch\n'), ((3816, 3843), 'torch.cat', 'torch.cat', (['(R, G, B)'], {'dim': '(1)'}), '((R, G, B), dim=1)\n', (3825, 3843), False, 'import torch\n'), ((5648, 5659), 'time.time', 'time.time', ([], {}), '()\n', (5657, 5659), False, 'import time\n'), ((4745, 4792), 'pickle.dump', 'pickle.dump', (['results_array[:, :, k]', 'filehandle'], {}), '(results_array[:, :, k], filehandle)\n', (4756, 4792), False, 'import pickle\n'), ((6583, 6594), 'time.time', 'time.time', ([], {}), '()\n', (6592, 6594), False, 'import time\n'), ((2710, 2762), 'torch.nn.functional.softmax', 'torch.nn.functional.softmax', (['outputSceneLabel'], {'dim': '(1)'}), '(outputSceneLabel, dim=1)\n', (2737, 2762), False, 'import torch\n'), ((4107, 4168), 'torch.nn.functional.softmax', 'torch.nn.functional.softmax', (['outputSceneLabel_occluded'], {'dim': '(1)'}), '(outputSceneLabel_occluded, dim=1)\n', (4134, 
4168), False, 'import torch\n')] |
import pandas as pd
import pytlwall
import pytlwall.plot_util as plot
read_cfg = pytlwall.CfgIo('ex_surface_impedance/ex_surface_impedance.cfg')
mywall = read_cfg.read_pytlwall()
mywall.calc_ZLong()
mywall.calc_ZTrans()
# save all the data in a dataframe
savedir = 'ex_surface_impedance/output/'
data = {'f': mywall.f,
'ZLong real': mywall.ZLong.real,
'ZLong imag': mywall.ZLong.imag,
'ZTrans real': mywall.ZTrans.real,
'ZTrans imag': mywall.ZTrans.imag,
'ZLong Equivalent Surface real': mywall.ZLongSurf.real,
'ZLong Equivalent Surface imag': mywall.ZLongSurf.imag,
'ZTrans Equivalent Surface real': mywall.ZTransSurf.real,
'ZTrans Equivalent Surface imag': mywall.ZTransSurf.imag}
df = pd.DataFrame(data)
df.to_excel(savedir + 'output.xlsx')
# plot the impedances
savedir = 'ex_surface_impedance/img/'
savename = 'ZLongSurf.png'
title = 'Eq. Long. Surface imp.'
plot.plot_Z_vs_f_simple(mywall.f, mywall.ZLongSurf, 'S', title,
savedir, savename,
xscale='log', yscale='log')
savename = 'ZTransSurf.png'
title = 'Eq. Trans. Surface imp.'
plot.plot_Z_vs_f_simple(mywall.f, mywall.ZTransSurf, 'S', title,
savedir, savename,
xscale='log', yscale='log')
| [
"pandas.DataFrame",
"pytlwall.plot_util.plot_Z_vs_f_simple",
"pytlwall.CfgIo"
] | [((82, 145), 'pytlwall.CfgIo', 'pytlwall.CfgIo', (['"""ex_surface_impedance/ex_surface_impedance.cfg"""'], {}), "('ex_surface_impedance/ex_surface_impedance.cfg')\n", (96, 145), False, 'import pytlwall\n'), ((754, 772), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (766, 772), True, 'import pandas as pd\n'), ((931, 1045), 'pytlwall.plot_util.plot_Z_vs_f_simple', 'plot.plot_Z_vs_f_simple', (['mywall.f', 'mywall.ZLongSurf', '"""S"""', 'title', 'savedir', 'savename'], {'xscale': '"""log"""', 'yscale': '"""log"""'}), "(mywall.f, mywall.ZLongSurf, 'S', title, savedir,\n savename, xscale='log', yscale='log')\n", (954, 1045), True, 'import pytlwall.plot_util as plot\n'), ((1149, 1264), 'pytlwall.plot_util.plot_Z_vs_f_simple', 'plot.plot_Z_vs_f_simple', (['mywall.f', 'mywall.ZTransSurf', '"""S"""', 'title', 'savedir', 'savename'], {'xscale': '"""log"""', 'yscale': '"""log"""'}), "(mywall.f, mywall.ZTransSurf, 'S', title, savedir,\n savename, xscale='log', yscale='log')\n", (1172, 1264), True, 'import pytlwall.plot_util as plot\n')] |
# !/usr/bin/env python3
# -*- Coding: UTF-8 -*- #
# -*- System: Linux -*- #
# -*- Usage: *.py -*- #
# Owner: Cloud-Technology LLC.
# Source: gitlab.cloud-technology.io
# License: BSD 3-Clause License
"""
...
"""
# =============================================================================
# Local Imports
# =============================================================================
import dataclasses
import ipaddress
from . import *
import Mongo.Gaming.Schemas.Base
# =============================================================================
# Reference Type(s)
# =============================================================================
Scheme = Mongo.Gaming.Schemas.Base.Model
Setup = Scheme.Configuration
# =============================================================================
# Class (Schema) Initialization
# =============================================================================
__module__ = __name__
class Address(Scheme):
"""
...
"""
Assignment: String = Field(
...,
alias = "Assignment",
description = "Game Server Public IPv4 || IPv6 Address"
)
FQDN: Optional[String] = Field(
...,
alias = "FQDN",
description = "Public-Facing Hostname via DNS A Record"
)
V6: Boolean = Field(
...,
alias = "V6",
description = "Boolean Flag Evaluating the IP-V6 Assignment Property"
)
class Port(Scheme):
"""
...
"""
Name: String = Field(
...,
alias = "Name",
description = "Canonical Human-Readable Title of Attributed Port"
)
Value: Integer = Field(
...,
alias = "Value",
description = "Network Assigned Port Value"
)
Protocol: String = Field(
...,
alias = "Protocol",
description = "UDP, TCP, UDP+TCP Enumeration Communication Protocol"
)
Type: String = Field(
"Port",
alias = "Network-Type",
description = "XOR Port || Socket"
)
Description: String = Field(
...,
alias = "Description",
description = "Server Port Functional Purpose + Description"
)
class Base(Scheme):
"""
...
"""
Host: Address = Field(
default = Address(
Assignment = "0.0.0.0",
FQDN = None,
V6 = False
),
description = "Game Server Public IP(v4|v6) Address"
)
Game: Port = Field(
default = Port(
Name = "Game",
Type = "Port",
Value = 27015,
Protocol = "UDP+TCP",
Description = """\
Game Transmission, Ping(s) and RCON Forward Port via TCP + UDP
""".strip()
), alias = "Game-Port",
description = "Game Transmission Port"
)
Client: Port = Field(
default = Port(
Name = "Client",
Type = "Port",
Value = 27005,
Protocol = "UDP",
Description = "Client Connection Port"
), alias = "Client-Port",
description = "Client Transmission Port"
)
TV: Port = Field(
default = Port(
Name = "Valve-T-V",
Type = "Port",
Value = 27020,
Protocol = "UDP",
Description = "Valve TV Connection Port"
), alias = "Source-T-V-Port",
description = "Valve TV Transmission Port"
)
Map: String = Field(
default = "de_mirage",
alias = "Default-Map",
description = "The Default Server Map"
)
Maps: Optional[List[String]] = Field(
default = ["de_mirage"],
alias = "Map-Rotation",
description = "Available Server Ingame Map Rotation"
)
Players: Integer = Field(
default = 16,
alias = "Max-Players",
description = "Maximum Server-Connected Players"
)
Tick: Integer = Field(
default = 64,
alias = "Tick-Rate",
description = "Ingame Server-State Refresh Rate"
)
class Config(Setup):
title = "{0}".format(__module__.split(".").pop())
class Create(Base):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Create"
class Shard(Base):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Shard"
class Update(Base):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Update"
class Delete(Scheme):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Delete"
class Schema(Base):
"""
...
"""
_id: PydanticObjectId = None
ID: Union[PydanticObjectId, String, None] = String(_id) if _id else None
class Config(Base.Config): title = Base.Config.title + "-" + "Schema"
class Association(Schema):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Association"
class Generator(Schema):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "Generator"
class JSON(Generator):
"""
...
"""
class Config(Base.Config): title = Base.Config.title + "-" + "JSON"
@dataclasses.dataclass()
class Response:
Mutation: String = Field(...)
Status: Integer = Field(...)
Message: String = Field(...)
@dataclasses.dataclass()
class Success(Response):
Mutation: String = Field("N/A")
Status: Integer = Field(200, alias = "Status-Code")
Message: String = Field("Success", description = "...")
@dataclasses.dataclass()
class Error(Response):
Mutation: String = Field("N/A")
Status: Integer = Field(400, alias = "Status-Code")
Message: String = Field("Error", description = "...")
| [
"dataclasses.dataclass"
] | [((5229, 5252), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {}), '()\n', (5250, 5252), False, 'import dataclasses\n'), ((5382, 5405), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {}), '()\n', (5403, 5405), False, 'import dataclasses\n'), ((5596, 5619), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {}), '()\n', (5617, 5619), False, 'import dataclasses\n')] |
import gc
from .qfit import QFitRotamericResidue, QFitRotamericResidueOptions
from .qfit import QFitSegment, QFitSegmentOptions
import multiprocessing as mp
from tqdm import tqdm
import os.path
import os
import sys
import time
import argparse
from .custom_argparsers import ToggleActionFlag, CustomHelpFormatter
import logging
import traceback
from .logtools import setup_logging, log_run_info, poolworker_setup_logging, QueueListener
from . import MapScaler, Structure, XMap
from .structure.rotamers import ROTAMERS
logger = logging.getLogger(__name__)
os.environ["OMP_NUM_THREADS"] = "1"
def build_argparser():
p = argparse.ArgumentParser(formatter_class=CustomHelpFormatter,
description=__doc__)
p.add_argument("map", type=str,
help="Density map in CCP4 or MRC format, or an MTZ file "
"containing reflections and phases. For MTZ files "
"use the --label options to specify columns to read.")
p.add_argument("structure",
help="PDB-file containing structure.")
# Map input options
p.add_argument("-l", "--label", default="FWT,PHWT",
metavar="<F,PHI>",
help="MTZ column labels to build density")
p.add_argument('-r', "--resolution", default=None,
metavar="<float>", type=float,
help="Map resolution (Å) (only use when providing CCP4 map files)")
p.add_argument("-m", "--resolution-min", default=None,
metavar="<float>", type=float,
help="Lower resolution bound (Å) (only use when providing CCP4 map files)")
p.add_argument("-z", "--scattering", choices=["xray", "electron"], default="xray",
help="Scattering type")
p.add_argument("-rb", "--randomize-b", action="store_true", dest="randomize_b",
help="Randomize B-factors of generated conformers")
p.add_argument('-o', '--omit', action="store_true",
help="Treat map file as an OMIT map in map scaling routines")
# Map prep options
p.add_argument("--scale", action=ToggleActionFlag, dest="scale", default=True,
help="Scale density")
p.add_argument("-sv", "--scale-rmask", dest="scale_rmask", default=1.0,
metavar="<float>", type=float,
help="Scaling factor for soft-clash mask radius")
p.add_argument("-dc", "--density-cutoff", default=0.3,
metavar="<float>", type=float,
help="Density values below this value are set to <density-cutoff-value>")
p.add_argument("-dv", "--density-cutoff-value", default=-1,
metavar="<float>", type=float,
help="Density values below <density-cutoff> are set to this value")
p.add_argument("--subtract", action=ToggleActionFlag, dest="subtract", default=True,
help="Subtract Fcalc of neighboring residues when running qFit")
p.add_argument("-pad", "--padding", default=8.0,
metavar="<float>", type=float,
help="Padding size for map creation")
p.add_argument("--waters-clash", action=ToggleActionFlag, dest="waters_clash", default=True,
help="Consider waters for soft clash detection")
# Sampling options
p.add_argument("--backbone", action=ToggleActionFlag, dest="sample_backbone", default=True,
help="Sample backbone using inverse kinematics")
p.add_argument('-bbs', "--backbone-step", default=0.1, dest="sample_backbone_step",
metavar="<float>", type=float,
help="Stepsize for the amplitude of backbone sampling (Å)")
p.add_argument('-bba', "--backbone-amplitude", default=0.3, dest="sample_backbone_amplitude",
metavar="<float>", type=float,
help="Maximum backbone amplitude (Å)")
p.add_argument('-bbv', "--backbone-sigma", default=0.125, dest="sample_backbone_sigma",
metavar="<float>", type=float,
help="Backbone random-sampling displacement (Å)")
p.add_argument("--sample-angle", action=ToggleActionFlag, dest="sample_angle", default=True,
help="Sample CA-CB-CG angle for aromatic F/H/W/Y residues")
p.add_argument('-sas', "--sample-angle-step", default=3.75, dest="sample_angle_step",
metavar="<float>", type=float,
help="CA-CB-CG bond angle sampling step in degrees")
p.add_argument('-sar', "--sample-angle-range", default=7.5, dest="sample_angle_range",
metavar="<float>", type=float,
help="CA-CB-CG bond angle sampling range in degrees [-x,x]")
p.add_argument("--sample-rotamers", action=ToggleActionFlag, dest="sample_rotamers", default=True,
help="Sample sidechain rotamers")
p.add_argument("-b", "--dofs-per-iteration", default=2,
metavar="<int>", type=int,
help="Number of internal degrees that are sampled/built per iteration")
p.add_argument("-s", "--dihedral-stepsize", default=10,
metavar="<float>", type=float,
help="Stepsize for dihedral angle sampling in degrees")
p.add_argument("-rn", "--rotamer-neighborhood", default=60,
metavar="<float>", type=float,
help="Chi dihedral-angle sampling range around each rotamer in degrees [-x,x]")
p.add_argument("--remove-conformers-below-cutoff", action="store_true",
dest="remove_conformers_below_cutoff",
help=("Remove conformers during sampling that have atoms "
"with no density support, i.e. atoms are positioned "
"at density values below <density-cutoff>"))
p.add_argument('-cf', "--clash-scaling-factor", default=0.75,
metavar="<float>", type=float,
help="Set clash scaling factor")
p.add_argument('-ec', "--external-clash", action="store_true", dest="external_clash",
help="Enable external clash detection during sampling")
p.add_argument("-bs", "--bulk-solvent-level", default=0.3,
metavar="<float>", type=float,
help="Bulk solvent level in absolute values")
p.add_argument("-c", "--cardinality", default=5,
metavar="<int>", type=int,
help="Cardinality constraint used during MIQP")
p.add_argument("-t", "--threshold", default=0.2,
metavar="<float>", type=float,
help="Threshold constraint used during MIQP")
p.add_argument("-hy", "--hydro", action="store_true", dest="hydro",
help="Include hydrogens during calculations")
p.add_argument('-rmsd', "--rmsd-cutoff", default=0.01,
metavar="<float>", type=float,
help="RMSD cutoff for removal of identical conformers")
p.add_argument("--threshold-selection", dest="bic_threshold", action=ToggleActionFlag, default=True,
help="Use BIC to select the most parsimonious MIQP threshold")
p.add_argument("-p", "--nproc", type=int, default=1, metavar="<int>",
help="Number of processors to use")
# qFit Segment options
p.add_argument("-f", "--fragment-length", default=4, dest="fragment_length",
metavar="<int>", type=int,
help="Fragment length used during qfit_segment")
p.add_argument("--segment-threshold-selection", action=ToggleActionFlag, dest="seg_bic_threshold", default=True,
help="Use BIC to select the most parsimonious MIQP threshold (segment)")
# Global options
p.add_argument("--random-seed", dest="random_seed",
metavar="<int>", type=int,
help="Seed value for PRNG")
# Output options
p.add_argument("-d", "--directory", default='.',
metavar="<dir>", type=os.path.abspath,
help="Directory to store results")
p.add_argument("-v", "--verbose", action="store_true",
help="Be verbose")
p.add_argument("--debug", action="store_true",
help="Log as much information as possible")
p.add_argument("--write-intermediate-conformers", action="store_true",
help="Write intermediate structures to file (useful with debugging)")
p.add_argument("--pdb", help="Name of the input PDB")
return p
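# Illustrative command line (hypothetical file names; flags as defined in build_argparser above):
#   python qfit_protein.py composite_omit_map.mtz model.pdb -l 2FOFCWT,PH2FOFCWT -p 8 -d qfit_results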
class QFitProteinOptions(QFitRotamericResidueOptions, QFitSegmentOptions):
def __init__(self):
super().__init__()
self.nproc = 1
self.verbose = True
self.omit = False
self.checkpoint = False
self.pdb = None
class QFitProtein:
def __init__(self, structure, xmap, options):
self.xmap = xmap
self.structure = structure
self.options = options
def run(self):
if self.options.pdb is not None:
self.pdb = self.options.pdb + '_'
else:
self.pdb = ''
multiconformer = self._run_qfit_residue_parallel()
structure = Structure.fromfile('multiconformer_model.pdb') # .reorder()
structure = structure.extract('e', 'H', '!=')
multiconformer = self._run_qfit_segment(structure)
return multiconformer
def _run_qfit_residue_parallel(self):
"""Run qfit independently over all residues."""
# This function hands out the job in parallel to a Pool of Workers.
# To create Workers, we will use "forkserver" where possible,
# and default to "spawn" elsewhere (e.g. on Windows).
try:
ctx = mp.get_context(method="forkserver")
except ValueError:
ctx = mp.get_context(method="spawn")
# Print execution stats
residues = list(self.structure.single_conformer_residues)
logger.info(f"RESIDUES: {len(residues)}")
logger.info(f"NPROC: {self.options.nproc}")
# Build a Manager, have it construct a Queue. This will conduct
# thread-safe and process-safe passing of LogRecords.
# Then launch a QueueListener Thread to read & handle LogRecords
# that are placed on the Queue.
mgr = mp.Manager()
logqueue = mgr.Queue()
listener = QueueListener(logqueue)
listener.start()
# Initialise progress bar
progress = tqdm(total=len(residues),
desc="Sampling residues",
unit="residue",
unit_scale=True,
leave=True,
miniters=1)
# Define callbacks and error callbacks to be attached to Jobs
def _cb(result):
if result:
logger.info(result)
progress.update()
def _error_cb(e):
tb = ''.join(traceback.format_exception(e.__class__, e, e.__traceback__))
logger.critical(tb)
progress.update()
# Launch a Pool and run Jobs
# Here, we calculate alternate conformers for individual residues.
with ctx.Pool(processes=self.options.nproc, maxtasksperchild=4) as pool:
futures = [pool.apply_async(QFitProtein._run_qfit_residue,
kwds={'residue': residue,
'structure': self.structure,
'xmap': self.xmap,
'options': self.options,
'logqueue': logqueue},
callback=_cb,
error_callback=_error_cb)
for residue in residues]
# Make sure all jobs are finished
for f in futures:
f.wait()
# Close the progressbar
pool.close()
pool.join()
progress.close()
# There are no more sub-processes, so we stop the QueueListener
listener.stop()
listener.join()
# Extract non-protein atoms
hetatms = self.structure.extract('record', 'HETATM', '==')
waters = self.structure.extract('record', 'ATOM', '==')
waters = waters.extract('resn', 'HOH', '==')
hetatms = hetatms.combine(waters)
# Combine all multiconformer residues into one structure
for residue in residues:
if residue.resn[0] not in ROTAMERS:
hetatms = hetatms.combine(residue)
continue
chain = residue.chain[0]
resid, icode = residue.id
directory = os.path.join(self.options.directory,
f"{chain}_{resid}")
if icode:
directory += f"_{icode}"
fname = os.path.join(directory, 'multiconformer_residue.pdb')
if not os.path.exists(fname):
continue
residue_multiconformer = Structure.fromfile(fname)
try:
multiconformer = multiconformer.combine(residue_multiconformer)
except UnboundLocalError:
multiconformer = residue_multiconformer
except FileNotFoundError:
logger.error(f"File \"{fname}\" not found!")
pass
multiconformer = multiconformer.combine(hetatms)
fname = os.path.join(self.options.directory,
"multiconformer_model.pdb")
if self.structure.scale or self.structure.cryst_info:
multiconformer.tofile(fname, self.structure.scale, self.structure.cryst_info)
else:
multiconformer.tofile(fname)
return multiconformer
def _run_qfit_segment(self, multiconformer):
self.options.randomize_b = False
self.options.bic_threshold = self.options.seg_bic_threshold
if self.options.seg_bic_threshold:
self.options.fragment_length = 3
else:
self.options.threshold = 0.2
self.xmap = self.xmap.extract(self.structure.coor, padding=5)
qfit = QFitSegment(multiconformer, self.xmap, self.options)
multiconformer = qfit()
fname = os.path.join(self.options.directory,
self.pdb + "multiconformer_model2.pdb")
if self.structure.scale or self.structure.cryst_info:
multiconformer.tofile(fname, self.structure.scale, self.structure.cryst_info)
else:
multiconformer.tofile(fname)
return multiconformer
@staticmethod
def _run_qfit_residue(residue, structure, xmap, options, logqueue):
"""Run qfit on a single residue to determine density-supported conformers."""
# Don't run qfit if we have a ligand or water
if residue.type != 'rotamer-residue':
return
# Set up logger hierarchy in this subprocess
poolworker_setup_logging(logqueue)
# This function is run in a subprocess, so `structure` and `residue` have
# been 'copied' (pickled+unpickled) as best as possible.
# However, `structure`/`residue` objects pickled and passed to subprocesses do
# not contain attributes decorated by @_structure_properties.
# This decorator attaches 'getter' and 'setter' _local_ functions to the attrs
# (defined within, and local to the _structure_properties function).
# Local functions are **unpickleable**, and as a result, so are these attrs.
# This includes:
# (record, atomid, name, altloc, resn, chain, resi, icode,
# q, b, e, charge, coor, active, u00, u11, u22, u01, u02, u12)
# Similarly, these objects are also missing attributes wrapped by @property:
# (covalent_radius, vdw_radius)
# Finally, the _selector object is only partially pickleable,
# as it contains a few methods that are defined by a local lambda inside
# pyparsing._trim_arity().
# Since all these attributes are attached by __init__ of the
# qfit.structure.base_structure._BaseStructure class,
# here, we call __init__ again, to make sure these objects are
# correctly initialised in a subprocess.
structure.__init__(
structure.data,
selection=structure._selection,
parent=structure.parent,
)
residue.__init__(
residue.data,
resi=residue.id[0],
icode=residue.id[1],
type=residue.type,
selection=residue._selection,
parent=residue.parent,
)
# Build the residue results directory
chainid = residue.chain[0]
resi, icode = residue.id
identifier = f"{chainid}_{resi}"
if icode:
identifier += f'_{icode}'
base_directory = options.directory
options.directory = os.path.join(base_directory, identifier)
try:
os.makedirs(options.directory)
except OSError:
pass
# Exit early if we have already run qfit for this residue
fname = os.path.join(options.directory, 'multiconformer_residue.pdb')
if os.path.exists(fname):
return
# Copy the structure
structure_new = structure
structure_resi = structure.extract(f'resi {resi} and chain {chainid}')
if icode:
structure_resi = structure_resi.extract('icode', icode)
chain = structure_resi[chainid]
conformer = chain.conformers[0]
residue = conformer[residue.id]
altlocs = sorted(list(set(residue.altloc)))
if len(altlocs) > 1:
try:
altlocs.remove('')
except ValueError:
pass
for altloc in altlocs[1:]:
sel_str = f"resi {resi} and chain {chainid} and altloc {altloc}"
sel_str = f"not ({sel_str})"
structure_new = structure_new.extract(sel_str)
# Copy the map
xmap_reduced = xmap.extract(residue.coor, padding=options.padding)
# Exception handling in case qFit-residue fails:
qfit = QFitRotamericResidue(residue, structure_new,
xmap_reduced, options)
try:
qfit.run()
except RuntimeError as e:
tb = ''.join(traceback.format_exception(e.__class__, e, e.__traceback__))
logger.warning(f"[{qfit.identifier}] "
f"Unable to produce an alternate conformer. "
f"Using deposited conformer A for this residue.")
logger.info(f"[{qfit.identifier}] This is a result of the following exception:\n"
f"{tb})")
qfit.conformer = residue.copy()
qfit._occupancies = [residue.q]
qfit._coor_set = [residue.coor]
qfit._bs = [residue.b]
# Save multiconformer_residue
qfit.tofile()
qfit_id = qfit.identifier
# How many conformers were found?
n_conformers = len(qfit.get_conformers())
# Freeing up some memory to avoid memory issues:
del xmap_reduced
del qfit
gc.collect()
# Return a string about the residue that was completed.
return f"[{qfit_id}]: {n_conformers} conformers"
def prepare_qfit_protein(options):
"""Loads files to build a QFitProtein job."""
# Load structure and prepare it
structure = Structure.fromfile(options.structure).reorder()
if not options.hydro:
structure = structure.extract('e', 'H', '!=')
# Load map and prepare it
xmap = XMap.fromfile(
options.map, resolution=options.resolution, label=options.label
)
xmap = xmap.canonical_unit_cell()
if options.scale is True:
scaler = MapScaler(xmap, scattering=options.scattering)
radius = 1.5
reso = None
if xmap.resolution.high is not None:
reso = xmap.resolution.high
elif options.resolution is not None:
reso = options.resolution
if reso is not None:
radius = 0.5 + reso / 3.0
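            # e.g. for a 2.0 Å map: radius = 0.5 + 2.0 / 3.0 ≈ 1.17 Å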
scaler.scale(structure, radius=options.scale_rmask*radius)
return QFitProtein(structure, xmap, options)
def main():
"""Default entrypoint for qfit_protein."""
# Collect and act on arguments
# (When args==None, argparse will default to sys.argv[1:])
p = build_argparser()
args = p.parse_args(args=None)
try:
os.mkdir(args.directory)
except OSError:
pass
# Apply the arguments to options
options = QFitProteinOptions()
options.apply_command_args(args)
# Setup logger
setup_logging(options=options)
log_run_info(options, logger)
# Build a QFitProtein job
qfit = prepare_qfit_protein(options=options)
# Run the QFitProtein job
time0 = time.time()
multiconformer = qfit.run()
logger.info(f"Total time: {time.time() - time0}s")
| [
"logging.getLogger",
"os.path.exists",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.join",
"multiprocessing.get_context",
"traceback.format_exception",
"os.mkdir",
"gc.collect",
"multiprocessing.Manager",
"time.time"
] | [((528, 555), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (545, 555), False, 'import logging\n'), ((625, 711), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'CustomHelpFormatter', 'description': '__doc__'}), '(formatter_class=CustomHelpFormatter, description=\n __doc__)\n', (648, 711), False, 'import argparse\n'), ((21123, 21134), 'time.time', 'time.time', ([], {}), '()\n', (21132, 21134), False, 'import time\n'), ((10376, 10388), 'multiprocessing.Manager', 'mp.Manager', ([], {}), '()\n', (10386, 10388), True, 'import multiprocessing as mp\n'), ((13563, 13627), 'os.path.join', 'os.path.join', (['self.options.directory', '"""multiconformer_model.pdb"""'], {}), "(self.options.directory, 'multiconformer_model.pdb')\n", (13575, 13627), False, 'import os\n'), ((14382, 14458), 'os.path.join', 'os.path.join', (['self.options.directory', "(self.pdb + 'multiconformer_model2.pdb')"], {}), "(self.options.directory, self.pdb + 'multiconformer_model2.pdb')\n", (14394, 14458), False, 'import os\n'), ((17118, 17158), 'os.path.join', 'os.path.join', (['base_directory', 'identifier'], {}), '(base_directory, identifier)\n', (17130, 17158), False, 'import os\n'), ((17339, 17400), 'os.path.join', 'os.path.join', (['options.directory', '"""multiconformer_residue.pdb"""'], {}), "(options.directory, 'multiconformer_residue.pdb')\n", (17351, 17400), False, 'import os\n'), ((17412, 17433), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (17426, 17433), False, 'import os\n'), ((19440, 19452), 'gc.collect', 'gc.collect', ([], {}), '()\n', (19450, 19452), False, 'import gc\n'), ((20743, 20767), 'os.mkdir', 'os.mkdir', (['args.directory'], {}), '(args.directory)\n', (20751, 20767), False, 'import os\n'), ((9797, 9832), 'multiprocessing.get_context', 'mp.get_context', ([], {'method': '"""forkserver"""'}), "(method='forkserver')\n", (9811, 9832), True, 'import multiprocessing as mp\n'), ((12817, 12873), 'os.path.join', 'os.path.join', (['self.options.directory', 'f"""{chain}_{resid}"""'], {}), "(self.options.directory, f'{chain}_{resid}')\n", (12829, 12873), False, 'import os\n'), ((12994, 13047), 'os.path.join', 'os.path.join', (['directory', '"""multiconformer_residue.pdb"""'], {}), "(directory, 'multiconformer_residue.pdb')\n", (13006, 13047), False, 'import os\n'), ((17184, 17214), 'os.makedirs', 'os.makedirs', (['options.directory'], {}), '(options.directory)\n', (17195, 17214), False, 'import os\n'), ((9878, 9908), 'multiprocessing.get_context', 'mp.get_context', ([], {'method': '"""spawn"""'}), "(method='spawn')\n", (9892, 9908), True, 'import multiprocessing as mp\n'), ((11008, 11067), 'traceback.format_exception', 'traceback.format_exception', (['e.__class__', 'e', 'e.__traceback__'], {}), '(e.__class__, e, e.__traceback__)\n', (11034, 11067), False, 'import traceback\n'), ((13067, 13088), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (13081, 13088), False, 'import os\n'), ((18587, 18646), 'traceback.format_exception', 'traceback.format_exception', (['e.__class__', 'e', 'e.__traceback__'], {}), '(e.__class__, e, e.__traceback__)\n', (18613, 18646), False, 'import traceback\n'), ((21198, 21209), 'time.time', 'time.time', ([], {}), '()\n', (21207, 21209), False, 'import time\n')] |
from genericpath import exists
import threading
import queue
import socket
import json
import time
import sys
from eventHandler import handleEvents
from abc import ABC, abstractmethod
from DDSlogger import logger, config
# HELPER FUNCTION
def recvall(sock):
""" recvall implementation, receives data of whatever size in several chunks
Args:
sock (socket): connected socket
Returns:
[byte]: data received from the socket
"""
BUFF_SIZE = 4096 # 4 KiB, §EDIT buffer size
data = b''
while True:
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
# either 0 or end of data
break
return data
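# Illustrative use of recvall() on an already-connected socket (sketch only; host and port
# are placeholder values):
#   with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
#       s.connect((host, port))
#       payload = recvall(s)  # all bytes of the message, read in BUFF_SIZE chunks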
# abstract class
class FairLossLink(ABC):
@abstractmethod
def send(self, pid_receiver, message):
pass
@abstractmethod
def deliver(self, pid_sender, message):
pass
class FairLossLink_vTCP_simple(FairLossLink):
"""
# 2.4.2 Fair-Loss Links
This implementation relies on TCP sockets and on three threads:
1) one that keeps a listening socket open and waits for new connections
    2) one that takes care of sequentially receiving messages from all incoming connections
    3) one that transmits all messages enqueued to be sent
"""
def __init__(self, pid, servicePort : int, dest_addresses : dict) -> None:
"""
Args:
servicePort (int): port for the incoming connections
dest_addresses (dict): map pid -> IP address
"""
self.pid = pid
self.servicePort = servicePort
self.pid_to_address = dest_addresses
self.address_to_pid = dict((v,k) for k,v in self.pid_to_address.items())
self.to_receive = queue.Queue() # (socket, sourceIP)
self.to_send = queue.Queue() # (destIP, messageByte)
self.deliver_events = None # (pid_source, message)
linkInThread = threading.Thread(target=self.manage_links_in, args=()) # this thread should die with its parent process
linkInThread.start()
linkOutThread = threading.Thread(target=self.manage_links_out, args=()) # this thread should die with its parent process
linkOutThread.start()
receiveThread = threading.Thread(target=self.receive_message, args=()) # this thread should die with its parent process
receiveThread.start()
### LINK MANAGEMENT
def manage_links_in(self):
while True: # if the socket fails, re-open
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: # TCP socket
s.bind(('', self.servicePort)) # the socket is reachable by any address the machine happens to have.
s.listen(1) # we want it to queue up as many as * connect requests before refusing outside connections. §EDIT
while True:
sock, addr = s.accept()
self.to_receive.put((sock,addr))
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
except Exception as ex:
_, _, exc_tb = sys.exc_info()
logger.debug('Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(ex))+' : '+str(ex))
def manage_links_out(self):
while True:
ipDestionation, message = self.to_send.get()
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.settimeout(2) # connect timeout
s.connect((ipDestionation, self.servicePort))
s.settimeout(None) # back to a blocking socket
s.sendall(message)
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_send: sent '+str(message) +' to '+self.address_to_pid[ipDestionation])
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
except Exception as ex:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(ex))+' : '+str(ex))
def receive_message(self):
while True:
sock, addr = self.to_receive.get()
try:
with sock:
received_data = recvall(sock)
message = json.loads(received_data.decode('utf-8')) #§NOTE what about decoding errors?
self.deliver(self.address_to_pid[addr[0]], message['msg']) #§NOTE direct delivery
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
except Exception as ex:
_, _, exc_tb = sys.exc_info()
                logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(ex))+' : '+str(ex))
### INTERFACES
def send(self, pid_receiver, message):
data_to_send = {'msg' : message} #§NOTE message needs to be convertible in JSON
data_to_send_byte = json.dumps(data_to_send).encode('utf-8')
self.to_send.put((self.pid_to_address[pid_receiver],data_to_send_byte))
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_send: sending '+str(message)+' to '+str(pid_receiver))
def deliver(self, pid_sender, message):
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_deliver: delivered '+str(message)+' from '+str(pid_sender))
if self.deliver_events != None:
self.deliver_events.put((pid_sender,message))
### INTERCONNECTION
def getDeliverEvents(self):
self.deliver_events = queue.Queue()
return self.deliver_events
class FairLossLink_vTCP_MTC(FairLossLink):
"""
# 2.4.2 Fair-Loss Links
MTC: Multiple Threads Connection
    This version improves on FairLossLink_vTCP_simple by employing multiple threads to handle the incoming and outgoing connections
"""
def __init__(self, pid, servicePort : int, dest_addresses : dict, n_threads_in : int = 1, n_threads_out : int = 1) -> None:
"""
Args:
servicePort (int): port for the incoming connections
dest_addresses (dict): map pid -> IP address
n_threads_in (int): number of threads managing incoming connections
n_threads_out (int): number of threads managing outgoing connections
"""
self.pid = pid
self.servicePort = servicePort
self.pid_to_address = dest_addresses
self.address_to_pid = dict((v,k) for k,v in self.pid_to_address.items())
self.to_receive = queue.Queue() # (socket, sourceIP)
self.to_send = queue.Queue() # (destIP, messageByte)
self.deliver_events = None # (pid_source, message)
linkInThread = threading.Thread(target=self.manage_links_in, args=(n_threads_in,)) # this thread should die with its parent process
linkInThread.start()
self.manage_links_out(n_threads_out)
### LINK MANAGEMENT
def manage_links_in(self, n_thread : int):
# creating multiple threads that handles the incoming connections
for i in range(n_thread):
receiveThread = threading.Thread(target=self.receive_message, args=()) # this thread should die with its parent process
receiveThread.start()
while True: # if the socket fails, re-open
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: # TCP socket
s.bind(('', self.servicePort)) # the socket is reachable by any address the machine happens to have.
s.listen(1) # we want it to queue up as many as * connect requests before refusing outside connections. §EDIT
while True:
sock, addr = s.accept()
self.to_receive.put((sock,addr))
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
except Exception as ex:
_, _, exc_tb = sys.exc_info()
logger.debug('Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(ex))+' : '+str(ex))
def manage_links_out(self, n_thread : int):
for i in range(n_thread):
sendThread = threading.Thread(target=self.send_message, args=()) # this thread should die with its parent process
sendThread.start()
def receive_message(self):
while True:
sock, addr = self.to_receive.get()
try:
with sock:
received_data = recvall(sock)
message = json.loads(received_data.decode('utf-8')) #§NOTE what about decoding errors?
self.deliver(self.address_to_pid[addr[0]], message['msg']) #§NOTE direct delivery
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
except Exception as ex:
_, _, exc_tb = sys.exc_info()
logger.debug('Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(ex))+' : '+str(ex))
def send_message(self):
while True:
ipDestionation, message = self.to_send.get()
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.settimeout(2) # connect timeout
s.connect((ipDestionation, self.servicePort))
s.settimeout(None) # back to a blocking socket
s.sendall(message)
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_send: sent '+str(message) +' to '+self.address_to_pid[ipDestionation])
except socket.error as err:
_, _, exc_tb = sys.exc_info()
logger.debug('pid:'+self.pid+' - Exception in '+str(sys._getframe( ).f_code.co_name)+":"+str(exc_tb.tb_lineno)+" - "+str(type(err))+' : '+str(err))
continue
            except Exception as ex: #§TO-DO proper exception handling, except socket.error:
                logger.debug('pid:'+self.pid+' - EXCEPTION, '+self.send_message.__name__+str(type(ex))+':'+str(ex)+' - '+str(ipDestionation))
### INTERFACES
def send(self, pid_receiver, message):
data_to_send = {'msg' : message} #§NOTE message needs to be convertible in JSON
data_to_send_byte = json.dumps(data_to_send).encode('utf-8')
self.to_send.put((self.pid_to_address[pid_receiver],data_to_send_byte))
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_send: sending '+str(message)+' to '+str(pid_receiver))
def deliver(self, pid_sender, message):
if config['LOG'].getboolean('fairlosslink'):
logger.info('pid:'+self.pid+' - '+'fll_deliver: delivered '+str(message)+' from '+str(pid_sender))
if self.deliver_events != None:
self.deliver_events.put((pid_sender,message))
### INTERCONNECTION
def getDeliverEvents(self):
self.deliver_events = queue.Queue()
return self.deliver_events
class StubbornLink:
"""
2.4.3 Stubborn Links
"""
def __init__(self, fll : FairLossLink, timeout) -> None:
self.fll = fll
self.pid = fll.pid
self.sent = []
self.fllDeliverEvents = self.fll.getDeliverEvents() # interconnection
self.send_events = queue.Queue()
self.deliver_events = None
# handle timeout events
timeoutEventHandlerThread = threading.Thread(target=self.onEventTimeout, args=(timeout, )) # this thread should die with its parent process
timeoutEventHandlerThread.start()
# handle fll_deliver events
fllDeliverEventHandlerThread = threading.Thread(target=handleEvents, args=(self.fllDeliverEvents, self.onEventFllDeliver))
fllDeliverEventHandlerThread.start()
        # handle send events
sendEventHandlerThread = threading.Thread(target=handleEvents, args=(self.send_events, self.onEventFlSend))
sendEventHandlerThread.start()
### EVENT HANDLERS
def onEventTimeout(self, seconds : float) -> None:
while True:
time.sleep(seconds)
for pid_receiver, message in self.sent:
self.fll.send(pid_receiver, message)
def onEventFllDeliver(self, pid_sender, message):
self.deliver(pid_sender, message)
def onEventFlSend(self, pid_receiver, message):
self.fll.send(pid_receiver,message)
self.sent.append((pid_receiver,message))
### INTERFACES
def send(self, pid_receiver, message):
if config['LOG'].getboolean('stubbornlink'):
logger.info('pid:'+self.pid+' - '+'sl_send: sending '+str(message)+' to '+str(pid_receiver))
self.send_events.put((pid_receiver, message))
def deliver(self, pid_sender, message):
if config['LOG'].getboolean('stubbornlink'):
logger.info('pid:'+self.pid+' - '+'sl_deliver: delivered '+str(message)+' from '+str(pid_sender))
if self.deliver_events != None:
self.deliver_events.put((pid_sender,message))
### INTERCONNECTION
def getDeliverEvents(self):
self.deliver_events = queue.Queue()
return self.deliver_events
# abstract class
class PerfectLink(ABC):
@abstractmethod
def send(self, pid_receiver, message):
pass
@abstractmethod
def deliver(self, pid_sender, message):
pass
class PerfectLinkOnStubborn(PerfectLink):
"""
2.4.4 Perfect Links
"""
def __init__(self, sl : StubbornLink) -> None:
self.sl = sl
self.pid = sl.pid
self.delivered = []
self.send_events = queue.Queue()
self.tagged_deliver_events = {} # collect deliver events with a specific message tag
self.deliver_events = None
self.slDeliverEvents = self.sl.getDeliverEvents()
slDeliverEventHandlerThread = threading.Thread(target=handleEvents, args=(self.slDeliverEvents, self.onEventSlDeliver))
slDeliverEventHandlerThread.start()
plSendEventHandlerThread = threading.Thread(target=handleEvents, args=(self.send_events, self.onEventPlSend))
plSendEventHandlerThread.start()
### EVENT HANDLERS
def onEventSlDeliver(self, pid_sender, message):
pid_sender_message_tuple = (pid_sender, message)
if pid_sender_message_tuple not in self.delivered:
self.delivered.append(pid_sender_message_tuple)
self.deliver(pid_sender_message_tuple[0], pid_sender_message_tuple[1])
def onEventPlSend(self, pid_receiver, message):
self.sl.send(pid_receiver,message)
### INTERFACES
def send(self, pid_receiver, message):
self.send_events.put((pid_receiver,message))
if config['LOG'].getboolean('perfectlink'):
logger.info('pid:'+self.pid+' - '+'pl_send: sending '+str(message)+' to '+str(pid_receiver))
def deliver(self, pid_sender, message):
if config['LOG'].getboolean('perfectlink'):
logger.info('pid:'+self.pid+' - '+'pl_deliver: delivered '+str(message)+' from '+str(pid_sender))
if len(message) > 1 and isinstance(message[0],str) and message[0][:3] == 'MT:' and message[0] in self.tagged_deliver_events:
self.tagged_deliver_events[message[0]].put((pid_sender,message))
elif self.deliver_events != None:
self.deliver_events.put((pid_sender,message))
### INTERCONNECTION
def getDeliverEvents(self):
self.deliver_events = queue.Queue()
return self.deliver_events
def getTaggedDeliverEvents(self, msg_tag : str) -> queue.Queue:
"""
        msg_tag (str) : get delivery events for a specific message tag (msg_tag must NOT include the 'MT:' prefix)
"""
self.tagged_deliver_events['MT:'+msg_tag] = queue.Queue()
return self.tagged_deliver_events['MT:'+msg_tag]
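# Illustrative wiring of the link stack (commented sketch only; pids, addresses and timeout
# are placeholder values):
#   fll = FairLossLink_vTCP_simple('p1', 5000, {'p2': '10.0.0.2'})
#   sl = StubbornLink(fll, timeout=1.0)
#   pl = PerfectLinkOnStubborn(sl)
#   pl.send('p2', ['MT:demo', 'hello'])  # 'p2' receives it via pl.getTaggedDeliverEvents('demo')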
class PerfectLinkPingPong(PerfectLink):
"""
    PerfectLink implementation on a fair-loss link, based on an ack mechanism to avoid infinite retransmissions
"""
def __init__(self, fll : FairLossLink, timeout : int) -> None:
self.fll = fll
self.pid = fll.pid
self.delivered = []
self.waitingForAck = []
self.send_events = queue.Queue()
self.tagged_deliver_events = {} # collect deliver events with a specific message tag
self.deliver_events = None
self.flDeliverEvents = self.fll.getDeliverEvents()
# handle timeout events
timeoutEventHandlerThread = threading.Thread(target=self.onEventTimeout, args=(timeout, )) # this thread should die with its parent process
timeoutEventHandlerThread.start()
flDeliverEventHandlerThread = threading.Thread(target=handleEvents, args=(self.flDeliverEvents, self.onEventFlDeliver))
flDeliverEventHandlerThread.start()
plSendEventHandlerThread = threading.Thread(target=handleEvents, args=(self.send_events, self.onEventPlSend))
plSendEventHandlerThread.start()
### EVENT HANDLERS
def onEventTimeout(self, seconds : float) -> None:
while True:
time.sleep(seconds)
for pid_receiver, message in self.waitingForAck:
self.fll.send(pid_receiver, message)
def onEventFlDeliver(self, pid_sender, message):
if message[0] == 'pl_ACK':
innerMessage = message[1:]
pid_sender_message_tuple = (pid_sender, innerMessage)
if pid_sender_message_tuple in self.waitingForAck:
self.waitingForAck.remove(pid_sender_message_tuple)
else:
pid_sender_message_tuple = (pid_sender, message)
if pid_sender_message_tuple not in self.delivered:
self.delivered.append(pid_sender_message_tuple)
self.deliver(pid_sender, message)
messageToAck = ['pl_ACK'] + message
self.fll.send(pid_receiver=pid_sender, message=messageToAck)
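    # Message flow of the ping-pong protocol (descriptive summary of the handlers in this class):
    # the sender fl-sends m and keeps (receiver, m) in waitingForAck for periodic retransmission;
    # the receiver pl-delivers m at most once and replies with ['pl_ACK'] + m; when that ack is
    # fl-delivered back, the sender removes m from waitingForAck, which stops the retransmissions.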
def onEventPlSend(self, pid_receiver, message):
self.fll.send(pid_receiver,message)
self.waitingForAck.append((pid_receiver, message))
### INTERFACES
def send(self, pid_receiver, message):
self.send_events.put((pid_receiver,message))
if config['LOG'].getboolean('perfectlink'):
logger.info('pid:'+self.pid+' - '+'pl_send: sending '+str(message)+' to '+str(pid_receiver))
def deliver(self, pid_sender, message):
if config['LOG'].getboolean('perfectlink'):
logger.info('pid:'+self.pid+' - '+'pl_deliver: delivered '+str(message)+' from '+str(pid_sender))
if len(message) > 1 and isinstance(message[0],str) and message[0][:3] == 'MT:' and message[0] in self.tagged_deliver_events:
self.tagged_deliver_events[message[0]].put((pid_sender,message))
elif self.deliver_events != None:
self.deliver_events.put((pid_sender,message))
### INTERCONNECTION
def getDeliverEvents(self):
self.deliver_events = queue.Queue()
return self.deliver_events
def getTaggedDeliverEvents(self, msg_tag : str) -> queue.Queue:
"""
        msg_tag (str) : get delivery events for a specific message tag (msg_tag must NOT include the 'MT:' prefix)
"""
self.tagged_deliver_events['MT:'+msg_tag] = queue.Queue()
return self.tagged_deliver_events['MT:'+msg_tag] | [
"socket.socket",
"json.dumps",
"time.sleep",
"sys._getframe",
"sys.exc_info",
"threading.Thread",
"queue.Queue"
] | [((1831, 1844), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1842, 1844), False, 'import queue\n'), ((1890, 1903), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1901, 1903), False, 'import queue\n'), ((2030, 2084), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.manage_links_in', 'args': '()'}), '(target=self.manage_links_in, args=())\n', (2046, 2084), False, 'import threading\n'), ((2200, 2255), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.manage_links_out', 'args': '()'}), '(target=self.manage_links_out, args=())\n', (2216, 2255), False, 'import threading\n'), ((2377, 2431), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.receive_message', 'args': '()'}), '(target=self.receive_message, args=())\n', (2393, 2431), False, 'import threading\n'), ((6653, 6666), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (6664, 6666), False, 'import queue\n'), ((7688, 7701), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (7699, 7701), False, 'import queue\n'), ((7747, 7760), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (7758, 7760), False, 'import queue\n'), ((7887, 7954), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.manage_links_in', 'args': '(n_threads_in,)'}), '(target=self.manage_links_in, args=(n_threads_in,))\n', (7903, 7954), False, 'import threading\n'), ((12742, 12755), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (12753, 12755), False, 'import queue\n'), ((13112, 13125), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (13123, 13125), False, 'import queue\n'), ((13237, 13298), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.onEventTimeout', 'args': '(timeout,)'}), '(target=self.onEventTimeout, args=(timeout,))\n', (13253, 13298), False, 'import threading\n'), ((13480, 13576), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.fllDeliverEvents, self.onEventFllDeliver)'}), '(target=handleEvents, args=(self.fllDeliverEvents, self.\n onEventFllDeliver))\n', (13496, 13576), False, 'import threading\n'), ((13691, 13778), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.send_events, self.onEventFlSend)'}), '(target=handleEvents, args=(self.send_events, self.\n onEventFlSend))\n', (13707, 13778), False, 'import threading\n'), ((15007, 15020), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (15018, 15020), False, 'import queue\n'), ((15519, 15532), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (15530, 15532), False, 'import queue\n'), ((15764, 15858), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.slDeliverEvents, self.onEventSlDeliver)'}), '(target=handleEvents, args=(self.slDeliverEvents, self.\n onEventSlDeliver))\n', (15780, 15858), False, 'import threading\n'), ((15937, 16024), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.send_events, self.onEventPlSend)'}), '(target=handleEvents, args=(self.send_events, self.\n onEventPlSend))\n', (15953, 16024), False, 'import threading\n'), ((17409, 17422), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (17420, 17422), False, 'import queue\n'), ((17727, 17740), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (17738, 17740), False, 'import queue\n'), ((18191, 18204), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (18202, 18204), False, 'import queue\n'), ((18468, 18529), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.onEventTimeout', 'args': '(timeout,)'}), '(target=self.onEventTimeout, 
args=(timeout,))\n', (18484, 18529), False, 'import threading\n'), ((18665, 18759), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.flDeliverEvents, self.onEventFlDeliver)'}), '(target=handleEvents, args=(self.flDeliverEvents, self.\n onEventFlDeliver))\n', (18681, 18759), False, 'import threading\n'), ((18838, 18925), 'threading.Thread', 'threading.Thread', ([], {'target': 'handleEvents', 'args': '(self.send_events, self.onEventPlSend)'}), '(target=handleEvents, args=(self.send_events, self.\n onEventPlSend))\n', (18854, 18925), False, 'import threading\n'), ((20993, 21006), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (21004, 21006), False, 'import queue\n'), ((21311, 21324), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (21322, 21324), False, 'import queue\n'), ((8317, 8371), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.receive_message', 'args': '()'}), '(target=self.receive_message, args=())\n', (8333, 8371), False, 'import threading\n'), ((9636, 9687), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.send_message', 'args': '()'}), '(target=self.send_message, args=())\n', (9652, 9687), False, 'import threading\n'), ((13932, 13951), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (13942, 13951), False, 'import time\n'), ((19079, 19098), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (19089, 19098), False, 'import time\n'), ((5962, 5986), 'json.dumps', 'json.dumps', (['data_to_send'], {}), '(data_to_send)\n', (5972, 5986), False, 'import json\n'), ((12051, 12075), 'json.dumps', 'json.dumps', (['data_to_send'], {}), '(data_to_send)\n', (12061, 12075), False, 'import json\n'), ((2674, 2723), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (2687, 2723), False, 'import socket\n'), ((3209, 3223), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3221, 3223), False, 'import sys\n'), ((3485, 3499), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3497, 3499), False, 'import sys\n'), ((3801, 3850), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (3814, 3850), False, 'import socket\n'), ((4361, 4375), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4373, 4375), False, 'import sys\n'), ((4638, 4652), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4650, 4652), False, 'import sys\n'), ((5313, 5327), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5325, 5327), False, 'import sys\n'), ((5589, 5603), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5601, 5603), False, 'import sys\n'), ((8551, 8600), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (8564, 8600), False, 'import socket\n'), ((9086, 9100), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9098, 9100), False, 'import sys\n'), ((9362, 9376), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9374, 9376), False, 'import sys\n'), ((10266, 10280), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10278, 10280), False, 'import sys\n'), ((10542, 10556), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10554, 10556), False, 'import sys\n'), ((10852, 10901), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (10865, 10901), False, 'import socket\n'), ((11412, 11426), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (11424, 11426), 
False, 'import sys\n'), ((3293, 3308), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (3306, 3308), False, 'import sys\n'), ((3550, 3565), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (3563, 3565), False, 'import sys\n'), ((4445, 4460), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (4458, 4460), False, 'import sys\n'), ((4722, 4737), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (4735, 4737), False, 'import sys\n'), ((5397, 5412), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5410, 5412), False, 'import sys\n'), ((5673, 5688), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5686, 5688), False, 'import sys\n'), ((9170, 9185), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (9183, 9185), False, 'import sys\n'), ((9427, 9442), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (9440, 9442), False, 'import sys\n'), ((10350, 10365), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (10363, 10365), False, 'import sys\n'), ((10607, 10622), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (10620, 10622), False, 'import sys\n'), ((11496, 11511), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (11509, 11511), False, 'import sys\n')] |
from django.shortcuts import render
# Create your views here.
def home(request):
return render(request, 'webpages/home.html')
def about(request):
return render(request, 'webpages/about.html')
def services(request):
return render(request, 'webpages/services.html')
def contact(request):
return render(request, 'webpages/contact.html')
| [
"django.shortcuts.render"
] | [((95, 132), 'django.shortcuts.render', 'render', (['request', '"""webpages/home.html"""'], {}), "(request, 'webpages/home.html')\n", (101, 132), False, 'from django.shortcuts import render\n'), ((166, 204), 'django.shortcuts.render', 'render', (['request', '"""webpages/about.html"""'], {}), "(request, 'webpages/about.html')\n", (172, 204), False, 'from django.shortcuts import render\n'), ((241, 282), 'django.shortcuts.render', 'render', (['request', '"""webpages/services.html"""'], {}), "(request, 'webpages/services.html')\n", (247, 282), False, 'from django.shortcuts import render\n'), ((318, 358), 'django.shortcuts.render', 'render', (['request', '"""webpages/contact.html"""'], {}), "(request, 'webpages/contact.html')\n", (324, 358), False, 'from django.shortcuts import render\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import rospy
from rospy import Publisher, Service
from gs_interfaces.srv import Live, LiveResponse
from gs_interfaces.srv import Log,LogResponse
from gs_interfaces.srv import Led,LedResponse
from gs_interfaces.srv import Event,EventResponse
from gs_interfaces.srv import Time, TimeResponse
from gs_interfaces.srv import Info, InfoResponse
from gs_interfaces.srv import NavigationSystem,NavigationSystemResponse
from gs_interfaces.srv import SetNavigationSystem,SetNavigationSystemResponse
from gs_interfaces.srv import Position, PositionResponse
from gs_interfaces.srv import PositionGPS, PositionGPSResponse
from gs_interfaces.srv import Yaw, YawResponse
from gs_interfaces.srv import ParametersList, ParametersListResponse
from gs_interfaces.srv import SetParametersList, SetParametersListResponse
from gs_interfaces.msg import SimpleBatteryState, OptVelocity, PointGPS, SatellitesGPS, Orientation
from std_msgs.msg import Float32, Int32, String, Int8
from geometry_msgs.msg import Point
from std_srvs.srv import Empty, EmptyResponse
from std_srvs.srv import SetBool, SetBoolResponse
from threading import Thread
from math import degrees, sqrt
from time import sleep
TIME_FOR_RESTART = 5 # approximate time needed for the base board to restart
class ROSSimNode(): # ros_plaz_node node class
    def __init__(self, rate = None, start_x = 0.0, start_y = 0.0, start_z = 0.0):
        self.live = False # connection state of the autopilot base board
        self.event_messages = (10, 12, 23, 2) # available autopilot events (commands)
        self.callback_event_messages = (255, 26, 31, 32, 42, 43, 51, 56, 65) # events returned by the autopilot
        self.state_event = -1 # last event sent to the autopilot
        self.state_callback_event = -1 # last event received from the autopilot
        self.state_position = [0., 0., 0., 0.] # last point the copter was sent to (in local coordinates)
        self.rate = rate # rate timer
self.log = []
self.preflight_state = False
self.takeoff_state = False
self.__start_x = start_x
self.__start_y = start_y
self.__start_z = start_z
self.x = start_x
self.y = start_y
self.z = start_z
self.yaw = 90.0
self.logger = Service("geoscan/get_log", Log, self.handle_log)
        self.alive = Service("geoscan/alive", Live, self.handle_live) # service reporting the connection state
self.info_service = Service("geoscan/board/get_info", Info, self.handle_info)
self.time_service = Service("geoscan/board/get_time", Time, self.handle_time)
self.uptime_service = Service("geoscan/board/get_uptime", Time, self.handle_uptime)
self.flight_time_service = Service("geoscan/board/get_flight_time", Time, self.handle_flight_time)
        self.get_autopilot_params_service = Service("geoscan/board/get_parameters", ParametersList, self.handle_get_autopilot_params) # service returning the autopilot parameters
        self.set_autopilot_params_service = Service("geoscan/board/set_parameters", SetParametersList, self.handle_set_autopilot_params) # service setting the autopilot parameters
        self.restart_service = Service("geoscan/board/restart", Empty, self.handle_restart) # base board restart service
        self.get_navigation_service = Service("geoscan/navigation/get_system", NavigationSystem, self.handle_get_navigation_system) # service returning the current positioning system
        self.set_navigation_service = Service("geoscan/navigation/set_system", SetNavigationSystem, self.handle_set_navigation_system) # service setting the current positioning system
        self.local_position_service = Service("geoscan/flight/set_local_position", Position, self.handle_local_pos) # service for flying to a local point
        self.global_position_service = Service("geoscan/flight/set_global_position",PositionGPS, self.handle_gps_pos)
        self.yaw_service = Service("geoscan/flight/set_yaw", Yaw, self.handle_yaw) # yaw control service
        self.event_service = Service("geoscan/flight/set_event", Event, self.handle_event) # autopilot event control service
        self.module_led_service = Service("geoscan/led/module/set", Led, self.handle_led) # service controlling the LEDs on the LED module
self.board_led_service = Service("geoscan/led/board/set", Led, self.handle_led)
self.logger_publisher = Publisher("geoscan/log", String, queue_size=10)
        self.battery_publisher = Publisher("geoscan/battery_state", SimpleBatteryState, queue_size=10) # battery state topic publisher
        self.local_position_publisher = Publisher("geoscan/navigation/local/position", Point, queue_size=10) # LPS position topic publisher
        self.local_yaw_publisher = Publisher("geoscan/navigation/local/yaw", Float32, queue_size=10) # LPS yaw topic publisher
self.local_velocity_publisher = Publisher("geoscan/navigation/local/velocity", Point, queue_size=10)
self.global_position_publisher = Publisher("geoscan/navigation/global/position", PointGPS, queue_size=10)
self.global_status_publisher = Publisher("geoscan/navigation/global/status", Int8, queue_size=10)
self.satellites_publisher = Publisher("geoscan/navigation/satellites", SatellitesGPS, queue_size=10)
        self.opt_velocity_publisher = Publisher("geoscan/navigation/opt/velocity", OptVelocity, queue_size=10) # optical flow (OPT) velocity topic publisher
        self.callback_event_publisher = Publisher("geoscan/flight/callback_event", Int32, queue_size=10) # publisher of events returned by the autopilot
self.gyro_publisher = Publisher("geoscan/sensors/gyro", Point, queue_size=10)
self.accel_publisher = Publisher("geoscan/sensors/accel", Point, queue_size=10)
self.orientation_publisher = Publisher("geoscan/sensors/orientation", Orientation, queue_size=10)
self.altitude_publisher = Publisher("geoscan/sensors/altitude", Float32, queue_size=10)
self.mag_publisher = Publisher("geoscan/sensors/mag", Point, queue_size=10)
def __preflight(self):
sleep(0.5)
self.preflight_state = True
self.callback_event_publisher.publish(7)
def __takeoff(self):
if self.preflight_state and not self.takeoff_state:
for _ in range(0, 200):
self.z += 0.01
sleep(0.05)
self.takeoff_state = True
self.callback_event_publisher.publish(6)
def __landing(self, time=0.1):
if self.takeoff_state:
for _ in range(int(self.z * 100),0,-1):
self.z -= 0.01
sleep(time)
self.callback_event_publisher.publish(1)
else:
self.callback_event_publisher.publish(0)
self.takeoff_state = False
self.preflight_state = False
def __go_to_point(self, x, y, z):
delta_x = x - self.x
delta_y = y - self.y
delta_z = z - self.z
l = sqrt(delta_x**2 + delta_y**2 + delta_z**2)
for _ in range(0,int(l*100) - 1):
self.x += delta_x / l * 0.01
self.y += delta_y / l * 0.01
self.z += delta_z / l * 0.01
sleep(0.03)
self.callback_event_publisher.publish(5)
self.x += x - self.x
self.y += y - self.y
self.z += z - self.z
self.callback_event_publisher.publish(4)
def __update_yaw(self, angle):
if self.takeoff_state:
old_angle = int(self.yaw)
pri = 1
if angle < 0.0:
pri = -1
for new_angle in range(old_angle, old_angle + int(angle / 2), pri):
self.yaw = new_angle
sleep(0.03)
def __get_time(self):
return rospy.Time().now().to_sec()
    def handle_restart(self, request): # handler for the board restart request
        return EmptyResponse() # return an empty response
def handle_log(self, request):
return LogResponse(self.log)
def handle_live(self, request):
return LiveResponse(self.live)
def handle_info(self, request):
return InfoResponse()
def handle_time(self, request):
return TimeResponse(self.__get_time())
    def handle_uptime(self, request):
return TimeResponse(self.__get_time())
def handle_flight_time(self, request):
return TimeResponse(0.)
    def handle_event(self, request): # handler for sending an event (command) to the autopilot
if self.state_event != request.event:
if request.event == 0:
Thread(target=self.__preflight).start()
elif request.event == 1:
Thread(target=self.__takeoff).start()
elif request.event == 2:
Thread(target=self.__landing).start()
elif request.event == 3:
self.x = self.__start_x
self.y = self.__start_y
self.z = self.__start_z
self.takeoff_state = False
self.preflight_state = False
self.state_event = request.event
return EventResponse(1)
    def handle_local_pos(self, request): # handler for the fly-to-local-point request
        request_position = [request.position.x, request.position.y, request.position.z] # remember the point coordinates from the request
        if self.takeoff_state: # if airborne, fly to the requested point in a background thread
Thread(target=self.__go_to_point, args = [request_position[0], request_position[1], request_position[2]]).start()
self.state_position = request_position
        return PositionResponse(True) # return True - command executed
def handle_gps_pos(self, request):
return PositionGPSResponse(False)
    def handle_yaw(self, request): # handler for the yaw change request
        Thread(target=self.__update_yaw, args=[degrees(request.angle), ]).start()
        return YawResponse(True) # return True - command executed
    def handle_led(self, request): # handler for changing the LED colors on the LED module
        return LedResponse(True) # return True - command executed
    def handle_get_navigation_system(self, request): # handler for getting the current navigation system
        return NavigationSystemResponse("LPS") # return the name of the positioning system
def handle_set_navigation_system(self, request):
        return SetNavigationSystemResponse(True) # return True - command executed
def handle_get_autopilot_params(self, request):
return ParametersListResponse([])
def handle_set_autopilot_params(self, request):
        return SetParametersListResponse(True) # return True - command executed
def connect(self):
rospy.loginfo("Try to connect ...")
self.state_event = -1
self.state_callback_event = -1
rospy.loginfo("Board start connect - done")
self.live = True
def data_exchange(self):
if self.live:
battery_state = SimpleBatteryState()
battery_state.header.stamp = rospy.Time.now()
battery_state.charge = 8.33
self.battery_publisher.publish(battery_state)
self.accel_publisher.publish(Point())
self.gyro_publisher.publish(Point())
self.orientation_publisher.publish(Orientation())
self.altitude_publisher.publish(self.z)
local_point = Point()
local_point.x = self.x
local_point.y = self.y
local_point.z = self.z
self.local_position_publisher.publish(local_point)
self.local_yaw_publisher.publish(self.yaw)
def spin(self):
if self.live:
self.data_exchange()
else:
self.connect()
if self.rate is not None:
self.rate.sleep()
return True
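# Hedged client-side sketch (not part of the original node): how a separate ROS
# node (after its own rospy.init_node call) could drive this simulator through
# the services registered above. The request field names (event, position) are
# assumptions inferred from the handlers, which read request.event and
# request.position.
def example_flight_client():
    rospy.wait_for_service("geoscan/flight/set_event")
    set_event = rospy.ServiceProxy("geoscan/flight/set_event", Event)
    go_to = rospy.ServiceProxy("geoscan/flight/set_local_position", Position)
    set_event(event=0)                           # preflight
    set_event(event=1)                           # takeoff
    go_to(position=Point(x=1.0, y=1.0, z=1.5))  # fly to a local point
    set_event(event=2)                           # landing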
if __name__ == "__main__":
    rospy.init_node("ros_plaz_node") # initialize the node
    x = rospy.get_param(rospy.search_param("start_x")) # get the start X coordinate as a node parameter
if type(x) == dict:
x = 0.0
    y = rospy.get_param(rospy.search_param("start_y")) # get the start Y coordinate as a node parameter
if type(y) == dict:
y = 0.0
    z = rospy.get_param(rospy.search_param("start_z")) # get the start Z coordinate as a node parameter
if type(z) == dict:
z = 0.0
rate = rospy.Rate(100)
ros_plaz_node = ROSSimNode(rate, x, y, z)
while not rospy.is_shutdown():
if not ros_plaz_node.spin():
break
| [
"rospy.init_node",
"gs_interfaces.srv.LiveResponse",
"math.sqrt",
"time.sleep",
"rospy.Rate",
"gs_interfaces.srv.TimeResponse",
"gs_interfaces.srv.ParametersListResponse",
"rospy.Service",
"gs_interfaces.msg.SimpleBatteryState",
"gs_interfaces.srv.EventResponse",
"gs_interfaces.msg.Orientation",
"std_srvs.srv.EmptyResponse",
"math.degrees",
"gs_interfaces.srv.LogResponse",
"gs_interfaces.srv.SetParametersListResponse",
"rospy.Time.now",
"rospy.search_param",
"geometry_msgs.msg.Point",
"gs_interfaces.srv.PositionResponse",
"rospy.Time",
"gs_interfaces.srv.LedResponse",
"gs_interfaces.srv.YawResponse",
"rospy.Publisher",
"gs_interfaces.srv.SetNavigationSystemResponse",
"rospy.loginfo",
"gs_interfaces.srv.PositionGPSResponse",
"gs_interfaces.srv.InfoResponse",
"rospy.is_shutdown",
"threading.Thread",
"gs_interfaces.srv.NavigationSystemResponse"
] | [((12076, 12108), 'rospy.init_node', 'rospy.init_node', (['"""ros_plaz_node"""'], {}), "('ros_plaz_node')\n", (12091, 12108), False, 'import rospy\n'), ((12586, 12601), 'rospy.Rate', 'rospy.Rate', (['(100)'], {}), '(100)\n', (12596, 12601), False, 'import rospy\n'), ((2301, 2349), 'rospy.Service', 'Service', (['"""geoscan/get_log"""', 'Log', 'self.handle_log'], {}), "('geoscan/get_log', Log, self.handle_log)\n", (2308, 2349), False, 'from rospy import Publisher, Service\n'), ((2372, 2420), 'rospy.Service', 'Service', (['"""geoscan/alive"""', 'Live', 'self.handle_live'], {}), "('geoscan/alive', Live, self.handle_live)\n", (2379, 2420), False, 'from rospy import Publisher, Service\n'), ((2495, 2552), 'rospy.Service', 'Service', (['"""geoscan/board/get_info"""', 'Info', 'self.handle_info'], {}), "('geoscan/board/get_info', Info, self.handle_info)\n", (2502, 2552), False, 'from rospy import Publisher, Service\n'), ((2581, 2638), 'rospy.Service', 'Service', (['"""geoscan/board/get_time"""', 'Time', 'self.handle_time'], {}), "('geoscan/board/get_time', Time, self.handle_time)\n", (2588, 2638), False, 'from rospy import Publisher, Service\n'), ((2669, 2730), 'rospy.Service', 'Service', (['"""geoscan/board/get_uptime"""', 'Time', 'self.handle_uptime'], {}), "('geoscan/board/get_uptime', Time, self.handle_uptime)\n", (2676, 2730), False, 'from rospy import Publisher, Service\n'), ((2766, 2837), 'rospy.Service', 'Service', (['"""geoscan/board/get_flight_time"""', 'Time', 'self.handle_flight_time'], {}), "('geoscan/board/get_flight_time', Time, self.handle_flight_time)\n", (2773, 2837), False, 'from rospy import Publisher, Service\n'), ((2882, 2976), 'rospy.Service', 'Service', (['"""geoscan/board/get_parameters"""', 'ParametersList', 'self.handle_get_autopilot_params'], {}), "('geoscan/board/get_parameters', ParametersList, self.\n handle_get_autopilot_params)\n", (2889, 2976), False, 'from rospy import Publisher, Service\n'), ((3052, 3149), 'rospy.Service', 'Service', (['"""geoscan/board/set_parameters"""', 'SetParametersList', 'self.handle_set_autopilot_params'], {}), "('geoscan/board/set_parameters', SetParametersList, self.\n handle_set_autopilot_params)\n", (3059, 3149), False, 'from rospy import Publisher, Service\n'), ((3215, 3275), 'rospy.Service', 'Service', (['"""geoscan/board/restart"""', 'Empty', 'self.handle_restart'], {}), "('geoscan/board/restart', Empty, self.handle_restart)\n", (3222, 3275), False, 'from rospy import Publisher, Service\n'), ((3359, 3457), 'rospy.Service', 'Service', (['"""geoscan/navigation/get_system"""', 'NavigationSystem', 'self.handle_get_navigation_system'], {}), "('geoscan/navigation/get_system', NavigationSystem, self.\n handle_get_navigation_system)\n", (3366, 3457), False, 'from rospy import Publisher, Service\n'), ((3547, 3648), 'rospy.Service', 'Service', (['"""geoscan/navigation/set_system"""', 'SetNavigationSystem', 'self.handle_set_navigation_system'], {}), "('geoscan/navigation/set_system', SetNavigationSystem, self.\n handle_set_navigation_system)\n", (3554, 3648), False, 'from rospy import Publisher, Service\n'), ((3742, 3819), 'rospy.Service', 'Service', (['"""geoscan/flight/set_local_position"""', 'Position', 'self.handle_local_pos'], {}), "('geoscan/flight/set_local_position', Position, self.handle_local_pos)\n", (3749, 3819), False, 'from rospy import Publisher, Service\n'), ((3893, 3972), 'rospy.Service', 'Service', (['"""geoscan/flight/set_global_position"""', 'PositionGPS', 'self.handle_gps_pos'], {}), 
"('geoscan/flight/set_global_position', PositionGPS, self.handle_gps_pos)\n", (3900, 3972), False, 'from rospy import Publisher, Service\n'), ((3999, 4054), 'rospy.Service', 'Service', (['"""geoscan/flight/set_yaw"""', 'Yaw', 'self.handle_yaw'], {}), "('geoscan/flight/set_yaw', Yaw, self.handle_yaw)\n", (4006, 4054), False, 'from rospy import Publisher, Service\n'), ((4114, 4175), 'rospy.Service', 'Service', (['"""geoscan/flight/set_event"""', 'Event', 'self.handle_event'], {}), "('geoscan/flight/set_event', Event, self.handle_event)\n", (4121, 4175), False, 'from rospy import Publisher, Service\n'), ((4244, 4299), 'rospy.Service', 'Service', (['"""geoscan/led/module/set"""', 'Led', 'self.handle_led'], {}), "('geoscan/led/module/set', Led, self.handle_led)\n", (4251, 4299), False, 'from rospy import Publisher, Service\n'), ((4380, 4434), 'rospy.Service', 'Service', (['"""geoscan/led/board/set"""', 'Led', 'self.handle_led'], {}), "('geoscan/led/board/set', Led, self.handle_led)\n", (4387, 4434), False, 'from rospy import Publisher, Service\n'), ((4468, 4515), 'rospy.Publisher', 'Publisher', (['"""geoscan/log"""', 'String'], {'queue_size': '(10)'}), "('geoscan/log', String, queue_size=10)\n", (4477, 4515), False, 'from rospy import Publisher, Service\n'), ((4550, 4619), 'rospy.Publisher', 'Publisher', (['"""geoscan/battery_state"""', 'SimpleBatteryState'], {'queue_size': '(10)'}), "('geoscan/battery_state', SimpleBatteryState, queue_size=10)\n", (4559, 4619), False, 'from rospy import Publisher, Service\n'), ((4691, 4759), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/local/position"""', 'Point'], {'queue_size': '(10)'}), "('geoscan/navigation/local/position', Point, queue_size=10)\n", (4700, 4759), False, 'from rospy import Publisher, Service\n'), ((4825, 4890), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/local/yaw"""', 'Float32'], {'queue_size': '(10)'}), "('geoscan/navigation/local/yaw', Float32, queue_size=10)\n", (4834, 4890), False, 'from rospy import Publisher, Service\n'), ((4963, 5031), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/local/velocity"""', 'Point'], {'queue_size': '(10)'}), "('geoscan/navigation/local/velocity', Point, queue_size=10)\n", (4972, 5031), False, 'from rospy import Publisher, Service\n'), ((5074, 5146), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/global/position"""', 'PointGPS'], {'queue_size': '(10)'}), "('geoscan/navigation/global/position', PointGPS, queue_size=10)\n", (5083, 5146), False, 'from rospy import Publisher, Service\n'), ((5186, 5252), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/global/status"""', 'Int8'], {'queue_size': '(10)'}), "('geoscan/navigation/global/status', Int8, queue_size=10)\n", (5195, 5252), False, 'from rospy import Publisher, Service\n'), ((5289, 5361), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/satellites"""', 'SatellitesGPS'], {'queue_size': '(10)'}), "('geoscan/navigation/satellites', SatellitesGPS, queue_size=10)\n", (5298, 5361), False, 'from rospy import Publisher, Service\n'), ((5401, 5473), 'rospy.Publisher', 'Publisher', (['"""geoscan/navigation/opt/velocity"""', 'OptVelocity'], {'queue_size': '(10)'}), "('geoscan/navigation/opt/velocity', OptVelocity, queue_size=10)\n", (5410, 5473), False, 'from rospy import Publisher, Service\n'), ((5547, 5611), 'rospy.Publisher', 'Publisher', (['"""geoscan/flight/callback_event"""', 'Int32'], {'queue_size': '(10)'}), "('geoscan/flight/callback_event', Int32, queue_size=10)\n", (5556, 5611), False, 'from rospy 
import Publisher, Service\n'), ((5684, 5739), 'rospy.Publisher', 'Publisher', (['"""geoscan/sensors/gyro"""', 'Point'], {'queue_size': '(10)'}), "('geoscan/sensors/gyro', Point, queue_size=10)\n", (5693, 5739), False, 'from rospy import Publisher, Service\n'), ((5771, 5827), 'rospy.Publisher', 'Publisher', (['"""geoscan/sensors/accel"""', 'Point'], {'queue_size': '(10)'}), "('geoscan/sensors/accel', Point, queue_size=10)\n", (5780, 5827), False, 'from rospy import Publisher, Service\n'), ((5865, 5933), 'rospy.Publisher', 'Publisher', (['"""geoscan/sensors/orientation"""', 'Orientation'], {'queue_size': '(10)'}), "('geoscan/sensors/orientation', Orientation, queue_size=10)\n", (5874, 5933), False, 'from rospy import Publisher, Service\n'), ((5968, 6029), 'rospy.Publisher', 'Publisher', (['"""geoscan/sensors/altitude"""', 'Float32'], {'queue_size': '(10)'}), "('geoscan/sensors/altitude', Float32, queue_size=10)\n", (5977, 6029), False, 'from rospy import Publisher, Service\n'), ((6059, 6113), 'rospy.Publisher', 'Publisher', (['"""geoscan/sensors/mag"""', 'Point'], {'queue_size': '(10)'}), "('geoscan/sensors/mag', Point, queue_size=10)\n", (6068, 6113), False, 'from rospy import Publisher, Service\n'), ((6150, 6160), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (6155, 6160), False, 'from time import sleep\n'), ((7031, 7079), 'math.sqrt', 'sqrt', (['(delta_x ** 2 + delta_y ** 2 + delta_z ** 2)'], {}), '(delta_x ** 2 + delta_y ** 2 + delta_z ** 2)\n', (7035, 7079), False, 'from math import degrees, sqrt\n'), ((7941, 7956), 'std_srvs.srv.EmptyResponse', 'EmptyResponse', ([], {}), '()\n', (7954, 7956), False, 'from std_srvs.srv import Empty, EmptyResponse\n'), ((8034, 8055), 'gs_interfaces.srv.LogResponse', 'LogResponse', (['self.log'], {}), '(self.log)\n', (8045, 8055), False, 'from gs_interfaces.srv import Log, LogResponse\n'), ((8109, 8132), 'gs_interfaces.srv.LiveResponse', 'LiveResponse', (['self.live'], {}), '(self.live)\n', (8121, 8132), False, 'from gs_interfaces.srv import Live, LiveResponse\n'), ((8185, 8199), 'gs_interfaces.srv.InfoResponse', 'InfoResponse', ([], {}), '()\n', (8197, 8199), False, 'from gs_interfaces.srv import Info, InfoResponse\n'), ((8432, 8449), 'gs_interfaces.srv.TimeResponse', 'TimeResponse', (['(0.0)'], {}), '(0.0)\n', (8444, 8449), False, 'from gs_interfaces.srv import Time, TimeResponse\n'), ((9219, 9235), 'gs_interfaces.srv.EventResponse', 'EventResponse', (['(1)'], {}), '(1)\n', (9232, 9235), False, 'from gs_interfaces.srv import Event, EventResponse\n'), ((9742, 9764), 'gs_interfaces.srv.PositionResponse', 'PositionResponse', (['(True)'], {}), '(True)\n', (9758, 9764), False, 'from gs_interfaces.srv import Position, PositionResponse\n'), ((9858, 9884), 'gs_interfaces.srv.PositionGPSResponse', 'PositionGPSResponse', (['(False)'], {}), '(False)\n', (9877, 9884), False, 'from gs_interfaces.srv import PositionGPS, PositionGPSResponse\n'), ((10073, 10090), 'gs_interfaces.srv.YawResponse', 'YawResponse', (['(True)'], {}), '(True)\n', (10084, 10090), False, 'from gs_interfaces.srv import Yaw, YawResponse\n'), ((10253, 10270), 'gs_interfaces.srv.LedResponse', 'LedResponse', (['(True)'], {}), '(True)\n', (10264, 10270), False, 'from gs_interfaces.srv import Led, LedResponse\n'), ((10445, 10476), 'gs_interfaces.srv.NavigationSystemResponse', 'NavigationSystemResponse', (['"""LPS"""'], {}), "('LPS')\n", (10469, 10476), False, 'from gs_interfaces.srv import NavigationSystem, NavigationSystemResponse\n'), ((10592, 10625), 
'gs_interfaces.srv.SetNavigationSystemResponse', 'SetNavigationSystemResponse', (['(True)'], {}), '(True)\n', (10619, 10625), False, 'from gs_interfaces.srv import SetNavigationSystem, SetNavigationSystemResponse\n'), ((10732, 10758), 'gs_interfaces.srv.ParametersListResponse', 'ParametersListResponse', (['[]'], {}), '([])\n', (10754, 10758), False, 'from gs_interfaces.srv import ParametersList, ParametersListResponse\n'), ((10827, 10858), 'gs_interfaces.srv.SetParametersListResponse', 'SetParametersListResponse', (['(True)'], {}), '(True)\n', (10852, 10858), False, 'from gs_interfaces.srv import SetParametersList, SetParametersListResponse\n'), ((10929, 10964), 'rospy.loginfo', 'rospy.loginfo', (['"""Try to connect ..."""'], {}), "('Try to connect ...')\n", (10942, 10964), False, 'import rospy\n'), ((11044, 11087), 'rospy.loginfo', 'rospy.loginfo', (['"""Board start connect - done"""'], {}), "('Board start connect - done')\n", (11057, 11087), False, 'import rospy\n'), ((12155, 12184), 'rospy.search_param', 'rospy.search_param', (['"""start_x"""'], {}), "('start_x')\n", (12173, 12184), False, 'import rospy\n'), ((12295, 12324), 'rospy.search_param', 'rospy.search_param', (['"""start_y"""'], {}), "('start_y')\n", (12313, 12324), False, 'import rospy\n'), ((12447, 12476), 'rospy.search_param', 'rospy.search_param', (['"""start_z"""'], {}), "('start_z')\n", (12465, 12476), False, 'import rospy\n'), ((12663, 12682), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (12680, 12682), False, 'import rospy\n'), ((7251, 7262), 'time.sleep', 'sleep', (['(0.03)'], {}), '(0.03)\n', (7256, 7262), False, 'from time import sleep\n'), ((11193, 11213), 'gs_interfaces.msg.SimpleBatteryState', 'SimpleBatteryState', ([], {}), '()\n', (11211, 11213), False, 'from gs_interfaces.msg import SimpleBatteryState, OptVelocity, PointGPS, SatellitesGPS, Orientation\n'), ((11255, 11271), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (11269, 11271), False, 'import rospy\n'), ((11611, 11618), 'geometry_msgs.msg.Point', 'Point', ([], {}), '()\n', (11616, 11618), False, 'from geometry_msgs.msg import Point\n'), ((6415, 6426), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (6420, 6426), False, 'from time import sleep\n'), ((6689, 6700), 'time.sleep', 'sleep', (['time'], {}), '(time)\n', (6694, 6700), False, 'from time import sleep\n'), ((7760, 7771), 'time.sleep', 'sleep', (['(0.03)'], {}), '(0.03)\n', (7765, 7771), False, 'from time import sleep\n'), ((11412, 11419), 'geometry_msgs.msg.Point', 'Point', ([], {}), '()\n', (11417, 11419), False, 'from geometry_msgs.msg import Point\n'), ((11461, 11468), 'geometry_msgs.msg.Point', 'Point', ([], {}), '()\n', (11466, 11468), False, 'from geometry_msgs.msg import Point\n'), ((11517, 11530), 'gs_interfaces.msg.Orientation', 'Orientation', ([], {}), '()\n', (11528, 11530), False, 'from gs_interfaces.msg import SimpleBatteryState, OptVelocity, PointGPS, SatellitesGPS, Orientation\n'), ((9562, 9669), 'threading.Thread', 'Thread', ([], {'target': 'self.__go_to_point', 'args': '[request_position[0], request_position[1], request_position[2]]'}), '(target=self.__go_to_point, args=[request_position[0],\n request_position[1], request_position[2]])\n', (9568, 9669), False, 'from threading import Thread\n'), ((7814, 7826), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (7824, 7826), False, 'import rospy\n'), ((8648, 8679), 'threading.Thread', 'Thread', ([], {'target': 'self.__preflight'}), '(target=self.__preflight)\n', (8654, 8679), False, 'from threading import 
Thread\n'), ((8749, 8778), 'threading.Thread', 'Thread', ([], {'target': 'self.__takeoff'}), '(target=self.__takeoff)\n', (8755, 8778), False, 'from threading import Thread\n'), ((10023, 10045), 'math.degrees', 'degrees', (['request.angle'], {}), '(request.angle)\n', (10030, 10045), False, 'from math import degrees, sqrt\n'), ((8848, 8877), 'threading.Thread', 'Thread', ([], {'target': 'self.__landing'}), '(target=self.__landing)\n', (8854, 8877), False, 'from threading import Thread\n')] |
from __future__ import print_function
import argparse
import logging
import sys
from simplesat.dependency_solver import DependencySolver
from simplesat.pool import Pool
from simplesat.sat.policy import InstalledFirstPolicy
from simplesat.test_utils import Scenario
from simplesat.errors import SatisfiabilityError
def solve_and_print(request, remote_repositories, installed_repository,
print_ids, prune=True, prefer_installed=True, debug=0,
simple=False, strict=False):
pool = Pool(remote_repositories)
pool.add_repository(installed_repository)
policy = InstalledFirstPolicy(pool, installed_repository,
prefer_installed=prefer_installed)
solver = DependencySolver(
pool, remote_repositories, installed_repository,
policy=policy, use_pruning=prune, strict=strict)
fmt = "ELAPSED : {description:20} : {elapsed:e}"
try:
transaction = solver.solve(request)
if simple:
print(transaction.to_simple_string())
else:
print(transaction)
except SatisfiabilityError as e:
msg = "UNSATISFIABLE: {}"
print(msg.format(e.unsat.to_string(pool)))
print(e.unsat._find_requirement_time.pretty(fmt), file=sys.stderr)
if debug:
counts, hist = solver._policy._log_histogram()
print(hist, file=sys.stderr)
report = solver._policy._log_report(with_assignments=debug > 1)
print(report, file=sys.stderr)
print(solver._last_rules_time.pretty(fmt), file=sys.stderr)
print(solver._last_solver_init_time.pretty(fmt), file=sys.stderr)
print(solver._last_solve_time.pretty(fmt), file=sys.stderr)
def main(argv=None):
argv = argv or sys.argv[1:]
p = argparse.ArgumentParser()
p.add_argument("scenario", help="Path to the YAML scenario file.")
p.add_argument("--print-ids", action="store_true")
p.add_argument("--no-prune", dest="prune", action="store_false")
p.add_argument("--no-prefer-installed", dest="prefer_installed",
action="store_false")
p.add_argument("-d", "--debug", default=0, action="count")
p.add_argument("--simple", action="store_true",
help="Show a simpler description of the transaction.")
p.add_argument("--strict", action="store_true",
help="Use stricter error checking for package metadata.")
ns = p.parse_args(argv)
logging.basicConfig(
format=('%(asctime)s %(levelname)-8.8s [%(name)s:%(lineno)s]'
' %(message)s'),
datefmt='%Y-%m-%d %H:%M:%S',
level=('INFO', 'WARNING', 'DEBUG')[ns.debug])
scenario = Scenario.from_yaml(ns.scenario)
solve_and_print(scenario.request, scenario.remote_repositories,
scenario.installed_repository, ns.print_ids,
prune=ns.prune, prefer_installed=ns.prefer_installed,
debug=ns.debug, simple=ns.simple, strict=ns.strict)
if __name__ == '__main__':
main()
| [
"logging.basicConfig",
"argparse.ArgumentParser",
"simplesat.sat.policy.InstalledFirstPolicy",
"simplesat.dependency_solver.DependencySolver",
"simplesat.test_utils.Scenario.from_yaml",
"simplesat.pool.Pool"
] | [((525, 550), 'simplesat.pool.Pool', 'Pool', (['remote_repositories'], {}), '(remote_repositories)\n', (529, 550), False, 'from simplesat.pool import Pool\n'), ((611, 699), 'simplesat.sat.policy.InstalledFirstPolicy', 'InstalledFirstPolicy', (['pool', 'installed_repository'], {'prefer_installed': 'prefer_installed'}), '(pool, installed_repository, prefer_installed=\n prefer_installed)\n', (631, 699), False, 'from simplesat.sat.policy import InstalledFirstPolicy\n'), ((742, 861), 'simplesat.dependency_solver.DependencySolver', 'DependencySolver', (['pool', 'remote_repositories', 'installed_repository'], {'policy': 'policy', 'use_pruning': 'prune', 'strict': 'strict'}), '(pool, remote_repositories, installed_repository, policy=\n policy, use_pruning=prune, strict=strict)\n', (758, 861), False, 'from simplesat.dependency_solver import DependencySolver\n'), ((1772, 1797), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1795, 1797), False, 'import argparse\n'), ((2455, 2632), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(levelname)-8.8s [%(name)s:%(lineno)s] %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""', 'level': "('INFO', 'WARNING', 'DEBUG')[ns.debug]"}), "(format=\n '%(asctime)s %(levelname)-8.8s [%(name)s:%(lineno)s] %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S', level=('INFO', 'WARNING', 'DEBUG')[ns.debug])\n", (2474, 2632), False, 'import logging\n'), ((2686, 2717), 'simplesat.test_utils.Scenario.from_yaml', 'Scenario.from_yaml', (['ns.scenario'], {}), '(ns.scenario)\n', (2704, 2717), False, 'from simplesat.test_utils import Scenario\n')] |
'''
Minimal Flask + Flask-SQLAlchemy application exposing a single Person model.
'''
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:[email protected]:5432/example'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
class Person(db.Model):
__tablename__ = 'persons'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(), nullable=False)
def __repr__(self):
return f'<Person ID: {self.id}, name: {self.name}>'
db.create_all()
@app.route('/')
def index():
person = Person.query.first()
return 'Hello ' + person.name
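# Hedged usage sketch (not part of the original file): seed one row so that the
# index route has something to greet, then start the development server. The
# example name is arbitrary.
if __name__ == '__main__':
    if Person.query.first() is None:
        db.session.add(Person(name='Amy'))
        db.session.commit()
    app.run(debug=True)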
| [
"flask_sqlalchemy.SQLAlchemy",
"flask.Flask"
] | [((90, 105), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (95, 105), False, 'from flask import Flask\n'), ((260, 275), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (270, 275), False, 'from flask_sqlalchemy import SQLAlchemy\n')] |
'''
Author: <NAME>
Project: Jisho grabber
Description: Main script for taking in user input via the GUI and outputting
an Anki deck and json with all word information.
'''
import sys
import json
import os
import wx
import genanki
from search_gui import SearchFrame
from ankify import jisho_vocab, jisho_deck
if __name__ == '__main__':
# get paths for file dumping and make sure they exist
dumpsdir = os.path.join(os.getcwd(), 'vocab_dumps')
if not os.path.isdir(dumpsdir):
sys.exit("Error: vocab_dumps folder is missing or you are in the wrong" +\
"directory. Please try again.")
jsonpath = os.path.join(dumpsdir, 'vocab_words.json')
ankipath = os.path.join(dumpsdir, 'jisho_search_deck.apkg')
# create json if none exists
if not os.path.exists(jsonpath):
with open(jsonpath, 'w') as outfile:
json.dump([], outfile, indent=2)
# load existing vocab words
with open(jsonpath, 'r') as infile:
print("Loading json...")
all_vocab = json.load(infile)
print("Finished loading json.")
# run gui
app = wx.App()
frame = SearchFrame()
app.MainLoop()
# add new vocab to existing vocab and export as an Anki deck
all_vocab.extend(frame.fields_list)
fields_len = len(jisho_vocab.fields)
if len(all_vocab) > 0:
field_pad = fields_len - len(all_vocab[0])
else:
sys.exit("No vocab was added and no vocab existed in json. Exiting program.")
if field_pad < 0:
print("Warning: There are fewer note fields than there are fields for each vocab word.")
elif field_pad > 0:
print("Warning: There are more note fields than there are fields for each vocab word.")
for note_fields in all_vocab:
note_fields.extend([''] * field_pad)
new_note = genanki.Note(
model=jisho_vocab,
fields=note_fields[:fields_len]
)
jisho_deck.add_note(new_note)
if len(all_vocab) > 0:
genanki.Package(jisho_deck).write_to_file(ankipath)
with open(jsonpath, 'w') as outfile:
print("Dumping words into json...")
json.dump(all_vocab, outfile, indent=2)
print("Json has taken a dump. Or been dumped or whatever") | [
"os.path.exists",
"genanki.Note",
"ankify.jisho_deck.add_note",
"genanki.Package",
"os.path.join",
"os.getcwd",
"search_gui.SearchFrame",
"os.path.isdir",
"sys.exit",
"json.load",
"wx.App",
"json.dump"
] | [((694, 736), 'os.path.join', 'os.path.join', (['dumpsdir', '"""vocab_words.json"""'], {}), "(dumpsdir, 'vocab_words.json')\n", (706, 736), False, 'import os\n'), ((753, 801), 'os.path.join', 'os.path.join', (['dumpsdir', '"""jisho_search_deck.apkg"""'], {}), "(dumpsdir, 'jisho_search_deck.apkg')\n", (765, 801), False, 'import os\n'), ((1186, 1194), 'wx.App', 'wx.App', ([], {}), '()\n', (1192, 1194), False, 'import wx\n'), ((1208, 1221), 'search_gui.SearchFrame', 'SearchFrame', ([], {}), '()\n', (1219, 1221), False, 'from search_gui import SearchFrame\n'), ((478, 489), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (487, 489), False, 'import os\n'), ((518, 541), 'os.path.isdir', 'os.path.isdir', (['dumpsdir'], {}), '(dumpsdir)\n', (531, 541), False, 'import os\n'), ((552, 662), 'sys.exit', 'sys.exit', (["('Error: vocab_dumps folder is missing or you are in the wrong' +\n 'directory. Please try again.')"], {}), "('Error: vocab_dumps folder is missing or you are in the wrong' +\n 'directory. Please try again.')\n", (560, 662), False, 'import sys\n'), ((850, 874), 'os.path.exists', 'os.path.exists', (['jsonpath'], {}), '(jsonpath)\n', (864, 874), False, 'import os\n'), ((1099, 1116), 'json.load', 'json.load', (['infile'], {}), '(infile)\n', (1108, 1116), False, 'import json\n'), ((1493, 1571), 'sys.exit', 'sys.exit', (['"""No vocab was added and no vocab existed in json. Exiting program."""'], {}), "('No vocab was added and no vocab existed in json. Exiting program.')\n", (1501, 1571), False, 'import sys\n'), ((1916, 1980), 'genanki.Note', 'genanki.Note', ([], {'model': 'jisho_vocab', 'fields': 'note_fields[:fields_len]'}), '(model=jisho_vocab, fields=note_fields[:fields_len])\n', (1928, 1980), False, 'import genanki\n'), ((2027, 2056), 'ankify.jisho_deck.add_note', 'jisho_deck.add_note', (['new_note'], {}), '(new_note)\n', (2046, 2056), False, 'from ankify import jisho_vocab, jisho_deck\n'), ((2244, 2283), 'json.dump', 'json.dump', (['all_vocab', 'outfile'], {'indent': '(2)'}), '(all_vocab, outfile, indent=2)\n', (2253, 2283), False, 'import json\n'), ((935, 967), 'json.dump', 'json.dump', (['[]', 'outfile'], {'indent': '(2)'}), '([], outfile, indent=2)\n', (944, 967), False, 'import json\n'), ((2094, 2121), 'genanki.Package', 'genanki.Package', (['jisho_deck'], {}), '(jisho_deck)\n', (2109, 2121), False, 'import genanki\n')] |
"""
This module contains the class OffloadingCommon, which is the base class of all algorithms (benchmarks, cco and decor).
OffloadingCommon defines several points in a computation offloading problem.
[--
In order to avoid multiple inheritance, CcoAlgorithm only inherits from Racos. Similar methods and properties are
copied from OffloadingCommon, which are marked by annotations.
--]
Author:
<NAME>, <NAME>
"""
from cross_edge_offloading.utils.tool_function import ToolFunction
import numpy as np
import random
class OffloadingCommon(object):
"""
This class contains several points in a computation offloading problem, including:
(1) the objective function of the cross-edge computation offloading problem;
(2) the solution of the problem (edge_selection, harvested_energys).
"""
def __init__(self, parameter):
"""
Initialize key parameters in offloading problems of one time slot.
:param parameter: the instance of class Parameter
"""
self.__parameter = parameter
# =============================== state information ===============================
self.__battery_energy_levels = np.repeat(parameter.get_perturbation_para() / 2, parameter.get_user_num())
self.__virtual_energy_levels = self.__battery_energy_levels - \
np.repeat(parameter.get_perturbation_para(), parameter.get_user_num())
# =============================== independent variables ===============================
# edge_selections is a list with every element (edge_selection) being a numpy array,
# which is the feasible solution (independent var) of the problem $\mathcal{P}_2^{es}$
# 'self.edge_selections' stores the final optimal solution
self.__edge_selections = []
self.__harvested_energys = []
def obtain_time_consumption(self, division, edge_selection, channel_power_gains):
"""
Calculate the time consumption on transmission and edge execution for one mobile device.
:param division: the number of chosen edge sites (not zero)
:param edge_selection: the edge selection decision of one mobile devices
        :param channel_power_gains: the channel power gains of one mobile device to every connectable server
:return: the time consumption on transmission and edge execution
"""
parameter = self.get_parameter()
transmit_times = ToolFunction.obtain_transmit_times(division, edge_selection, parameter, channel_power_gains)
edge_exe_times = ToolFunction.obtain_edge_exe_times(division, parameter)
edge_times = transmit_times + edge_exe_times
time_consumption = max(edge_times) + parameter.get_local_exe_time() + parameter.get_coordinate_cost() * division
return time_consumption
def obtain_overall_costs(self, edge_selections):
"""
Calculate the overall costs, which is the sum of cost of each mobile device.
:param edge_selections: the edge selection decisions for all mobile devices
:return: overall costs
"""
parameter = self.get_parameter()
overall_costs = 0
for i in range(parameter.get_user_num()):
if parameter.get_task_requests()[i] == 1:
division = int(sum(edge_selections[i]))
if division:
# cost = self.obtain_time_consumption(
# division, edge_selections[i], parameter.get_connectable_gains[i])
transmit_times = ToolFunction.obtain_transmit_times(division, edge_selections[i], parameter,
parameter.get_connectable_gains()[i])
edge_exe_times = ToolFunction.obtain_edge_exe_times(division, parameter)
edge_times = transmit_times + edge_exe_times
cost = max(edge_times) + parameter.get_local_exe_time() + parameter.get_coordinate_cost() * division
else:
cost = parameter.get_drop_penalty()
else:
cost = 0
overall_costs += cost
return overall_costs
def obtain_edge_selections(self):
"""
Obtain the feasible solution with random policy.
        :return: edge_selections, where every row denotes a mobile device that has a task request
"""
parameter = self.get_parameter()
# first initialize with zero
edge_selections = []
for i in range(parameter.get_user_num()):
edge_selection = np.repeat(0, len(parameter.get_connectable_servers()[i]))
edge_selections.append(edge_selection)
# for every edge site, generate a random integer with [0, max_assign], and distribute connections to
# connectable mobile devices
for j in range(parameter.get_server_num()):
assign_num = random.randint(0, parameter.get_max_assign())
connectable_user_num = len(parameter.get_connectable_users()[j])
if assign_num >= connectable_user_num:
# every mobile device in it can be chosen
for i in range(connectable_user_num):
user_index = parameter.get_connectable_users()[j][i]
edge_index = list.index(parameter.get_connectable_servers()[user_index], j)
edge_selections[user_index][edge_index] = 1
else:
# randomly choose assign_num users to distribute j's computation capacity
user_indices = random.sample(parameter.get_connectable_users()[j], assign_num)
for i in range(len(user_indices)):
user_index = user_indices[i]
edge_index = list.index(parameter.get_connectable_servers()[user_index], j)
edge_selections[user_index][edge_index] = 1
# set those mobile devices who do not have task request to [0, 0, ..., 0]
# we can not delete them from the list because every row is the index of the corresponding mobile device
for i in range(parameter.get_user_num()):
if parameter.get_task_requests()[i] == 0:
edge_selections[i] = np.zeros(len(edge_selections[i]))
else:
division = int(sum(edge_selections[i]))
if division:
times = self.obtain_time_consumption(division, edge_selections[i],
parameter.get_connectable_gains()[i])
energys = ToolFunction.obtain_transmit_energy(division, edge_selections[i], parameter,
parameter.get_connectable_gains()[i])
# satisfy the constraint
if times >= parameter.get_ddl() or energys > self.__battery_energy_levels[i]:
edge_selections[i] = np.zeros(len(edge_selections[i]))
return edge_selections
def obtain_harvested_energys(self):
"""
        Randomly choose an energy value in $[0, E_i^H]$ for every mobile device and return the resulting list.
:return: no return
"""
parameter = self.get_parameter()
return list(map(random.uniform, [0] * parameter.get_user_num(), parameter.get_harvestable_energys()))
def update_energy_levels(self):
"""
Update the cost & virtual energy levels according to the involution expression \eqref{10}.
:return: no return
"""
parameter = self.get_parameter()
for i in range(parameter.get_user_num()):
division = int(sum(self.__edge_selections[i]))
if division:
self.__battery_energy_levels[i] = self.__battery_energy_levels[i] + \
self.__harvested_energys[i] - ToolFunction.obtain_transmit_energy(
division, self.__edge_selections[i], parameter, parameter.get_connectable_gains()[i]) - \
parameter.get_local_exe_energy()
else:
# check whether need to minus local_exe_energys
# if self.__battery_energy_levels[i] < parameter.get_local_exe_energy():
# self.__battery_energy_levels[i] = self.__battery_energy_levels[i] + self.__harvested_energys[i]
# else:
# self.__battery_energy_levels[i] = self.__battery_energy_levels[i] + \
# self.__harvested_energys[i] - parameter.get_local_exe_energy()
self.__battery_energy_levels[i] = self.__battery_energy_levels[i] + self.__harvested_energys[i]
self.__virtual_energy_levels[i] = self.__battery_energy_levels[i] - parameter.get_perturbation_para()
def get_parameter(self):
return self.__parameter
def get_battery_energy_levels(self):
return self.__battery_energy_levels
def get_virtual_energy_levels(self):
return self.__virtual_energy_levels
def get_harvested_energys(self):
return self.__harvested_energys
def set_harvested_energys(self, harvested_energys):
self.__harvested_energys = harvested_energys
def get_edge_selections(self):
return self.__edge_selections
def set_edge_selections(self, edge_selections):
self.__edge_selections = edge_selections
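# Hedged usage sketch (not part of the original module): one simulated time slot
# with the random policy implemented above. Building `parameter` (an instance of
# the project's Parameter class) is omitted because its interface is defined
# elsewhere; only methods defined in this file are used.
def run_one_slot(parameter):
    algorithm = OffloadingCommon(parameter)
    algorithm.set_harvested_energys(algorithm.obtain_harvested_energys())
    algorithm.set_edge_selections(algorithm.obtain_edge_selections())
    overall_costs = algorithm.obtain_overall_costs(algorithm.get_edge_selections())
    algorithm.update_energy_levels()
    return overall_costs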
| [
"cross_edge_offloading.utils.tool_function.ToolFunction.obtain_transmit_times",
"cross_edge_offloading.utils.tool_function.ToolFunction.obtain_edge_exe_times"
] | [((2436, 2532), 'cross_edge_offloading.utils.tool_function.ToolFunction.obtain_transmit_times', 'ToolFunction.obtain_transmit_times', (['division', 'edge_selection', 'parameter', 'channel_power_gains'], {}), '(division, edge_selection, parameter,\n channel_power_gains)\n', (2470, 2532), False, 'from cross_edge_offloading.utils.tool_function import ToolFunction\n'), ((2554, 2609), 'cross_edge_offloading.utils.tool_function.ToolFunction.obtain_edge_exe_times', 'ToolFunction.obtain_edge_exe_times', (['division', 'parameter'], {}), '(division, parameter)\n', (2588, 2609), False, 'from cross_edge_offloading.utils.tool_function import ToolFunction\n'), ((3762, 3817), 'cross_edge_offloading.utils.tool_function.ToolFunction.obtain_edge_exe_times', 'ToolFunction.obtain_edge_exe_times', (['division', 'parameter'], {}), '(division, parameter)\n', (3796, 3817), False, 'from cross_edge_offloading.utils.tool_function import ToolFunction\n')] |
import os
import nox
@nox.session(venv_backend="none")
def ansible_lint(session):
session.run("ansible-lint")
@nox.session(venv_backend="none")
@nox.parametrize(
"playbook",
sorted(playbook.name for playbook in os.scandir("playbooks") if playbook.is_file()),
)
def ansible_syntax(session, playbook):
session.run("ansible-playbook", "--syntax-check", f"playbooks/{playbook}")
@nox.session(venv_backend="none")
def yamllint(session):
session.run("yamllint", ".")
| [
"os.scandir",
"nox.session"
] | [((25, 57), 'nox.session', 'nox.session', ([], {'venv_backend': '"""none"""'}), "(venv_backend='none')\n", (36, 57), False, 'import nox\n'), ((120, 152), 'nox.session', 'nox.session', ([], {'venv_backend': '"""none"""'}), "(venv_backend='none')\n", (131, 152), False, 'import nox\n'), ((399, 431), 'nox.session', 'nox.session', ([], {'venv_backend': '"""none"""'}), "(venv_backend='none')\n", (410, 431), False, 'import nox\n'), ((228, 251), 'os.scandir', 'os.scandir', (['"""playbooks"""'], {}), "('playbooks')\n", (238, 251), False, 'import os\n')] |
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import F
from impact.v1.helpers.criterion_helper import CriterionHelper
class JudgeCriterionHelper(CriterionHelper):
'''Helper for JudgeCriteria. JudgeCriteria specify a feature which
all applications are "looking for", for example "All startups should be
read by at least one female judge".
JudgeCriteria require one CriterionOptionSpec for each feature sought, for
example, "one female judge and one male judge" would require two
OptionSpecs. Weight and count can be set independently for each option.
'''
def option_for_field(self, field):
return field
def judge_matches_option(self, judge_data, option):
return option == judge_data.get(self.cache_key)
def analysis_fields(self):
return [
self.judge_field,
]
def analysis_annotate_fields(self):
return {
self.cache_key: F(self.judge_field),
}
| [
"django.db.models.F"
] | [((965, 984), 'django.db.models.F', 'F', (['self.judge_field'], {}), '(self.judge_field)\n', (966, 984), False, 'from django.db.models import F\n')] |
import json
import aiohttp
import asyncio
from datetime import datetime
from .const import (
API_URI,
API_VERSION
)
from .errors import InvalidPasswordOrEmail, UnexpectedResponse
class Api:
"""The class to handle communicating with the api"""
def __init__(self, auth, base_url=API_URI + "/v" + API_VERSION + "/gizwits/"):
"""
constructor
"""
self.auth = auth
self.base_url = base_url
async def send_command(self, command, query_string={}):
async with aiohttp.ClientSession() as session:
async with session.post(self.base_url+command, data=self.auth,params=query_string) as r:
if r.status == 200:
return await r.json()
if r.status == 403:
raise InvalidPasswordOrEmail((await r.json())["errors"])
r.raise_for_status()
raise UnexpectedResponse(await r.text())
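# Hedged usage sketch (not part of the original module): the shape of `auth` and
# the command string depend on the concrete Gizwits endpoint, so they are left
# as placeholders here.
async def example_call(auth, command, params=None):
    api = Api(auth)
    return await api.send_command(command, query_string=params or {})
# From synchronous code this could be driven with, e.g.:
# asyncio.run(example_call(auth={...}, command="..."))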
| [
"aiohttp.ClientSession"
] | [((527, 550), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (548, 550), False, 'import aiohttp\n')] |
from requests import get as rget
from re import compile as reg_compile
def get_word_freq(inputs, init_data):
url = init_data.get("url")
regex = reg_compile('[^a-zA-Z:-]')
data = rget(url).text
word_dict = {}
for word in data.split():
try:
word.encode("ascii")
except UnicodeEncodeError:
continue
word = word.lower()
if word.endswith(',') or word.endswith('.'):
word = word[:-1]
if regex.search(word):
continue
if word in word_dict:
word_dict[word] += 1
else:
word_dict[word] = 1
for word in inputs:
freq = word_dict.get(word, 0)
print("{0} : {1}".format(word, freq))
| [
"requests.get",
"re.compile"
] | [((153, 179), 're.compile', 'reg_compile', (['"""[^a-zA-Z:-]"""'], {}), "('[^a-zA-Z:-]')\n", (164, 179), True, 'from re import compile as reg_compile\n'), ((191, 200), 'requests.get', 'rget', (['url'], {}), '(url)\n', (195, 200), True, 'from requests import get as rget\n')] |
import numpy as np
import tensorflow as tf
class MutualInformation(object):
"""
Compute mutual-information-based metrics.
Inspired by https://github.com/airlab-unibas/airlab/blob/master/airlab/loss/pairwise.py.
"""
def __init__(self, n_bins=64, sigma=3, **kwargs):
self.n_bins = n_bins
self.sigma = 2*sigma**2
self.kwargs = kwargs
self.eps = kwargs.pop('eps', 1e-10)
self.win = kwargs.pop('win', 7); assert self.win % 2 == 1 # window size for local metrics
self._normalizer_1d = np.sqrt(2.0 * np.pi) * sigma
self._normalizer_2d = 2.0 * np.pi * sigma ** 2
self.background_method = kwargs.pop('background_method', 'min')
if self.background_method is None:
self.background_value = kwargs.pop('background_value')
def _compute_marginal_entropy(self, values, bins):
"""
Compute the marginal entropy using Parzen window estimation.
:param values: a tensor of shape [n_batch, *vol_shape, channels]
:param bins: a tensor of shape [n_bins, 1]
:return: entropy - the marginal entropy;
p - the probability distribution
"""
p = tf.math.exp(-(tf.math.square(tf.reshape(tf.reduce_mean(values, axis=-1), [-1]) - bins) / self.sigma)) / self._normalizer_1d
p_norm = tf.reduce_mean(p, axis=1)
p_norm = p_norm / (tf.reduce_sum(p_norm) + self.eps)
entropy = - tf.reduce_sum(p_norm * tf.math.log(p_norm + self.eps))
return entropy, p
def mi(self, target, source):
"""
        Compute mutual information: I(target, source) = H(target) + H(source) - H(target, source).
        Note that the negative value is returned, so it can be minimized directly as a loss.
:param target:
:param source:
:return:
"""
if self.background_method == 'min':
background_fixed = tf.reduce_min(target)
background_moving = tf.reduce_min(source)
elif self.background_method == 'mean':
background_fixed = tf.reduce_mean(target)
background_moving = tf.reduce_mean(source)
elif self.background_method is None:
background_fixed = self.background_value
background_moving = self.background_value
else:
raise NotImplementedError
bins_target = tf.expand_dims(tf.linspace(background_fixed, tf.reduce_max(target), self.n_bins), axis=-1)
bins_source = tf.expand_dims(tf.linspace(background_moving, tf.reduce_max(source), self.n_bins), axis=-1)
# TODO: add masks
# Compute marginal entropy
entropy_target, p_t = self._compute_marginal_entropy(target, bins_target)
entropy_source, p_s = self._compute_marginal_entropy(source, bins_source)
# compute joint entropy
p_joint = tf.matmul(p_t, tf.transpose(p_s, perm=[1, 0])) / self._normalizer_2d
p_joint = p_joint / (tf.reduce_sum(p_joint) + self.eps)
entropy_joint = - tf.reduce_sum(p_joint * tf.math.log(p_joint + self.eps))
return -(entropy_target + entropy_source - entropy_joint)
def nmi(self, target, source):
"""
Compute normalized mutual information: NMI(target, source) = (H(target) + H(source)) / H(target, source).
:param target:
:param source:
:return:
"""
if self.background_method == 'min':
background_fixed = tf.reduce_min(target)
background_moving = tf.reduce_min(source)
elif self.background_method == 'mean':
background_fixed = tf.reduce_mean(target)
background_moving = tf.reduce_mean(source)
elif self.background_method is None:
background_fixed = self.background_value
background_moving = self.background_value
else:
raise NotImplementedError
bins_target = tf.expand_dims(tf.linspace(background_fixed, tf.reduce_max(target), self.n_bins), axis=-1)
bins_source = tf.expand_dims(tf.linspace(background_moving, tf.reduce_max(source), self.n_bins), axis=-1)
# TODO: add masks
# Compute marginal entropy
entropy_target, p_t = self._compute_marginal_entropy(target, bins_target)
entropy_source, p_s = self._compute_marginal_entropy(source, bins_source)
# compute joint entropy
p_joint = tf.matmul(p_t, tf.transpose(p_s, perm=[1, 0])) / self._normalizer_2d
p_joint = p_joint / (tf.reduce_sum(p_joint) + self.eps)
entropy_joint = - tf.reduce_sum(p_joint * tf.math.log(p_joint + self.eps))
return -(entropy_target + entropy_source) / (entropy_joint + self.eps)
def _normalize(self, data):
data -= tf.reduce_min(data)
data /= (tf.reduce_max(data) + self.eps)
return data
def ecc(self, target, source):
"""
Compute entropy correlation coefficient: ECC(target, source) = 2 - 2 / NMI(target, source).
:param target:
:param source:
:return:
"""
return 2 - 2 / (self.nmi(target, source) + self.eps)
def ce(self, target, source):
"""
Compute conditional entropy: H(target|source) = H(target, source) - H(source).
:param target:
:param source:
:return:
"""
if self.background_method == 'min':
background_fixed = tf.reduce_min(target)
background_moving = tf.reduce_min(source)
elif self.background_method == 'mean':
background_fixed = tf.reduce_mean(target)
background_moving = tf.reduce_mean(source)
elif self.background_method is None:
background_fixed = self.background_value
background_moving = self.background_value
else:
raise NotImplementedError
bins_target = tf.expand_dims(tf.linspace(background_fixed, tf.reduce_max(target), self.n_bins), axis=-1)
bins_source = tf.expand_dims(tf.linspace(background_moving, tf.reduce_max(source), self.n_bins), axis=-1)
# TODO: add masks
# Compute marginal entropy
entropy_target, p_t = self._compute_marginal_entropy(target, bins_target)
entropy_source, p_s = self._compute_marginal_entropy(source, bins_source)
# compute joint entropy
p_joint = tf.matmul(p_t, tf.transpose(p_s, perm=[1, 0])) / self._normalizer_2d
p_joint = p_joint / (tf.reduce_sum(p_joint) + self.eps)
entropy_joint = - tf.reduce_sum(p_joint * tf.math.log(p_joint + self.eps))
return entropy_joint - entropy_source | [
"tensorflow.reduce_min",
"numpy.sqrt",
"tensorflow.transpose",
"tensorflow.math.log",
"tensorflow.reduce_sum",
"tensorflow.reduce_max",
"tensorflow.reduce_mean"
] | [((1343, 1368), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['p'], {'axis': '(1)'}), '(p, axis=1)\n', (1357, 1368), True, 'import tensorflow as tf\n'), ((4749, 4768), 'tensorflow.reduce_min', 'tf.reduce_min', (['data'], {}), '(data)\n', (4762, 4768), True, 'import tensorflow as tf\n'), ((551, 571), 'numpy.sqrt', 'np.sqrt', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (558, 571), True, 'import numpy as np\n'), ((1874, 1895), 'tensorflow.reduce_min', 'tf.reduce_min', (['target'], {}), '(target)\n', (1887, 1895), True, 'import tensorflow as tf\n'), ((1928, 1949), 'tensorflow.reduce_min', 'tf.reduce_min', (['source'], {}), '(source)\n', (1941, 1949), True, 'import tensorflow as tf\n'), ((3457, 3478), 'tensorflow.reduce_min', 'tf.reduce_min', (['target'], {}), '(target)\n', (3470, 3478), True, 'import tensorflow as tf\n'), ((3511, 3532), 'tensorflow.reduce_min', 'tf.reduce_min', (['source'], {}), '(source)\n', (3524, 3532), True, 'import tensorflow as tf\n'), ((4786, 4805), 'tensorflow.reduce_max', 'tf.reduce_max', (['data'], {}), '(data)\n', (4799, 4805), True, 'import tensorflow as tf\n'), ((5408, 5429), 'tensorflow.reduce_min', 'tf.reduce_min', (['target'], {}), '(target)\n', (5421, 5429), True, 'import tensorflow as tf\n'), ((5462, 5483), 'tensorflow.reduce_min', 'tf.reduce_min', (['source'], {}), '(source)\n', (5475, 5483), True, 'import tensorflow as tf\n'), ((1396, 1417), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['p_norm'], {}), '(p_norm)\n', (1409, 1417), True, 'import tensorflow as tf\n'), ((2028, 2050), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['target'], {}), '(target)\n', (2042, 2050), True, 'import tensorflow as tf\n'), ((2083, 2105), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['source'], {}), '(source)\n', (2097, 2105), True, 'import tensorflow as tf\n'), ((2378, 2399), 'tensorflow.reduce_max', 'tf.reduce_max', (['target'], {}), '(target)\n', (2391, 2399), True, 'import tensorflow as tf\n'), ((2492, 2513), 'tensorflow.reduce_max', 'tf.reduce_max', (['source'], {}), '(source)\n', (2505, 2513), True, 'import tensorflow as tf\n'), ((2831, 2861), 'tensorflow.transpose', 'tf.transpose', (['p_s'], {'perm': '[1, 0]'}), '(p_s, perm=[1, 0])\n', (2843, 2861), True, 'import tensorflow as tf\n'), ((2914, 2936), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['p_joint'], {}), '(p_joint)\n', (2927, 2936), True, 'import tensorflow as tf\n'), ((3611, 3633), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['target'], {}), '(target)\n', (3625, 3633), True, 'import tensorflow as tf\n'), ((3666, 3688), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['source'], {}), '(source)\n', (3680, 3688), True, 'import tensorflow as tf\n'), ((3961, 3982), 'tensorflow.reduce_max', 'tf.reduce_max', (['target'], {}), '(target)\n', (3974, 3982), True, 'import tensorflow as tf\n'), ((4075, 4096), 'tensorflow.reduce_max', 'tf.reduce_max', (['source'], {}), '(source)\n', (4088, 4096), True, 'import tensorflow as tf\n'), ((4414, 4444), 'tensorflow.transpose', 'tf.transpose', (['p_s'], {'perm': '[1, 0]'}), '(p_s, perm=[1, 0])\n', (4426, 4444), True, 'import tensorflow as tf\n'), ((4497, 4519), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['p_joint'], {}), '(p_joint)\n', (4510, 4519), True, 'import tensorflow as tf\n'), ((5562, 5584), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['target'], {}), '(target)\n', (5576, 5584), True, 'import tensorflow as tf\n'), ((5617, 5639), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['source'], {}), '(source)\n', (5631, 5639), True, 'import tensorflow as tf\n'), ((5912, 5933), 
'tensorflow.reduce_max', 'tf.reduce_max', (['target'], {}), '(target)\n', (5925, 5933), True, 'import tensorflow as tf\n'), ((6026, 6047), 'tensorflow.reduce_max', 'tf.reduce_max', (['source'], {}), '(source)\n', (6039, 6047), True, 'import tensorflow as tf\n'), ((6365, 6395), 'tensorflow.transpose', 'tf.transpose', (['p_s'], {'perm': '[1, 0]'}), '(p_s, perm=[1, 0])\n', (6377, 6395), True, 'import tensorflow as tf\n'), ((6448, 6470), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['p_joint'], {}), '(p_joint)\n', (6461, 6470), True, 'import tensorflow as tf\n'), ((1473, 1503), 'tensorflow.math.log', 'tf.math.log', (['(p_norm + self.eps)'], {}), '(p_norm + self.eps)\n', (1484, 1503), True, 'import tensorflow as tf\n'), ((3000, 3031), 'tensorflow.math.log', 'tf.math.log', (['(p_joint + self.eps)'], {}), '(p_joint + self.eps)\n', (3011, 3031), True, 'import tensorflow as tf\n'), ((4583, 4614), 'tensorflow.math.log', 'tf.math.log', (['(p_joint + self.eps)'], {}), '(p_joint + self.eps)\n', (4594, 4614), True, 'import tensorflow as tf\n'), ((6534, 6565), 'tensorflow.math.log', 'tf.math.log', (['(p_joint + self.eps)'], {}), '(p_joint + self.eps)\n', (6545, 6565), True, 'import tensorflow as tf\n'), ((1242, 1273), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['values'], {'axis': '(-1)'}), '(values, axis=-1)\n', (1256, 1273), True, 'import tensorflow as tf\n')] |
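A small usage sketch for the class above, assuming it is in scope and TensorFlow 2.x eager execution; the tensor shapes and noise level are made-up toy values. Note that mi() and nmi() return negated scores so they can be minimised as losses, so a more informative pairing gives a more negative number.

import numpy as np
import tensorflow as tf

# Toy volumes of shape [n_batch, *vol_shape, channels]
target = tf.constant(np.random.rand(1, 16, 16, 16, 1), dtype=tf.float32)
source = target + tf.constant(0.05 * np.random.rand(1, 16, 16, 16, 1), dtype=tf.float32)

metric = MutualInformation(n_bins=32, sigma=1.5)
print("negative MI :", float(metric.mi(target, source)))
print("negative NMI:", float(metric.nmi(target, source)))
print("ECC         :", float(metric.ecc(target, source)))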
import copy
from delairstack.core.config import ConnectionConfig
from delairstack.core.errors import ConfigError
from delairstack.core.utils.utils import (new_instance, flatten_dict, find, dict_merge)
from tests.delairstacktest import DelairStackTestBase
d1 = {
'a': {
'b': {
'c': 'd'
}
}
}
d2 = {
'a': {
'b': {
'c': 'd',
'e': 'f'
}
}
}
d3 = {
'a': {
'b': {
'c': 'g'
}
}
}
class TestUtils(DelairStackTestBase):
"""Tests for utilities.
"""
def test_new_instance_existing(self):
"""Test instance creation for existing module and class."""
instance = new_instance('delairstack.core.config', 'ConnectionConfig')
self.assertNotEqual(instance, None)
self.assertIsInstance(instance, ConnectionConfig)
def test_new_instance_non_existing(self):
"""Test instance creation for non existing module or class."""
with self.assertRaises(ConfigError):
new_instance('blabla', class_name=None)
with self.assertRaises(ConfigError):
new_instance(module_path=None, class_name=None)
with self.assertRaises(ConfigError):
new_instance('delairstack.core.config', 'FakeClass')
with self.assertRaises(ConfigError):
new_instance('delairstack.fake_module', 'TestClass')
def test_flatten(self):
res = flatten_dict(copy.deepcopy(d1))
self.assertEqual(res['a.b.c'], 'd')
d = {}
res = flatten_dict(d)
self.assertEqual(len(res), 0)
d = {
'a': 'b'
}
res = flatten_dict(d)
self.assertEqual(res['a'], 'b')
def test_find(self):
self.assertEqual(find(copy.deepcopy(d1), 'a.b.c'), 'd')
def test_merge_dict(self):
res = dict_merge(copy.deepcopy(d1), copy.deepcopy(d2))
self.assertEqual(res, {'a': {'b': {'c': 'd', 'e': 'f'}}})
def test_merge_dict_in_place(self):
h = copy.deepcopy(d1)
dict_merge(h, {'a': {'y':2}})
self.assertEqual(h, {'a': {'b': {'c': 'd'}, 'y': 2}})
def test_merge_dict_with_add_keys(self):
res = dict_merge(copy.deepcopy(d1), copy.deepcopy(d2), add_keys=False)
self.assertEqual(res, {'a': {'b': {'c': 'd'}}})
| [
"delairstack.core.utils.utils.flatten_dict",
"delairstack.core.utils.utils.dict_merge",
"copy.deepcopy",
"delairstack.core.utils.utils.new_instance"
] | [((741, 800), 'delairstack.core.utils.utils.new_instance', 'new_instance', (['"""delairstack.core.config"""', '"""ConnectionConfig"""'], {}), "('delairstack.core.config', 'ConnectionConfig')\n", (753, 800), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1595, 1610), 'delairstack.core.utils.utils.flatten_dict', 'flatten_dict', (['d'], {}), '(d)\n', (1607, 1610), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1714, 1729), 'delairstack.core.utils.utils.flatten_dict', 'flatten_dict', (['d'], {}), '(d)\n', (1726, 1729), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((2074, 2091), 'copy.deepcopy', 'copy.deepcopy', (['d1'], {}), '(d1)\n', (2087, 2091), False, 'import copy\n'), ((2100, 2130), 'delairstack.core.utils.utils.dict_merge', 'dict_merge', (['h', "{'a': {'y': 2}}"], {}), "(h, {'a': {'y': 2}})\n", (2110, 2130), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1078, 1117), 'delairstack.core.utils.utils.new_instance', 'new_instance', (['"""blabla"""'], {'class_name': 'None'}), "('blabla', class_name=None)\n", (1090, 1117), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1176, 1223), 'delairstack.core.utils.utils.new_instance', 'new_instance', ([], {'module_path': 'None', 'class_name': 'None'}), '(module_path=None, class_name=None)\n', (1188, 1223), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1282, 1334), 'delairstack.core.utils.utils.new_instance', 'new_instance', (['"""delairstack.core.config"""', '"""FakeClass"""'], {}), "('delairstack.core.config', 'FakeClass')\n", (1294, 1334), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1393, 1445), 'delairstack.core.utils.utils.new_instance', 'new_instance', (['"""delairstack.fake_module"""', '"""TestClass"""'], {}), "('delairstack.fake_module', 'TestClass')\n", (1405, 1445), False, 'from delairstack.core.utils.utils import new_instance, flatten_dict, find, dict_merge\n'), ((1502, 1519), 'copy.deepcopy', 'copy.deepcopy', (['d1'], {}), '(d1)\n', (1515, 1519), False, 'import copy\n'), ((1917, 1934), 'copy.deepcopy', 'copy.deepcopy', (['d1'], {}), '(d1)\n', (1930, 1934), False, 'import copy\n'), ((1936, 1953), 'copy.deepcopy', 'copy.deepcopy', (['d2'], {}), '(d2)\n', (1949, 1953), False, 'import copy\n'), ((2263, 2280), 'copy.deepcopy', 'copy.deepcopy', (['d1'], {}), '(d1)\n', (2276, 2280), False, 'import copy\n'), ((2282, 2299), 'copy.deepcopy', 'copy.deepcopy', (['d2'], {}), '(d2)\n', (2295, 2299), False, 'import copy\n'), ((1826, 1843), 'copy.deepcopy', 'copy.deepcopy', (['d1'], {}), '(d1)\n', (1839, 1843), False, 'import copy\n')] |
from conans.test.utils.tools import TestClient
def test_cmake_lib_template():
client = TestClient(path_with_spaces=False)
client.run("new hello/0.1 --template=cmake_lib")
# Local flow works
client.run("install . -if=install")
client.run("build . -if=install")
# Create works
client.run("create .")
assert "hello/0.1: Hello World Release!" in client.out
client.run("create . -s build_type=Debug")
assert "hello/0.1: Hello World Debug!" in client.out
# Create + shared works
client.run("create . -o hello:shared=True")
assert "hello/0.1: Hello World Release!" in client.out
def test_cmake_exe_template():
client = TestClient(path_with_spaces=False)
client.run("new greet/0.1 --template=cmake_exe")
# Local flow works
client.run("install . -if=install")
client.run("build . -if=install")
# Create works
client.run("create .")
assert "greet/0.1: Hello World Release!" in client.out
client.run("create . -s build_type=Debug")
assert "greet/0.1: Hello World Debug!" in client.out
| [
"conans.test.utils.tools.TestClient"
] | [((93, 127), 'conans.test.utils.tools.TestClient', 'TestClient', ([], {'path_with_spaces': '(False)'}), '(path_with_spaces=False)\n', (103, 127), False, 'from conans.test.utils.tools import TestClient\n'), ((675, 709), 'conans.test.utils.tools.TestClient', 'TestClient', ([], {'path_with_spaces': '(False)'}), '(path_with_spaces=False)\n', (685, 709), False, 'from conans.test.utils.tools import TestClient\n')] |
import pyblish.api
class IntegrateStarterAsset(pyblish.api.InstancePlugin):
"""Move user data to shared location
This plug-in exposes your data to others by encapsulating it
into a new version.
Schema:
Data is written in the following format.
____________________
| |
| version |
| ________________ |
| | | |
| | representation | |
| |________________| |
| | | |
| | ... | |
| |________________| |
|____________________|
"""
label = "Starter Asset"
order = pyblish.api.IntegratorOrder
families = [
"starter.model",
"starter.rig",
"starter.animation"
]
def process(self, instance):
import os
import json
import errno
import shutil
from pyblish_starter import api
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
if not all(result["success"] for result in context.data["results"]):
raise Exception("Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, (
"Incomplete instance \"%s\": "
"Missing reference to staging area."
% instance
)
root = context.data["workspaceDir"]
instancedir = os.path.join(root, "shared", instance.data["name"])
try:
os.makedirs(instancedir)
except OSError as e:
if e.errno != errno.EEXIST: # Already exists
self.log.critical("An unexpected error occurred.")
raise
version = api.find_latest_version(os.listdir(instancedir)) + 1
versiondir = os.path.join(instancedir, api.format_version(version))
# Metadata
# _________
# | |.key = value
# | |
# | |
# | |
# | |
# |_________|
#
fname = os.path.join(stagingdir, ".metadata.json")
try:
with open(fname) as f:
metadata = json.load(f)
except IOError:
metadata = {
"schema": "pyblish-starter:version-1.0",
"version": version,
"path": versiondir,
"representations": list(),
# Collected by pyblish-base
"time": context.data["date"],
"author": context.data["user"],
# Collected by pyblish-maya
"source": os.path.join(
"{root}",
os.path.relpath(
context.data["currentFile"],
api.root()
)
),
}
for filename in instance.data.get("files", list()):
name, ext = os.path.splitext(filename)
metadata["representations"].append(
{
"schema": "pyblish-starter:representation-1.0",
"format": ext,
"path": "{dirname}/%s{format}" % name
}
)
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
with open(fname, "w") as f:
json.dump(metadata, f, indent=4)
# Metadata is written before being validated -
# this way, if validation fails, the data can be
# inspected by hand from within the user directory.
api.schema.validate(metadata, "version")
shutil.copytree(stagingdir, versiondir)
self.log.info("Successfully integrated \"%s\" to \"%s\"" % (
instance, versiondir))
| [
"os.listdir",
"pyblish_starter.api.root",
"os.makedirs",
"os.path.join",
"os.path.splitext",
"pyblish_starter.api.schema.validate",
"shutil.copytree",
"json.load",
"pyblish_starter.api.format_version",
"json.dump"
] | [((1792, 1843), 'os.path.join', 'os.path.join', (['root', '"""shared"""', "instance.data['name']"], {}), "(root, 'shared', instance.data['name'])\n", (1804, 1843), False, 'import os\n'), ((2430, 2472), 'os.path.join', 'os.path.join', (['stagingdir', '""".metadata.json"""'], {}), "(stagingdir, '.metadata.json')\n", (2442, 2472), False, 'import os\n'), ((4076, 4116), 'pyblish_starter.api.schema.validate', 'api.schema.validate', (['metadata', '"""version"""'], {}), "(metadata, 'version')\n", (4095, 4116), False, 'from pyblish_starter import api\n'), ((4125, 4164), 'shutil.copytree', 'shutil.copytree', (['stagingdir', 'versiondir'], {}), '(stagingdir, versiondir)\n', (4140, 4164), False, 'import shutil\n'), ((1870, 1894), 'os.makedirs', 'os.makedirs', (['instancedir'], {}), '(instancedir)\n', (1881, 1894), False, 'import os\n'), ((2190, 2217), 'pyblish_starter.api.format_version', 'api.format_version', (['version'], {}), '(version)\n', (2208, 2217), False, 'from pyblish_starter import api\n'), ((3303, 3329), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (3319, 3329), False, 'import os\n'), ((3862, 3894), 'json.dump', 'json.dump', (['metadata', 'f'], {'indent': '(4)'}), '(metadata, f, indent=4)\n', (3871, 3894), False, 'import json\n'), ((2114, 2137), 'os.listdir', 'os.listdir', (['instancedir'], {}), '(instancedir)\n', (2124, 2137), False, 'import os\n'), ((2549, 2561), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2558, 2561), False, 'import json\n'), ((3152, 3162), 'pyblish_starter.api.root', 'api.root', ([], {}), '()\n', (3160, 3162), False, 'from pyblish_starter import api\n')] |
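For reference, this is roughly the shape of the .metadata.json that the plug-in above writes; the field names and schema strings come from the code, while the concrete values are illustrative assumptions only.

example_metadata = {
    "schema": "pyblish-starter:version-1.0",
    "version": 3,                                   # next version number found on disk
    "path": "/project/shared/heroModel/v003",       # illustrative versiondir
    "representations": [
        {
            "schema": "pyblish-starter:representation-1.0",
            "format": ".ma",
            "path": "{dirname}/heroModel{format}",
        }
    ],
    "time": "20240101T120000Z",                     # taken from context.data["date"]
    "author": "artist",                             # taken from context.data["user"]
    "source": "{root}/scenes/heroModel_v003.ma",    # current file relative to api.root()
}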
import cv2
import numpy as np
cap=cv2.VideoCapture("line.mp4")
while (cap.isOpened()):
_,frame=cap.read()
frame=cv2.resize(frame,(640,480))
    hsvFrame=cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)  # the HSV range of the colour we want to detect can be looked up online; fill in the lower/upper bounds below accordingly
l_lineValue=np.array([18,94,140],np.uint8)
h_lineValue=np.array([48,255,255],np.uint8)
mask=cv2.inRange(hsvFrame,l_lineValue,h_lineValue)
edgesFrame=cv2.Canny(mask,75,250)
lines=cv2.HoughLinesP(edgesFrame,1,np.pi/180,50,minLineLength=10,maxLineGap=50)
for line in lines:
(x1,y1,x2,y2)=line[0]
cv2.line(frame,(x1,y1),(x2,y2),(255,0,0),2)
cv2.imshow("Original Video",frame)
cv2.imshow("Mask Video",mask)
if cv2.waitKey(1) & 0xFF==ord("q"):
break
cap.release()
cv2.destroyAllWindows() | [
"cv2.HoughLinesP",
"cv2.inRange",
"cv2.line",
"cv2.imshow",
"numpy.array",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.resize",
"cv2.Canny",
"cv2.waitKey"
] | [((35, 63), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""line.mp4"""'], {}), "('line.mp4')\n", (51, 63), False, 'import cv2\n'), ((839, 862), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (860, 862), False, 'import cv2\n'), ((122, 151), 'cv2.resize', 'cv2.resize', (['frame', '(640, 480)'], {}), '(frame, (640, 480))\n', (132, 151), False, 'import cv2\n'), ((163, 201), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2HSV'], {}), '(frame, cv2.COLOR_BGR2HSV)\n', (175, 201), False, 'import cv2\n'), ((327, 360), 'numpy.array', 'np.array', (['[18, 94, 140]', 'np.uint8'], {}), '([18, 94, 140], np.uint8)\n', (335, 360), True, 'import numpy as np\n'), ((374, 408), 'numpy.array', 'np.array', (['[48, 255, 255]', 'np.uint8'], {}), '([48, 255, 255], np.uint8)\n', (382, 408), True, 'import numpy as np\n'), ((415, 462), 'cv2.inRange', 'cv2.inRange', (['hsvFrame', 'l_lineValue', 'h_lineValue'], {}), '(hsvFrame, l_lineValue, h_lineValue)\n', (426, 462), False, 'import cv2\n'), ((476, 500), 'cv2.Canny', 'cv2.Canny', (['mask', '(75)', '(250)'], {}), '(mask, 75, 250)\n', (485, 500), False, 'import cv2\n'), ((509, 594), 'cv2.HoughLinesP', 'cv2.HoughLinesP', (['edgesFrame', '(1)', '(np.pi / 180)', '(50)'], {'minLineLength': '(10)', 'maxLineGap': '(50)'}), '(edgesFrame, 1, np.pi / 180, 50, minLineLength=10, maxLineGap=50\n )\n', (524, 594), False, 'import cv2\n'), ((700, 735), 'cv2.imshow', 'cv2.imshow', (['"""Original Video"""', 'frame'], {}), "('Original Video', frame)\n", (710, 735), False, 'import cv2\n'), ((739, 769), 'cv2.imshow', 'cv2.imshow', (['"""Mask Video"""', 'mask'], {}), "('Mask Video', mask)\n", (749, 769), False, 'import cv2\n'), ((647, 698), 'cv2.line', 'cv2.line', (['frame', '(x1, y1)', '(x2, y2)', '(255, 0, 0)', '(2)'], {}), '(frame, (x1, y1), (x2, y2), (255, 0, 0), 2)\n', (655, 698), False, 'import cv2\n'), ((777, 791), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (788, 791), False, 'import cv2\n')] |
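One caveat about the loop above: cv2.HoughLinesP returns None when no segments are detected, so iterating over its result directly raises a TypeError on frames without detectable lines. A suggested hardening sketch (the function name and its parameters are mine, not part of the original script):

import cv2
import numpy as np

def draw_detected_lines(frame, edges):
    # HoughLinesP returns None when nothing is found, so guard before iterating.
    lines = cv2.HoughLinesP(edges, 1, np.pi / 180, 50, minLineLength=10, maxLineGap=50)
    if lines is not None:
        for x1, y1, x2, y2 in (line[0] for line in lines):
            cv2.line(frame, (x1, y1), (x2, y2), (255, 0, 0), 2)
    return frame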
import sys
def sum_triangular_numbers(n):
if n <= 0:
return 0
else:
t = [int((i + 1) * (i + 2) / 2) for i in range(n)]
return sum(t)
if __name__ == "__main__":
if len(sys.argv) == 2:
print(sum_triangular_numbers(n=int(sys.argv[1])))
else:
sys.exit(1)
| [
"sys.exit"
] | [((298, 309), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (306, 309), False, 'import sys\n')] |
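As a cross-check on the loop above: the k-th triangular number is k(k+1)/2, and the sum of the first n of them has the closed form n(n+1)(n+2)/6, so the function can be verified against it. This sketch assumes sum_triangular_numbers from the row above is in scope.

def sum_triangular_closed_form(n):
    # Sum_{k=1..n} k*(k+1)/2 == n*(n+1)*(n+2)/6; non-positive n sums to 0.
    return n * (n + 1) * (n + 2) // 6 if n > 0 else 0

for n in range(0, 12):
    assert sum_triangular_numbers(n) == sum_triangular_closed_form(n)
print(sum_triangular_closed_form(3))   # 1 + 3 + 6 = 10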
"""Test messages with attachments."""
import future.backports.email as email
import mailmerge
from tests.test_smtp_base import TestSMTPBase
class TestSendAttachment(TestSMTPBase):
"""Test messages with attachments."""
def _validate_message_contents(self, message):
"""Validate the contents and attachments of the message."""
self.assertTrue(message.is_multipart())
# Make sure the attachments are all present and valid
email_body_present = False
expected_attachments = {
"test_send_attachment_1.txt": False,
"test_send_attachment_2.pdf": False,
"test_send_attachment_17.txt": False,
}
for part in message.walk():
if part.get_content_maintype() == 'multipart':
continue
if part['content-type'].startswith('text/plain'):
# This is the email body
email_body = part.get_payload()
expected_email_body = 'Hi, Myself,\n\nYour number is 17.'
self.assertEqual(email_body.rstrip(), expected_email_body)
email_body_present = True
elif part['content-type'].startswith('application/octet-stream'):
# This is an attachment
filename = part.get_param('name')
file_contents = part.get_payload(decode=True)
self.assertIn(filename, expected_attachments)
self.assertFalse(expected_attachments[filename])
with open(filename, 'rb') as expected_attachment:
correct_file_contents = expected_attachment.read()
self.assertEqual(file_contents, correct_file_contents)
expected_attachments[filename] = True
self.assertTrue(email_body_present)
self.assertNotIn(False, expected_attachments.values())
def test_send_attachment(self):
"""Attachments should be sent as part of the email."""
mailmerge.api.main(
database_filename=self.DATABASE_FILENAME,
config_filename=self.SERVER_CONFIG_FILENAME,
template_filename="test_send_attachment.template.txt",
no_limit=False,
dry_run=False,
)
# Check SMTP server after
self.assertEqual(self.smtp.msg_from, "<NAME> <<EMAIL>>")
recipients = ["<EMAIL>"]
self.assertEqual(self.smtp.msg_to, recipients)
# Check that the message is multipart
message = email.parser.Parser().parsestr(self.smtp.msg)
self._validate_message_contents(message)
| [
"mailmerge.api.main",
"future.backports.email.parser.Parser"
] | [((1976, 2176), 'mailmerge.api.main', 'mailmerge.api.main', ([], {'database_filename': 'self.DATABASE_FILENAME', 'config_filename': 'self.SERVER_CONFIG_FILENAME', 'template_filename': '"""test_send_attachment.template.txt"""', 'no_limit': '(False)', 'dry_run': '(False)'}), "(database_filename=self.DATABASE_FILENAME,\n config_filename=self.SERVER_CONFIG_FILENAME, template_filename=\n 'test_send_attachment.template.txt', no_limit=False, dry_run=False)\n", (1994, 2176), False, 'import mailmerge\n'), ((2492, 2513), 'future.backports.email.parser.Parser', 'email.parser.Parser', ([], {}), '()\n', (2511, 2513), True, 'import future.backports.email as email\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""Tests for `epages_client.dataobjects.product_create` package."""
import unittest
# import the package
import epages_client
from epages_client.dataobjects.product_update import ProductUpdate
from epages_client.dataobjects.remove_value import RemoveValue
# import base class for unit testing
from .base_unit_test import BaseUnitTest
class TestStringMethods(BaseUnitTest):
def setUp(self):
pass
def test_1001_correct_inputs(self):
product = ProductUpdate()
product.productNumber = "prod_1"
self.assertEqual(product.productNumber, "prod_1")
product.name = "Deddy Bear"
self.assertEqual(product.name, "Deddy Bear")
product.shortDescription = "Soft Deddy Bear"
self.assertEqual(product.shortDescription, "Soft Deddy Bear")
product.description = "Tom’s 18″ Teddy Bear"
self.assertEqual(product.description, "Tom’s 18″ Teddy Bear")
product.manufacturer = "<NAME>"
self.assertEqual(product.manufacturer, "<NAME>")
product.upc = "9501101530003"
self.assertEqual(product.upc, "9501101530003")
product.ean = "0012345678905"
self.assertEqual(product.ean, "0012345678905")
product.essentialFeatures = "yellow eyes"
self.assertEqual(product.essentialFeatures, "yellow eyes")
product.price = 29.99
self.assertEqual(product.price, 29.99)
product.manufacturerPrice = 49.90
self.assertEqual(product.manufacturerPrice, 49.90)
product.depositPrice = 0.00
self.assertEqual(product.depositPrice, 0.00)
product.deliveryPeriod = "5 days"
self.assertEqual(product.deliveryPeriod, "5 days")
product.minStocklevel = 6
self.assertEqual(product.minStocklevel, 6)
product.manufacturerProductNumber = "00000001ZDEDDY"
self.assertEqual(product.manufacturerProductNumber, "00000001ZDEDDY")
product.productLength = 50
self.assertEqual(product.productLength, 50)
product.productWidth = 60
self.assertEqual(product.productWidth, 60)
product.productHeight = 457
self.assertEqual(product.productHeight, 457)
product.taxClassId = "tax01"
self.assertEqual(product.taxClassId, "tax01")
product.productImage = "teddy0001_big.jpg"
self.assertEqual(product.productImage, "teddy0001_big.jpg")
product.stocklevel = 5
self.assertEqual(product.stocklevel, 5)
product.visible = True
self.assertEqual(product.visible, True)
product.searchKeywords.add('deddy')
self.assertEqual(product.searchKeywords.get(), ['deddy'])
product.searchKeywords.add('toy')
self.assertEqual(product.searchKeywords.get(), ['deddy', 'toy'])
patch_values = [
{'op': 'replace', 'path': '/priceInfo/taxClass/taxClassId', 'value': 'tax01'},
{'op': 'add', 'path': '/visible', 'value': True},
{'op': 'add', 'path': '/productNumber', 'value': 'prod_1'},
{'op': 'add', 'path': '/productImage', 'value': 'teddy0001_big.jpg'},
{'op': 'add', 'path': '/deliveryPeriod', 'value': '5 days'},
{'op': 'add', 'path': '/essentialFeatures', 'value': 'yellow eyes'},
{'op': 'add', 'path': '/description', 'value': 'Tom’s 18″ Teddy Bear'},
{'op': 'add', 'path': '/shortDescription', 'value': 'Soft Deddy Bear'},
{'op': 'add', 'path': '/productWidth', 'value': 60},
{'op': 'add', 'path': '/upc', 'value': '9501101530003'},
{'op': 'add', 'path': '/searchKeywords',
'value': ['deddy', 'toy']},
{'op': 'add', 'path': '/priceInfo/manufacturerPrice/amount', 'value': 49.9},
{'op': 'add', 'path': '/manufacturerProductNumber',
'value': '00000001ZDEDDY'},
{'op': 'add', 'path': '/priceInfo/price/amount', 'value': 29.99},
{'op': 'add', 'path': '/productLength', 'value': 50},
{'op': 'add', 'path': '/name', 'value': '<NAME>'},
{'op': 'add', 'path': '/manufacturer', 'value': '<NAME>'},
{'op': 'add', 'path': '/ean', 'value': '0012345678905'},
{'op': 'add', 'path': '/priceInfo/depositPrice/amount', 'value': 0.0},
{'op': 'add', 'path': '/stocklevel', 'value': 5},
{'op': 'add', 'path': '/productHeight', 'value': 457},
{'op': 'add', 'path': '/minStocklevel', 'value': 6}
]
self.assert_count_items_equal(product.get_patch(), patch_values)
def test_1201_correct_removes(self):
product = ProductUpdate()
product.name = RemoveValue()
# test str
patch_value = [{'op': 'remove', 'path': '/name'}]
self.assert_count_items_equal(product.get_patch(), patch_value)
# test numeric
product.minStocklevel = RemoveValue()
patch_value = [{'op': 'remove', 'path': '/name'},
{'op': 'remove', 'path': '/minStocklevel'}]
self.assert_count_items_equal(product.get_patch(), patch_value)
# ListOfObject - user overwrites when he/she wants to remove values
product.searchKeywords = RemoveValue()
self.assertIsInstance(product.searchKeywords, RemoveValue)
patch_value = [{'op': 'remove', 'path': '/name'}, {'op': 'remove',
'path': '/minStocklevel'}, {'op': 'remove', 'path': "/searchKeywords"}]
self.assert_count_items_equal(product.get_patch(), patch_value)
def test_2000_invalid_inputs(self):
product = ProductUpdate()
with self.assertRaises(TypeError) as e:
product.price = "12.21 €"
with self.assertRaises(TypeError) as e:
product.visible = "True"
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"epages_client.dataobjects.remove_value.RemoveValue",
"epages_client.dataobjects.product_update.ProductUpdate"
] | [((5925, 5940), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5938, 5940), False, 'import unittest\n'), ((536, 551), 'epages_client.dataobjects.product_update.ProductUpdate', 'ProductUpdate', ([], {}), '()\n', (549, 551), False, 'from epages_client.dataobjects.product_update import ProductUpdate\n'), ((4705, 4720), 'epages_client.dataobjects.product_update.ProductUpdate', 'ProductUpdate', ([], {}), '()\n', (4718, 4720), False, 'from epages_client.dataobjects.product_update import ProductUpdate\n'), ((4744, 4757), 'epages_client.dataobjects.remove_value.RemoveValue', 'RemoveValue', ([], {}), '()\n', (4755, 4757), False, 'from epages_client.dataobjects.remove_value import RemoveValue\n'), ((4964, 4977), 'epages_client.dataobjects.remove_value.RemoveValue', 'RemoveValue', ([], {}), '()\n', (4975, 4977), False, 'from epages_client.dataobjects.remove_value import RemoveValue\n'), ((5285, 5298), 'epages_client.dataobjects.remove_value.RemoveValue', 'RemoveValue', ([], {}), '()\n', (5296, 5298), False, 'from epages_client.dataobjects.remove_value import RemoveValue\n'), ((5703, 5718), 'epages_client.dataobjects.product_update.ProductUpdate', 'ProductUpdate', ([], {}), '()\n', (5716, 5718), False, 'from epages_client.dataobjects.product_update import ProductUpdate\n')] |
from django.utils.translation import ugettext_lazy as _
from django_celery_results.apps import CeleryResultConfig
from oauth2_provider.apps import DOTConfig
class GiscubeOauth2ProviderConfig(DOTConfig):
verbose_name = _('Security')
class GiscubeCeleryResultConfig(CeleryResultConfig):
verbose_name = _('Internal Processes Results')
| [
"django.utils.translation.ugettext_lazy"
] | [((225, 238), 'django.utils.translation.ugettext_lazy', '_', (['"""Security"""'], {}), "('Security')\n", (226, 238), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((313, 344), 'django.utils.translation.ugettext_lazy', '_', (['"""Internal Processes Results"""'], {}), "('Internal Processes Results')\n", (314, 344), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
#!/usr/bin/env python3
"""Functions to create a positions file."""
########################################################################
# File: makePositionsFiles.py
# executable: makePositionsFiles.py
#
# Author: <NAME>
# History: 5/21/18 Created
########################################################################
import os
import sys
import string
import array
import subprocess
import numpy as np
import pandas as pd
import pysam
from contextlib import closing
from collections import Counter
from signalalign.utils.parsers import read_fasta
from py3helpers.utils import find_substring_indices, all_string_permutations
def find_gatc_motifs(sequence):
"""Generate index of 'A' within the 'GATC' motifs in a nucleotide sequence
:param sequence: since GATC motif is in DNA, expecting a DNA nucleotide sequence
:return: generator yielding index of 'A' within the 'GATC'
"""
return find_substring_indices(sequence.upper(), "GATC", offset=1)
def find_different_char_index(start_string, edit_string):
"""Compares standard and modified string and identifies the index of the modified character
ex: find_char_difference_index("CCAGG","CFAGG") = 1
:param start_string: starting string
:param edit_string: string with a single character edit from start_string
:return: index of string difference
"""
    assert len(start_string) == len(edit_string), "start_string and edit_string must be the same length"
pos = [i for i in range(len(start_string)) if start_string[i] != edit_string[i]]
assert len(pos) == 1, "Only one character difference allowed. " \
"start_string={}, edit_string={}".format(start_string, edit_string)
return pos[0]
def find_modification_index_and_character(canonical_motif, replacement_motif):
"""Compares canonical and modified motif and identifies the
index of the modified nucleotide, canonical character, and replacement character.
note. everything is converted to uppercase
ex. find_modification_index_and_character("ATGC", "ETGC") = 0, "A", "E"
:param canonical_motif: canonical nucleotide bases
:param replacement_motif: replacement motif
:return: mod_index, old_char, new_char
"""
canonical_motif = canonical_motif.upper()
replacement_motif = replacement_motif.upper()
assert canonical_motif != replacement_motif, "Canonical motif cannot be the same as replacement motif"
assert set(canonical_motif) <= set("ATGC"), "Canonical motif must only have canonical nucleotides"
pos = find_different_char_index(canonical_motif, replacement_motif)
old_char = canonical_motif[pos]
new_char = replacement_motif[pos]
return pos, old_char, new_char
def make_positions_file(reference, output_path, motifs, overlap=False):
"""Creates a tsv file with the following format ("contig", "position", "strand", "change_from", "change_to").
Given a reference sequence and sets of sequence motif changes we report the location of each change.
NOTE: the motifs cannot create new characters on the opposite strand!
:param reference: path to reference sequence
:param output_path: output path of positions file
:param motifs: list of lists of find replace motifs ex: [("CCAGG","CFAGG"), ("CCTGG","CFTGG")]
:param overlap: if the motif can overlap with its self, find index of overlap if set to true
"""
rev_motifs = []
for motif in motifs:
rev_motifs.append([x[::-1] for x in motif])
with open(output_path, "w") as outfile:
for header, comment, sequence in read_fasta(reference):
fwd_seq = sequence
bwd_seq = reverse_complement(fwd_seq, reverse=False, complement=True).upper()
for index, old_char, substitution_char in find_motifs_sequence_positions(fwd_seq, motifs, overlap=overlap):
outfile.write(header + "\t" + np.str(index) + "\t" + "+" + "\t"
+ old_char + "\t" + substitution_char + "\n")
for index, old_char, substitution_char in find_motifs_sequence_positions(bwd_seq, rev_motifs, overlap=overlap):
outfile.write(header + "\t" + np.str(index) + "\t" + "-" + "\t"
+ old_char + "\t" + substitution_char + "\n")
return output_path
def replace_motifs_sequence_positions(sequence, motifs, overlap=False):
"""Edit nucleotide sequence using find and replace motifs
note: we convert sequence to uppercase
:param sequence: nucleotide sequence
:param motifs: list of motif's which need to be replaced: eg [[find, replace]], [["CCAGG", "CEAGG"]]
:param overlap: boolean option to look for motif overlaps
"""
new_sequence = list(sequence)
for index, old_char, substitution_char in find_motifs_sequence_positions(sequence, motifs, overlap=overlap):
new_sequence[index] = substitution_char
subst_sequence = ''.join(new_sequence).upper()
return subst_sequence
def find_motifs_sequence_positions(sequence, motifs, overlap=False):
"""Find locations of edited nucleotide nucleotide sequence using find and replace motifs
note: we convert sequence to uppercase
:param sequence: nucleotide sequence
:param motifs: list of motif's which need to be replaced: eg [[find, replace]], [["CCAGG", "CEAGG"]]
:param overlap: boolean option to look for motif overlaps
"""
    already_replaced_indexes = set()
# gather motifs
for motif_pair in motifs:
assert len(motif_pair) == 2 and type(
motif_pair) is list, "Motifs must be structured as list of lists, even for one motif find and replace"
# find edit character and offset
offset, old_char, substitution_char = find_modification_index_and_character(motif_pair[0], motif_pair[1])
for index in find_substring_indices(sequence.upper(), motif_pair[0].upper(), offset=offset, overlap=overlap):
# make sure that there is no overlapping assignments of characters
            assert index not in already_replaced_indexes, "Motif has two different edits to a single nucleotide " \
                                                          "location. Check motifs {}".format(motifs)
            already_replaced_indexes.add(index)
yield index, old_char, substitution_char
def replace_periodic_sequence_positions(sequence, step_size, offset, substitution_char):
"""Edit nucleotide sequence using by replacing every 'step_size' nucleotides with an offset
note: we convert sequence to uppercase
eg: replace_periodic_sequence_positions("ATGCATGC", 3, 1, "F") = "AFGCFTGF"
:param sequence: nucleotide sequence
:param step_size: every 'step_size' locations the offset position is changed
:param offset: the
:param substitution_char: replacement character
"""
assert offset < step_size, "Offset has to be less than step size"
sequence = list(sequence)
for i in range(offset, len(sequence), step_size):
sequence[i] = substitution_char
subst_sequence = ''.join(sequence).upper()
return subst_sequence
def replace_periodic_reference_positions(reference_location, sub_fasta_path, step, offset, substitution_char='X'):
"""Edit and write a reference sequence to a specified path by replacing periodic characters
note: if sub_fasta_path exists it will return the path without creating a new file
:param reference_location: input reference
:param sub_fasta_path: location of edited reference
:param step: size of gap between substitution characters
:param offset: offset of when to start creating substiutions
:param substitution_char: character to replace original character
"""
if os.path.isfile(sub_fasta_path):
print("[substitute_reference_positions] Substituted reference fasta file exists: {}".format(
sub_fasta_path))
return sub_fasta_path
else:
print("[substitute_reference_positions] Creating substituted reference fasta file: {}".format(
sub_fasta_path))
# write
with open(sub_fasta_path, 'w') as outfasta:
for header, comment, sequence in read_fasta(reference_location):
subst_sequence = replace_periodic_sequence_positions(sequence, step, offset, substitution_char)
print(
">%s %s\n%s" % (header, "substituted:{},step:{},offset:{}".format(substitution_char, step, offset),
subst_sequence), file=outfasta)
return sub_fasta_path
def replace_motif_reference_positions(reference_location, sub_fasta_path, motifs, overlap=False):
"""Replace motif reference sequence to a specific path
:param reference_location: input reference
:param sub_fasta_path: location of edited reference
:param motifs: list of motif's which need to be replaced: eg [[find, replace]], [["CCAGG", "CEAGG"]]
:param overlap: of overlap is possible, replace with overlap: eg [["AAA", "AAT"]] : AAAA -> AATT
"""
if os.path.isfile(sub_fasta_path):
print("[substitute_reference_positions] Substituted reference fasta file exists: {}".format(
sub_fasta_path))
return sub_fasta_path
else:
print("[substitute_reference_positions] Creating substituted reference fasta file: {}".format(
sub_fasta_path))
# write
with open(sub_fasta_path, 'w') as outfasta:
for header, comment, sequence in read_fasta(reference_location):
subst_sequence = replace_motifs_sequence_positions(sequence, motifs, overlap)
print(">%s %s\n%s" % (header, "substituted:{}".format(motifs),
subst_sequence), file=outfasta)
return sub_fasta_path
def samtools_faidx_fasta(fasta_path, log=None):
"""Index fasta using samtools faidx
note: samtools must be in PATH
:param fasta_path: path to fasta file
"""
# produce faidx file
assert os.path.isfile(fasta_path), "Path to fasta file does not exist"
index_path = "{}.fai".format(fasta_path)
if not os.path.exists(index_path):
if log:
print("[{}] indexing reference {}".format(log, fasta_path))
args = ["samtools", "faidx", fasta_path]
subprocess.check_call(args)
assert os.path.isfile(index_path), "Error creating FAIDX file for: {}".format(fasta_path)
return index_path
def count_all_sequence_kmers(seq, k=5, rev_comp=False):
"""Count all the 5'-3' kmers of a nucleotide sequence, rev_comp counts rev_comp seq IN ADDITION to given sequence
:param seq: nucleotide sequence
:param k: size of kmer
:param rev_comp: boolean option to count reverse complement kmers as well
:return: dictionary of kmers with counts as values
"""
# loop through kmers
kmers = Counter()
for kmer in kmer_iterator(seq, k):
kmers[kmer] += 1
if rev_comp:
# loop through rev_comp kmers
seq1 = reverse_complement(seq, reverse=True, complement=True)
for kmer in kmer_iterator(seq1, k):
kmers[kmer] += 1
return kmers
def get_sequence_kmers(seq, k=5, rev_comp=False):
"""Get the set of all kmers from a sequence.
:param seq: nucleotide sequence
:param k: size of kmer
:param rev_comp: boolean option to count reverse complement kmers as well
:return: set of kmers
"""
return set(count_all_sequence_kmers(seq, k=k, rev_comp=rev_comp).keys())
def get_motif_kmers(motif_pair, k, alphabet="ATGC"):
"""Given a motif pair, create a list of all kmers which contain modification
"""
assert len(motif_pair) == 2, "Motif pair must be a list of length 2. len(motif_pair) = {}".format(len(motif_pair))
canonical = motif_pair[0]
modified = motif_pair[1]
motif_len = len(canonical)
# get mod index and chars
mod_index, old_char, new_char = find_modification_index_and_character(canonical, modified)
bases_after = motif_len - mod_index - 1
# get overlaps for front and back of kmer
front_overlap, back_overlap = get_front_back_kmer_overlap(k, motif_len, mod_index)
# pre-compute kmers
kmer_set_dict = dict()
for i in range(1, max(front_overlap, back_overlap) + 1):
kmer_set_dict[i] = [x for x in all_string_permutations(alphabet, i)]
kmer_set_dict[0] = ['']
motif_kmers = []
for i in range(k):
# get prepend kmers and index for front of motif
if i >= front_overlap:
front_index = i - front_overlap
prepend_kmers = ['']
else:
prepend_kmers = kmer_set_dict[front_overlap - i]
front_index = 0
# get append kmers and index for back of motif
if i > bases_after:
append_kmers = kmer_set_dict[i - bases_after]
back_index = motif_len
else:
back_index = mod_index + i + 1
append_kmers = ['']
kmer = modified[front_index:back_index]
motif_kmers.extend(
            [front + kmer + back for front in prepend_kmers for back in append_kmers if front + kmer + back != ''])
return set(motif_kmers)
def get_front_back_kmer_overlap(k, motif_len, mod_index):
"""Get the largest number of bases overlap at front and back of motif
eg: k=3 , motif_len = 2, mod_index = 1
motif = GE
X G E X
_ _ _ max front_overlap = 1
_ _ _ max back_overlap = 1
:param k: length of kmer
:param motif_len: length of motif
:param mod_index: index position of modification
:return: largest overlap in the front and back of a generated kmer
"""
assert k >= 1, "k cannot be less than 1. k: {}".format(k)
front_overlap = k - mod_index - 1
back_overlap = k - (motif_len - mod_index)
return front_overlap, back_overlap
# TODO write these tests ya dig
def getFastaDictionary(fastaFile):
"""Returns a dictionary of the first words of fasta headers to their corresponding
fasta sequence
"""
namesAndSequences = [(x[0].split()[0], x[1]) for x in fastaRead(open(fastaFile, 'r'))]
names = [x[0] for x in namesAndSequences]
assert len(names) == len(set(names)) # Check all the names are unique
return dict(namesAndSequences) # Hash of names to sequences
def fastaRead(fileHandleOrFile):
"""iteratively yields a sequence for each '>' it encounters, ignores '#' lines
"""
fileHandle = _getFileHandle(fileHandleOrFile)
line = fileHandle.readline()
chars_to_remove = "\n "
valid_chars = {x for x in string.ascii_letters + "-"}
while line != '':
if line[0] == '>':
name = line[1:-1]
line = fileHandle.readline()
seq = array.array('b')
while line != '' and line[0] != '>':
line = line.translate(str.maketrans('', '', chars_to_remove))
if len(line) > 0 and line[0] != '#':
seq.extend(list(map(ord, line)))
line = fileHandle.readline()
try:
assert all(chr(x) in valid_chars for x in seq)
except AssertionError:
bad_chars = {chr(x) for x in seq if chr(x) not in valid_chars}
raise RuntimeError("Invalid FASTA character(s) see in fasta sequence: {}".format(bad_chars))
yield name, seq.tobytes()
else:
line = fileHandle.readline()
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close()
def _getFileHandle(fileHandleOrFile, mode="r"):
if isinstance(fileHandleOrFile, "".__class__):
return open(fileHandleOrFile, mode)
else:
return fileHandleOrFile
def fastaWrite(fileHandleOrFile, name, seq, mode="w"):
"""Writes out fasta file
"""
fileHandle = _getFileHandle(fileHandleOrFile, mode)
valid_chars = {x for x in string.ascii_letters + "-"}
try:
assert any([isinstance(seq, str), isinstance(seq, str)])
except AssertionError:
raise RuntimeError("Sequence is not unicode or string")
try:
assert all(x in valid_chars for x in seq)
except AssertionError:
bad_chars = {x for x in seq if x not in valid_chars}
raise RuntimeError("Invalid FASTA character(s) see in fasta sequence: {}".format(bad_chars))
fileHandle.write(">%s\n" % name)
chunkSize = 100
for i in range(0, len(seq), chunkSize):
fileHandle.write("%s\n" % seq[i:i + chunkSize])
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close()
def kmer_iterator(dna, k):
"""Generates kmers of length k from a string with one step between kmers
:param dna: string to generate kmers from
:param k: size of kmer to generate
"""
assert len(dna) >= 1, "You must select a substring with len(dna) >= 1: {}".format(dna)
assert k >= 1, "You must select a main_string with k >= 1: {}".format(k)
for i in range(len(dna)):
kmer = dna[i:(i + k)]
if len(kmer) == k:
yield kmer
def reverse_complement(dna, reverse=True, complement=True):
"""
Make the reverse complement of a DNA sequence. You can also just make the
complement or the reverse strand (see options).
Input: A DNA sequence containing 'ATGC' base pairs and wild card letters
Output: DNA sequence as a string.
Options: Specify reverse and/or complement as False to get the complement or
reverse of the input DNA. If both are False, input is returned.
"""
# Make translation table
trans_table = str.maketrans('ACGTMKRYBVDHNacgtmkrybvdhn',
"TGCAKMYRVBHDNtgcakmyrvbhdn")
# Make complement to DNA
comp_dna = dna.translate(trans_table)
# Output all as strings
if reverse and complement:
return comp_dna[::-1]
if reverse and not complement:
return dna[::-1]
if complement and not reverse:
return comp_dna
if not complement and not reverse:
return dna
def count_kmers(dna, k):
"""Count all kmers of length k in a string
:param dna: string to search and count kmers
:param k: size of kmer
"""
assert len(dna) >= 1, "You must select a substring with len(dna) >= 1: {}".format(dna)
assert k >= 1, "You must select a main_string with k >= 1: {}".format(k)
kmer_count = Counter()
for i in range(len(dna)):
kmer = dna[i:(i + k)]
if len(kmer) == k:
kmer_count[kmer] += 1
return kmer_count
def parse_full_alignment_file(alignment_file):
data = pd.read_table(alignment_file, usecols=(1, 4, 5, 9, 12, 13),
dtype={'ref_pos': np.int64,
'strand': np.str,
'event_index': np.int64,
'kmer': np.str,
'posterior_prob': np.float64,
'event_mean': np.float64},
header=None,
names=['ref_pos', 'strand', 'event_index', 'kmer', 'posterior_prob', 'event_mean'])
return data
class CustomAmbiguityPositions(object):
def __init__(self, ambig_filepath):
"""Deal with ambiguous positions from a tsv ambiguity position file with the format of
contig position strand change_from change_to
'name' 0 indexed position +/- C E
:param ambig_filepath: path to ambiguity position file"""
self.ambig_df = self.parseAmbiguityFile(ambig_filepath)
@staticmethod
def parseAmbiguityFile(ambig_filepath):
"""Parses a 'ambiguity position file' that should have the format:
contig position strand change_from change_to
:param ambig_filepath: path to ambiguity position file
"""
return pd.read_table(ambig_filepath,
usecols=(0, 1, 2, 3, 4),
names=["contig", "position", "strand", "change_from", "change_to"],
dtype={"contig": np.str,
"position": np.int,
"strand": np.str,
"change_from": np.str,
"change_to": np.str})
def getForwardSequence(self, contig, raw_sequence):
"""Edit 'raw_sequence' given a ambiguity positions file. Assumes raw_sequence is forward direction( 5'-3')
:param contig: which contig the sequence belongs (aka header)
:param raw_sequence: raw nucleotide sequence
:return: edited nucleotide sequence
"""
return self._get_substituted_sequence(contig, raw_sequence, "+")
def getBackwardSequence(self, contig, raw_sequence):
"""Edit 'raw_sequence' given a ambiguity positions file, Assumes raw_sequence is forward direction( 5'-3')
:param contig: which contig the sequence belongs (aka header)
:param raw_sequence: raw nucleotide sequence
:return: edited nucleotide sequence
"""
raw_sequence = reverse_complement(raw_sequence, reverse=False, complement=True)
return self._get_substituted_sequence(contig, raw_sequence, "-")
def _get_substituted_sequence(self, contig, raw_sequence, strand):
"""Change the given raw nucleotide sequence using the edits defined in the positions file
:param contig: name of contig to find
:param raw_sequence: nucleotide sequence (note: this is note edited in this function)
:param strand: '+' or '-' to indicate strand
"""
contif_df = self._get_contig_positions(contig, strand)
raw_sequence = list(raw_sequence)
for _, row in contif_df.iterrows():
if raw_sequence[row["position"]] != row["change_from"]:
raise RuntimeError(
"[CustomAmbiguityPositions._get_substituted_sequence]Illegal substitution requesting "
"change from %s to %s, row: %s" % (raw_sequence[row["position"]], row["change_to"], row))
raw_sequence[row["position"]] = row["change_to"]
return "".join(raw_sequence)
def _get_contig_positions(self, contig, strand):
"""Get all unique locations within the positions file
:param contig: name of contig to find
:param strand: '+' or '-' to indicate strand
"""
df = self.ambig_df.loc[
(self.ambig_df["contig"] == contig) & (self.ambig_df["strand"] == strand)].drop_duplicates()
assert len(df['position']) == len(set(df['position'])), "Multiple different changes for a single position. {}" \
.format(df['position'])
return df
def processReferenceFasta(fasta, work_folder, name, motifs=None, positions_file=None):
"""loops over all of the contigs in the reference file, writes the forward and backward sequences
as flat files (no headers or anything) for signalMachine, returns a dict that has the sequence
names as keys and the paths to the processed sequence as keys
:param fasta: path to un-edited fasta file
:param work_folder: FolderHandler object
:param motifs: list of tuple pairs for motif edits. ex [["CCAGG", "CEAGG"]]
:param positions_file: ambiguous positions file which can be processed via CustomAmbiguityPositions
:return: paths to possibly edited forward reference sequence and backward reference sequence
"""
positions = None
# if no processing needs to happen
if positions_file is None and motifs is None:
return fasta, None
# Cant pass positions file and motifs
if positions_file is not None and motifs is not None:
raise RuntimeError("[processReferenceFasta] Cannot specify motif key and ambiguity position file")
# get positions object (if appropriate)
if positions_file:
if not os.path.exists(positions_file):
raise RuntimeError("[processReferenceFasta] Did not find ambiguity position file here: %s" %
positions_file)
positions = CustomAmbiguityPositions(positions_file)
# process fasta
fw_fasta_path = work_folder.add_file_path("forward.{}.{}".format(name, os.path.basename(fasta)))
bw_fasta_path = work_folder.add_file_path("backward.{}.{}".format(name, os.path.basename(fasta)))
print("[SignalAlignment.run] NOTICE: Creating forward and backward fasta files.")
with open(bw_fasta_path, 'w') as bw_outfasta, open(fw_fasta_path, 'w') as fw_outfasta:
for header, comment, sequence in read_fasta(fasta):
# signalAlign likes uppercase
if positions is not None:
fw_sequence = positions.getForwardSequence(contig=header, raw_sequence=sequence.upper())
bw_sequence = positions.getBackwardSequence(contig=header, raw_sequence=sequence.upper())
else:
fw_sequence = sequence.upper()
bw_sequence = reverse_complement(fw_sequence, reverse=False, complement=True).upper()
if motifs:
fw_sequence = replace_motifs_sequence_positions(fw_sequence, motifs, True)
bw_sequence = replace_motifs_sequence_positions(bw_sequence, motifs, True)
print(">%s %s\n%s" % (header, "backward", bw_sequence), file=bw_outfasta)
print(">%s %s\n%s" % (header, "forward", fw_sequence), file=fw_outfasta)
return fw_fasta_path, bw_fasta_path
def get_full_nucleotide_read_from_alignment(alignment_location, read_name, hardclip_character=None):
sequence, qualities, hardclipped_start, hardclipped_end = None, None, 0, 0
with closing(pysam.AlignmentFile(alignment_location, 'rb' if alignment_location.endswith("bam") else 'r')) as aln:
for aligned_segment in aln.fetch(until_eof=True):
if read_name not in aligned_segment.qname:
continue
BAM_CHARD_CLIP = 5
# get data and sanity check
sequence = aligned_segment.query_sequence.upper()
qualities = aligned_segment.qual
cigar_tuples = aligned_segment.cigartuples
if cigar_tuples is None or len(cigar_tuples) == 0:
print("[get_full_nucleotide_read_from_alignment] no alignment found for {} in {}".format(
read_name, alignment_location), file=sys.stderr)
break
# check for hard clipping
if cigar_tuples[0][0] == BAM_CHARD_CLIP:
hardclipped_start = cigar_tuples[0][1]
if hardclip_character is not None:
sequence = (hardclip_character * hardclipped_start) + sequence
if qualities is not None and len(qualities) != 0:
qualities = ("!" * hardclipped_start) + qualities
if cigar_tuples[-1][0] == BAM_CHARD_CLIP:
hardclipped_end = cigar_tuples[-1][1]
if hardclip_character is not None:
sequence = sequence + (hardclip_character * hardclipped_end)
if qualities is not None and len(qualities) != 0:
qualities = qualities + ("!" * hardclipped_end)
# check for reverse mapping
if aligned_segment.is_reverse:
sequence = reverse_complement(sequence, reverse=True, complement=True)
if qualities is not None and len(qualities) != 0:
qualities = ''.join(reversed(list(qualities)))
tmp = hardclipped_end
hardclipped_end = hardclipped_start
hardclipped_start = tmp
# stop looking (assuming only one alignment per read in file)
break
return sequence, qualities, hardclipped_start, hardclipped_end, aligned_segment
| [
"os.path.exists",
"array.array",
"subprocess.check_call",
"signalalign.utils.parsers.read_fasta",
"os.path.isfile",
"collections.Counter",
"os.path.basename",
"pandas.read_table",
"py3helpers.utils.all_string_permutations",
"numpy.str"
] | [((7680, 7710), 'os.path.isfile', 'os.path.isfile', (['sub_fasta_path'], {}), '(sub_fasta_path)\n', (7694, 7710), False, 'import os\n'), ((8995, 9025), 'os.path.isfile', 'os.path.isfile', (['sub_fasta_path'], {}), '(sub_fasta_path)\n', (9009, 9025), False, 'import os\n'), ((9956, 9982), 'os.path.isfile', 'os.path.isfile', (['fasta_path'], {}), '(fasta_path)\n', (9970, 9982), False, 'import os\n'), ((10288, 10314), 'os.path.isfile', 'os.path.isfile', (['index_path'], {}), '(index_path)\n', (10302, 10314), False, 'import os\n'), ((10811, 10820), 'collections.Counter', 'Counter', ([], {}), '()\n', (10818, 10820), False, 'from collections import Counter\n'), ((18750, 18759), 'collections.Counter', 'Counter', ([], {}), '()\n', (18757, 18759), False, 'from collections import Counter\n'), ((18963, 19281), 'pandas.read_table', 'pd.read_table', (['alignment_file'], {'usecols': '(1, 4, 5, 9, 12, 13)', 'dtype': "{'ref_pos': np.int64, 'strand': np.str, 'event_index': np.int64, 'kmer': np\n .str, 'posterior_prob': np.float64, 'event_mean': np.float64}", 'header': 'None', 'names': "['ref_pos', 'strand', 'event_index', 'kmer', 'posterior_prob', 'event_mean']"}), "(alignment_file, usecols=(1, 4, 5, 9, 12, 13), dtype={\n 'ref_pos': np.int64, 'strand': np.str, 'event_index': np.int64, 'kmer':\n np.str, 'posterior_prob': np.float64, 'event_mean': np.float64}, header\n =None, names=['ref_pos', 'strand', 'event_index', 'kmer',\n 'posterior_prob', 'event_mean'])\n", (18976, 19281), True, 'import pandas as pd\n'), ((3543, 3564), 'signalalign.utils.parsers.read_fasta', 'read_fasta', (['reference'], {}), '(reference)\n', (3553, 3564), False, 'from signalalign.utils.parsers import read_fasta\n'), ((10076, 10102), 'os.path.exists', 'os.path.exists', (['index_path'], {}), '(index_path)\n', (10090, 10102), False, 'import os\n'), ((10249, 10276), 'subprocess.check_call', 'subprocess.check_call', (['args'], {}), '(args)\n', (10270, 10276), False, 'import subprocess\n'), ((20240, 20483), 'pandas.read_table', 'pd.read_table', (['ambig_filepath'], {'usecols': '(0, 1, 2, 3, 4)', 'names': "['contig', 'position', 'strand', 'change_from', 'change_to']", 'dtype': "{'contig': np.str, 'position': np.int, 'strand': np.str, 'change_from': np.\n str, 'change_to': np.str}"}), "(ambig_filepath, usecols=(0, 1, 2, 3, 4), names=['contig',\n 'position', 'strand', 'change_from', 'change_to'], dtype={'contig': np.\n str, 'position': np.int, 'strand': np.str, 'change_from': np.str,\n 'change_to': np.str})\n", (20253, 20483), True, 'import pandas as pd\n'), ((24974, 24991), 'signalalign.utils.parsers.read_fasta', 'read_fasta', (['fasta'], {}), '(fasta)\n', (24984, 24991), False, 'from signalalign.utils.parsers import read_fasta\n'), ((8127, 8157), 'signalalign.utils.parsers.read_fasta', 'read_fasta', (['reference_location'], {}), '(reference_location)\n', (8137, 8157), False, 'from signalalign.utils.parsers import read_fasta\n'), ((9442, 9472), 'signalalign.utils.parsers.read_fasta', 'read_fasta', (['reference_location'], {}), '(reference_location)\n', (9452, 9472), False, 'from signalalign.utils.parsers import read_fasta\n'), ((14710, 14726), 'array.array', 'array.array', (['"""b"""'], {}), "('b')\n", (14721, 14726), False, 'import array\n'), ((24287, 24317), 'os.path.exists', 'os.path.exists', (['positions_file'], {}), '(positions_file)\n', (24301, 24317), False, 'import os\n'), ((24628, 24651), 'os.path.basename', 'os.path.basename', (['fasta'], {}), '(fasta)\n', (24644, 24651), False, 'import os\n'), ((24730, 24753), 
'os.path.basename', 'os.path.basename', (['fasta'], {}), '(fasta)\n', (24746, 24753), False, 'import os\n'), ((12264, 12300), 'py3helpers.utils.all_string_permutations', 'all_string_permutations', (['alphabet', 'i'], {}), '(alphabet, i)\n', (12287, 12300), False, 'from py3helpers.utils import find_substring_indices, all_string_permutations\n'), ((3853, 3866), 'numpy.str', 'np.str', (['index'], {}), '(index)\n', (3859, 3866), True, 'import numpy as np\n'), ((4134, 4147), 'numpy.str', 'np.str', (['index'], {}), '(index)\n', (4140, 4147), True, 'import numpy as np\n')] |
import os, sys, re, clr
import math, cmath
import collections
import json
win64_dir = oDesktop.GetExeDir()
dll_dir = os.path.join(win64_dir, 'common/IronPython/DLLs')
sys.path.append(dll_dir)
clr.AddReference('IronPython.Wpf')
import wpf
from System.Windows import Window, Visibility
from System.Windows.Controls import ListBoxItem
from System.Windows.Forms import OpenFileDialog, SaveFileDialog, DialogResult, FolderBrowserDialog
os.chdir(os.path.dirname(__file__))
oProject = oDesktop.GetActiveProject()
oDesign = oProject.GetActiveDesign()
oEditor = oDesign.GetActiveEditor()
oDesktop.ClearMessages("", "", 2)
#Functions---------------------------------------------------------------------|
def switch(bw_name):
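    # Reverse a bondwire by swapping its start/end layers and its Pt0/Pt1 coordinates.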
unit = oEditor.GetActiveUnits()
start_layer = oEditor.GetPropertyValue("BaseElementTab", bw_name, 'Start Layer')
end_layer = oEditor.GetPropertyValue("BaseElementTab", bw_name, 'End Layer')
pt0 = oEditor.GetPropertyValue("BaseElementTab", bw_name, 'Pt0').split(',')
pt1 = oEditor.GetPropertyValue("BaseElementTab", bw_name, 'Pt1').split(',')
try:
oEditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:BaseElementTab",
[
"NAME:PropServers",
bw_name
],
[
"NAME:ChangedProps",
[
"NAME:Start Layer",
"Value:=" , end_layer
]
]
]
])
oEditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:BaseElementTab",
[
"NAME:PropServers",
bw_name
],
[
"NAME:ChangedProps",
[
"NAME:End Layer",
"Value:=" , start_layer
]
]
]
])
oEditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:BaseElementTab",
[
"NAME:PropServers",
bw_name
],
[
"NAME:ChangedProps",
[
"NAME:Pt0",
"X:=" , "{}{}".format(pt1[0], unit),
"Y:=" , "{}{}".format(pt1[1], unit)
]
]
]
])
oEditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:BaseElementTab",
[
"NAME:PropServers",
bw_name
],
[
"NAME:ChangedProps",
[
"NAME:Pt1",
"X:=" , "{}{}".format(pt0[0], unit),
"Y:=" , "{}{}".format(pt0[1], unit)
]
]
]
])
AddWarningMessage('{} is switched!'.format(bw_name))
except:
AddWarningMessage('{} failed in switching!'.format(bw_name))
def change(bondwire_name, direction, distance):
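    # Shift the bondwire's Pt1 by 'distance' in x, in y, or along the wire's own
    # direction; 'Switch Pts' reverses the wire via switch() instead of moving it.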
pt0 = oEditor.GetPropertyValue("BaseElementTab", bondwire_name, 'pt0')
pt1 = oEditor.GetPropertyValue("BaseElementTab", bondwire_name, 'pt1')
x0, y0 = map(float, pt0.strip().split(','))
x1, y1 = map(float, pt1.strip().split(','))
if direction == "Move in x":
x, y = x1 + distance, y1
elif direction == "Move in y":
x, y = x1, y1 + distance
elif direction == "Move along":
length = math.sqrt((x1-x0)**2+(y1-y0)**2)
x, y = x1 + distance*(x1-x0)/(length), y1 + distance*(y1-y0)/(length)
elif direction == "Switch Pts":
switch(bondwire_name)
return None
    else:
        # unknown direction: do nothing (x, y would be undefined below)
        return None
oEditor.ChangeProperty(
[
"NAME:AllTabs",
[
"NAME:BaseElementTab",
[
"NAME:PropServers",
bondwire_name
],
[
"NAME:ChangedProps",
[
"NAME:Pt1",
"X:=" , "{}mm".format(x),
"Y:=" , "{}mm".format(y)
]
]
]
])
def getProfile():
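    # Group all bondwire names in the active design by their 'Profile' property value.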
profile = {}
for bondwire_name in oEditor.FindObjects('Type', 'bondwire'):
name = oEditor.GetPropertyValue("BaseElementTab", bondwire_name, 'Profile')
try:
profile[name]+=[bondwire_name]
except:
profile[name]=[bondwire_name]
return profile
#GUI---------------------------------------------------------------------------|
class MyWindow(Window):
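    """WPF dialog for moving or switching bondwires and selecting them by profile."""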
def __init__(self):
wpf.LoadComponent(self, 'MoveBondWire.xaml')
try:
with open('movebw.json') as f:
data = json.load(f)
self.direction_cb.Text = data['direction']
self.dist_tb.Text = data['dist']
except:
pass
self.profiles = getProfile()
for i in self.profiles:
self.profile_cb.Items.Add(i)
def direction_cb_SelectionChanged(self, sender, e):
if self.direction_cb.SelectedItem.Content == 'Switch Pts':
self.dist_tb.Visibility = Visibility.Hidden
self.move_bt.Content = 'Switch'
else:
self.dist_tb.Visibility = Visibility.Visible
self.move_bt.Content = 'Move'
def dist_tb_TextChanged(self, sender, e):
pass
def move_bt_Click(self, sender, e):
selected = oEditor.GetSelections()
for i in selected:
change(i, self.direction_cb.Text, float(self.dist_tb.Text))
data = {'direction': self.direction_cb.Text,
'dist': self.dist_tb.Text}
with open('movebw.json', 'w') as f:
json.dump(data, f, indent=4)
oEditor.Select(selected)
def profile_cb_SelectionChanged(self, sender, e):
AddWarningMessage(str(self.profiles[self.profile_cb.SelectedValue]))
oEditor.Select(self.profiles[self.profile_cb.SelectedValue])
'''
class MyWindow(Window):
def __init__(self):
wpf.LoadComponent(self, 'moveBondwire.xaml')
try:
with open('movebw.json') as f:
data = json.load(f)
self.direction_cb.Text = data['direction']
self.dist_tb.Text = data['dist']
except:
pass
self.profiles = getProfile()
for i in self.profiles:
self.profile_cb.Items.Add(i)
def Button_Click(self, sender, e):
selected = oEditor.GetSelections()
for i in selected:
change(i, self.direction_cb.Text, float(self.dist_tb.Text))
data = {'direction': self.direction_cb.Text,
'dist': self.dist_tb.Text}
with open('movebw.json', 'w') as f:
json.dump(data, f, indent=4)
oEditor.Select(selected)
def profile_cb_SelectionChanged(self, sender, e):
AddWarningMessage(str(self.profiles[self.profile_cb.SelectedValue]))
oEditor.Select(self.profiles[self.profile_cb.SelectedValue])
def direction_cb_SelectionChanged(self, sender, e):
if self.direction_cb.SelectedValue == "Switch Pts":
self.dist_tb.IsEnabled= False
'''
#Code End----------------------------------------------------------------------|
MyWindow().ShowDialog()
| [
"os.path.join",
"math.sqrt",
"os.path.dirname",
"clr.AddReference",
"wpf.LoadComponent",
"json.load",
"sys.path.append",
"json.dump"
] | [((122, 171), 'os.path.join', 'os.path.join', (['win64_dir', '"""common/IronPython/DLLs"""'], {}), "(win64_dir, 'common/IronPython/DLLs')\n", (134, 171), False, 'import os, sys, re, clr\n'), ((173, 197), 'sys.path.append', 'sys.path.append', (['dll_dir'], {}), '(dll_dir)\n', (188, 197), False, 'import os, sys, re, clr\n'), ((199, 233), 'clr.AddReference', 'clr.AddReference', (['"""IronPython.Wpf"""'], {}), "('IronPython.Wpf')\n", (215, 233), False, 'import os, sys, re, clr\n'), ((454, 479), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (469, 479), False, 'import os, sys, re, clr\n'), ((5392, 5436), 'wpf.LoadComponent', 'wpf.LoadComponent', (['self', '"""MoveBondWire.xaml"""'], {}), "(self, 'MoveBondWire.xaml')\n", (5409, 5436), False, 'import wpf\n'), ((6589, 6617), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)'}), '(data, f, indent=4)\n', (6598, 6617), False, 'import json\n'), ((4136, 4178), 'math.sqrt', 'math.sqrt', (['((x1 - x0) ** 2 + (y1 - y0) ** 2)'], {}), '((x1 - x0) ** 2 + (y1 - y0) ** 2)\n', (4145, 4178), False, 'import math, cmath\n'), ((5521, 5533), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5530, 5533), False, 'import json\n')] |
from django.test import Client, TestCase
from django.contrib.auth import authenticate
from users.models import CustomUser
from users.forms import CustomUserChangeForm
from users.custom_social_auth_pipeline import allowed_email
from django.test.utils import override_settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from drives.models import *
import RideOn.SeleniumTester as selenium_tester
class UserTester(TestCase):
"""
Social auth tests.
"""
def setUp(self):
pass
def testCreateValidNewUser(self):
self.user = CustomUser.objects.create_user(username='<EMAIL>', email='<EMAIL>', password='<PASSWORD>')
self.user.set_password('<PASSWORD>')
self.user.save()
self.assertEqual(CustomUser.objects.all().count(), 1)
def testCreateUserInvalidEmail(self):
self.user = CustomUser.objects.create_user(username='testuser', email='<EMAIL>', password='<PASSWORD>')
self.assertFalse(allowed_email(self.user.email))
def testUserLogin(self):
self.user = CustomUser.objects.create_user(username='testuser', email='<EMAIL>', password='<PASSWORD>')
self.user.set_password('<PASSWORD>')
self.user.save()
authenticated_user = authenticate(username='testuser', password='<PASSWORD>')
self.assertEqual(self.user, authenticated_user)
class UserEditsTester(TestCase):
def setUp(self):
self.user = CustomUser.objects.create_user(username='<EMAIL>', email='<EMAIL>', password='<PASSWORD>')
def testFormEditAllFieldsValid(self):
form = CustomUserChangeForm(data={'username':'John_Doe' , 'gender':'Male', 'phone':'+17037561234', 'about':'Some About'})
self.assertTrue(form.is_valid())
def testFormEditInvalidPhone(self):
form = CustomUserChangeForm(data={'username':'John_Doe' , 'gender':'Male', 'phone':'+1703756123w', 'about':'Some About'})
self.assertFalse(form.is_valid())
def testProfileEditAllFieldsValid(self):
local_fields = ['username', 'gender', 'phone', 'about']
correct_data = {'username':'striker417' , 'gender':'Male', 'phone':'+17037561234', 'about':'I am a god at soccer'}
self.form = CustomUserChangeForm(data={'username':'striker417' , 'gender':'Male', 'phone':'+17037561234', 'about':'I am a god at soccer'}, instance=self.user)
if self.form.is_valid():
self.form.save()
for field in local_fields:
self.assertTrue(getattr(self.user, field, '') == correct_data[field] )
'''
Used to test the review sub-system from a GUI perspective.
Verifies underlying models as well.
Does not test API endpoints.
'''
@override_settings(DEBUG=True)
class UserTestReviews(StaticLiveServerTestCase):
# Setup for all tests
# Creates a sample drive with default values and none of the optional values
def setUp(self):
self.start_location, self.end_location, self.driver, self.drive, self.dropoff = create_drive(
"Name", start_location_str = "Start Location", end_location_str = "End Location",
title_str = "Drive Title", description_str = "Drive Description")
self.start_location2, self.end_location2, self.driver2, self.drive2, self.dropoff2 = create_drive(
"Name2", start_location_str = "Start Location2", end_location_str = "End Location2",
title_str = "Drive Title2", description_str = "Drive Description2")
self.browser = selenium_tester.create_chrome_driver()
self.browser.get(self.live_server_url)
def tearDown(self):
self.browser.close()
# Ensures driver review exists when drive is completed
def testDriverReviewExists(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver2)
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id) + '/myrides/')
        # Verify that the driver review button exists
driver_review = self.browser.find_element_by_id("driverReviewBtn")
self.assertEqual(driver_review.text, "Review " + self.driver.username)
# Ensures driver review does not exist for driver
def testDriverReviewNotExist(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as driver and go to reviews
# View the ride as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver)
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id) + '/myrides/')
        # Verify that the driver review button does not exist
driver_review = selenium_tester.safe_find_element_by_id(self.browser, "driverReviewBtn")
self.assertEqual(driver_review, None)
# Ensures passenger review exists when drive is completed
def testPassengerReviewExists(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver)
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id) + '/myrides/')
        # Verify that the passenger review button exists
passenger_review = self.browser.find_element_by_id("passengerReviewBtn")
self.assertEqual(passenger_review.text, "Review " + self.driver2.username)
# Ensures passenger review does not exist for passenger
def testPassengerReviewNotExist(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as driver and go to reviews
# View the ride as passenger
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver2)
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id) + '/myrides/')
        # Verify that the passenger review button does not exist
passenger_review = selenium_tester.safe_find_element_by_id(self.browser, "passengerReviewBtn")
self.assertEqual(passenger_review, None)
def submitReview(self):
# Sleep to make sure the modal loads properly
import time
time.sleep(3)
title = self.browser.find_element_by_id("reviewTitle")
title.send_keys("Test Title")
rating = self.browser.find_element_by_id("reviewRating")
rating.send_keys("4")
description = self.browser.find_element_by_id("reviewDescription")
description.send_keys("Test Description")
submit = self.browser.find_element_by_id("reviewSubmitBtn")
submit.click()
# Sleep to make sure the modal loads properly
import time
time.sleep(3)
# Ensures driver review submission works
def testDriverReviewSubmits(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver2)
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id) + '/myrides/')
# Submit a review
driver_review = self.browser.find_element_by_id("driverReviewBtn")
driver_review.click()
self.submitReview()
# Ensures passenger review submission works
def testPassengerReviewSubmits(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver)
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id) + '/myrides/')
# Submit a review
passenger_review = self.browser.find_element_by_id("passengerReviewBtn")
passenger_review.click()
self.submitReview()
# Asserts that after the review is submitted
# the button to submit no longer shows
def testPassengerReviewGone(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver)
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id) + '/myrides/')
# Submit a review
passenger_review = self.browser.find_element_by_id("passengerReviewBtn")
passenger_review.click()
self.submitReview()
# Assert that the submit button is gone
passenger_review = selenium_tester.safe_find_element_by_id(self.browser, "passengerReviewBtn")
self.assertEqual(passenger_review, None)
# Asserts that after the review is submitted
# the button to submit no longer shows
def testDriverReviewGone(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver2)
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id) + '/myrides/')
# Submit a review
driver_review = self.browser.find_element_by_id("driverReviewBtn")
driver_review.click()
self.submitReview()
# Assert that the submit button is gone
driver_review = selenium_tester.safe_find_element_by_id(self.browser, "driverReviewBtn")
self.assertEqual(driver_review, None)
# Asserts that the driver rating changes after a review
# is submitted
def testDriverRatingChanges(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver2)
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id) + '/myrides/')
# Submit a review
driver_review = self.browser.find_element_by_id("driverReviewBtn")
driver_review.click()
self.submitReview()
# View the rating
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id))
rating = self.browser.find_element_by_id("driverRating")
self.assertEqual(rating.text.strip(), "4.00")
def testPassengerRatingChanges(self):
# Add passenger and complete drive
self.drive.add_passenger(self.driver2)
Drive.objects.filter(id=self.drive.id).update(status="Completed")
# Login as passenger and go to reviews
# View the ride not as the owner
self.browser.delete_all_cookies()
selenium_tester.login_as(self.browser, self.driver)
self.browser.get(self.live_server_url + '/users/' + str(self.driver.id) + '/myrides/')
# Submit a review
passenger_review = self.browser.find_element_by_id("passengerReviewBtn")
passenger_review.click()
self.submitReview()
# View the rating
self.browser.get(self.live_server_url + '/users/' + str(self.driver2.id))
rating = self.browser.find_element_by_id("riderRating")
self.assertEqual(rating.text.strip(), "4.00")
| [
"django.contrib.auth.authenticate",
"RideOn.SeleniumTester.safe_find_element_by_id",
"users.models.CustomUser.objects.create_user",
"time.sleep",
"RideOn.SeleniumTester.create_chrome_driver",
"users.models.CustomUser.objects.all",
"users.forms.CustomUserChangeForm",
"users.custom_social_auth_pipeline.allowed_email",
"django.test.utils.override_settings",
"RideOn.SeleniumTester.login_as"
] | [((2710, 2739), 'django.test.utils.override_settings', 'override_settings', ([], {'DEBUG': '(True)'}), '(DEBUG=True)\n', (2727, 2739), False, 'from django.test.utils import override_settings\n'), ((585, 679), 'users.models.CustomUser.objects.create_user', 'CustomUser.objects.create_user', ([], {'username': '"""<EMAIL>"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='<EMAIL>', email='<EMAIL>',\n password='<PASSWORD>')\n", (615, 679), False, 'from users.models import CustomUser\n'), ((879, 974), 'users.models.CustomUser.objects.create_user', 'CustomUser.objects.create_user', ([], {'username': '"""testuser"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='testuser', email='<EMAIL>',\n password='<PASSWORD>')\n", (909, 974), False, 'from users.models import CustomUser\n'), ((1082, 1177), 'users.models.CustomUser.objects.create_user', 'CustomUser.objects.create_user', ([], {'username': '"""testuser"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='testuser', email='<EMAIL>',\n password='<PASSWORD>')\n", (1112, 1177), False, 'from users.models import CustomUser\n'), ((1282, 1338), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': '"""testuser"""', 'password': '"""<PASSWORD>"""'}), "(username='testuser', password='<PASSWORD>')\n", (1294, 1338), False, 'from django.contrib.auth import authenticate\n'), ((1474, 1568), 'users.models.CustomUser.objects.create_user', 'CustomUser.objects.create_user', ([], {'username': '"""<EMAIL>"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='<EMAIL>', email='<EMAIL>',\n password='<PASSWORD>')\n", (1504, 1568), False, 'from users.models import CustomUser\n'), ((1623, 1744), 'users.forms.CustomUserChangeForm', 'CustomUserChangeForm', ([], {'data': "{'username': 'John_Doe', 'gender': 'Male', 'phone': '+17037561234', 'about':\n 'Some About'}"}), "(data={'username': 'John_Doe', 'gender': 'Male',\n 'phone': '+17037561234', 'about': 'Some About'})\n", (1643, 1744), False, 'from users.forms import CustomUserChangeForm\n'), ((1835, 1956), 'users.forms.CustomUserChangeForm', 'CustomUserChangeForm', ([], {'data': "{'username': 'John_Doe', 'gender': 'Male', 'phone': '+1703756123w', 'about':\n 'Some About'}"}), "(data={'username': 'John_Doe', 'gender': 'Male',\n 'phone': '+1703756123w', 'about': 'Some About'})\n", (1855, 1956), False, 'from users.forms import CustomUserChangeForm\n'), ((2245, 2403), 'users.forms.CustomUserChangeForm', 'CustomUserChangeForm', ([], {'data': "{'username': 'striker417', 'gender': 'Male', 'phone': '+17037561234',\n 'about': 'I am a god at soccer'}", 'instance': 'self.user'}), "(data={'username': 'striker417', 'gender': 'Male',\n 'phone': '+17037561234', 'about': 'I am a god at soccer'}, instance=\n self.user)\n", (2265, 2403), False, 'from users.forms import CustomUserChangeForm\n'), ((3443, 3481), 'RideOn.SeleniumTester.create_chrome_driver', 'selenium_tester.create_chrome_driver', ([], {}), '()\n', (3479, 3481), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((3927, 3979), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver2'], {}), '(self.browser, self.driver2)\n', (3951, 3979), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((4609, 4660), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver'], {}), '(self.browser, self.driver)\n', (4633, 4660), True, 'import RideOn.SeleniumTester as selenium_tester\n'), 
((4819, 4891), 'RideOn.SeleniumTester.safe_find_element_by_id', 'selenium_tester.safe_find_element_by_id', (['self.browser', '"""driverReviewBtn"""'], {}), "(self.browser, 'driverReviewBtn')\n", (4858, 4891), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((5291, 5342), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver'], {}), '(self.browser, self.driver)\n', (5315, 5342), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((5990, 6042), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver2'], {}), '(self.browser, self.driver2)\n', (6014, 6042), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((6205, 6280), 'RideOn.SeleniumTester.safe_find_element_by_id', 'selenium_tester.safe_find_element_by_id', (['self.browser', '"""passengerReviewBtn"""'], {}), "(self.browser, 'passengerReviewBtn')\n", (6244, 6280), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((6420, 6433), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (6430, 6433), False, 'import time\n'), ((6876, 6889), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (6886, 6889), False, 'import time\n'), ((7233, 7285), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver2'], {}), '(self.browser, self.driver2)\n', (7257, 7285), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((7864, 7915), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver'], {}), '(self.browser, self.driver)\n', (7888, 7915), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((8541, 8592), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver'], {}), '(self.browser, self.driver)\n', (8565, 8592), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((8895, 8970), 'RideOn.SeleniumTester.safe_find_element_by_id', 'selenium_tester.safe_find_element_by_id', (['self.browser', '"""passengerReviewBtn"""'], {}), "(self.browser, 'passengerReviewBtn')\n", (8934, 8970), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((9400, 9452), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver2'], {}), '(self.browser, self.driver2)\n', (9424, 9452), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((9744, 9816), 'RideOn.SeleniumTester.safe_find_element_by_id', 'selenium_tester.safe_find_element_by_id', (['self.browser', '"""driverReviewBtn"""'], {}), "(self.browser, 'driverReviewBtn')\n", (9783, 9816), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((10232, 10284), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver2'], {}), '(self.browser, self.driver2)\n', (10256, 10284), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((11026, 11077), 'RideOn.SeleniumTester.login_as', 'selenium_tester.login_as', (['self.browser', 'self.driver'], {}), '(self.browser, self.driver)\n', (11050, 11077), True, 'import RideOn.SeleniumTester as selenium_tester\n'), ((996, 1026), 'users.custom_social_auth_pipeline.allowed_email', 'allowed_email', (['self.user.email'], {}), '(self.user.email)\n', (1009, 1026), False, 'from users.custom_social_auth_pipeline import allowed_email\n'), ((771, 795), 'users.models.CustomUser.objects.all', 'CustomUser.objects.all', ([], {}), '()\n', (793, 795), False, 'from users.models import CustomUser\n')] |
from django.db import models
from backend.models.driver import Driver
from backend.models.client import Client
from backend.models.routeList import RouteList
class Route(models.Model):
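    """A route assigned to a driver, storing aggregate totals and the itinerary as JSON."""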
id = models.AutoField(primary_key=True)
assigned_to = models.ForeignKey(Driver, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True, editable=False)
total_quantity = models.PositiveIntegerField(blank=True, null=True)
total_distance = models.DecimalField(max_digits=40, decimal_places=20, blank=True, null=True)
total_duration = models.DecimalField(max_digits=40, decimal_places=20, blank=True, null=True)
itinerary = models.JSONField(null=True)
route_list = models.ForeignKey(RouteList, related_name='routes', on_delete=models.CASCADE, blank=True, null=True)
| [
"django.db.models.ForeignKey",
"django.db.models.JSONField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField"
] | [((196, 230), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (212, 230), False, 'from django.db import models\n'), ((249, 300), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Driver'], {'on_delete': 'models.CASCADE'}), '(Driver, on_delete=models.CASCADE)\n', (266, 300), False, 'from django.db import models\n'), ((318, 373), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'editable': '(False)'}), '(auto_now_add=True, editable=False)\n', (338, 373), False, 'from django.db import models\n'), ((395, 445), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (422, 445), False, 'from django.db import models\n'), ((467, 543), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(40)', 'decimal_places': '(20)', 'blank': '(True)', 'null': '(True)'}), '(max_digits=40, decimal_places=20, blank=True, null=True)\n', (486, 543), False, 'from django.db import models\n'), ((565, 641), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(40)', 'decimal_places': '(20)', 'blank': '(True)', 'null': '(True)'}), '(max_digits=40, decimal_places=20, blank=True, null=True)\n', (584, 641), False, 'from django.db import models\n'), ((658, 685), 'django.db.models.JSONField', 'models.JSONField', ([], {'null': '(True)'}), '(null=True)\n', (674, 685), False, 'from django.db import models\n'), ((703, 808), 'django.db.models.ForeignKey', 'models.ForeignKey', (['RouteList'], {'related_name': '"""routes"""', 'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), "(RouteList, related_name='routes', on_delete=models.\n CASCADE, blank=True, null=True)\n", (720, 808), False, 'from django.db import models\n')] |
from datetime import datetime
from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint
from sqlalchemy.orm import relationship
from .base import Base
class SpeciesStatusRating(Base):
"""
    Class representing a conservation status scheme rating
"""
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
DISPLAY_DATE_FORMAT = "%d/%m/%Y"
IMPORT_DATE_FORMAT = "%d/%m/%Y"
__tablename__ = "SpeciesStatusRatings"
__table_args__ = (CheckConstraint("LENGTH(TRIM(region)) > 0"),
CheckConstraint("(end IS NULL) or (end >= start)"))
#: Primary key
id = Column(Integer, primary_key=True)
#: Related Species Id
speciesId = Column(Integer, ForeignKey("Species.id"), nullable=False)
#: Related Rating Id
statusRatingId = Column(Integer, ForeignKey("StatusRatings.id"), nullable=False)
#: Region where the rating applies
region = Column(String, nullable=False)
#: Start date for the rating. The database is shared between .NET and Python code and Entity Framework
#: creates a TEXT column in SQLite where data's written in the form YYYY-MM-DD HH:MM:SS. So, while
#: this field is the one that's persisted to the DB the intention is that it should be accessed via
#: the corresponding property
start = Column(String, nullable=False)
#: End date for the rating - see comments about the start date
end = Column(String, nullable=True)
#: Related species
species = relationship("Species", lazy="joined")
#: Related status rating
rating = relationship("StatusRating", lazy="joined")
def __repr__(self):
return f"{type(self).__name__}(id={self.id!r}, " \
f"speciesId={self.speciesId!r}, " \
f"statusRatingId={self.statusRatingId!r}, " \
f"region={self.region!r}, " \
f"start={self.start!r}," \
f"end={self.end!r})"
@property
def start_date(self):
return datetime.strptime(self.start, self.DATE_FORMAT).date()
@start_date.setter
def start_date(self, value):
self.start = value.strftime(self.DATE_FORMAT) if value else None
@property
def end_date(self):
return datetime.strptime(self.end, self.DATE_FORMAT).date() if self.end is not None else None
@end_date.setter
def end_date(self, value):
self.end = value.strftime(self.DATE_FORMAT) if value else None
@property
def display_start_date(self):
return self.start_date.strftime(self.DISPLAY_DATE_FORMAT)
@property
def display_end_date(self):
date = self.end_date
return date.strftime(self.DISPLAY_DATE_FORMAT) if date else None
| [
"sqlalchemy.orm.relationship",
"datetime.datetime.strptime",
"sqlalchemy.ForeignKey",
"sqlalchemy.CheckConstraint",
"sqlalchemy.Column"
] | [((608, 641), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (614, 641), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((904, 934), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (910, 934), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((1295, 1325), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(False)'}), '(String, nullable=False)\n', (1301, 1325), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((1403, 1432), 'sqlalchemy.Column', 'Column', (['String'], {'nullable': '(True)'}), '(String, nullable=True)\n', (1409, 1432), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((1471, 1509), 'sqlalchemy.orm.relationship', 'relationship', (['"""Species"""'], {'lazy': '"""joined"""'}), "('Species', lazy='joined')\n", (1483, 1509), False, 'from sqlalchemy.orm import relationship\n'), ((1552, 1595), 'sqlalchemy.orm.relationship', 'relationship', (['"""StatusRating"""'], {'lazy': '"""joined"""'}), "('StatusRating', lazy='joined')\n", (1564, 1595), False, 'from sqlalchemy.orm import relationship\n'), ((460, 503), 'sqlalchemy.CheckConstraint', 'CheckConstraint', (['"""LENGTH(TRIM(region)) > 0"""'], {}), "('LENGTH(TRIM(region)) > 0')\n", (475, 503), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((527, 577), 'sqlalchemy.CheckConstraint', 'CheckConstraint', (['"""(end IS NULL) or (end >= start)"""'], {}), "('(end IS NULL) or (end >= start)')\n", (542, 577), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((700, 724), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Species.id"""'], {}), "('Species.id')\n", (710, 724), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((804, 834), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""StatusRatings.id"""'], {}), "('StatusRatings.id')\n", (814, 834), False, 'from sqlalchemy import Column, Integer, String, ForeignKey, CheckConstraint\n'), ((1971, 2018), 'datetime.datetime.strptime', 'datetime.strptime', (['self.start', 'self.DATE_FORMAT'], {}), '(self.start, self.DATE_FORMAT)\n', (1988, 2018), False, 'from datetime import datetime\n'), ((2210, 2255), 'datetime.datetime.strptime', 'datetime.strptime', (['self.end', 'self.DATE_FORMAT'], {}), '(self.end, self.DATE_FORMAT)\n', (2227, 2255), False, 'from datetime import datetime\n')] |
import numpy as np
# Part one
boards = []
with open('input.txt') as f:
numbers_drawn = [int(i) for i in f.readline().split(',')]
board = []
for line in f:
if line.strip():
board.append(np.array([int(i) for i in line.split()]))
else:
if board:
boards.append(np.array(board))
board = []
boards.append(np.array(board))
def check(board):
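    # A board wins when a full row or column is marked; marked cells are set to -1,
    # so a winning line sums to minus its length.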
def sum_unmarked():
return board[board != -1].sum()
for row in board:
if row.sum() == -board.shape[1]:
return sum_unmarked()
for col in board.T:
if col.sum() == -board.shape[0]:
return sum_unmarked()
def play(until='first win'):
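    # Mark each drawn number as -1 on every board and return the winning board's
    # score (sum of unmarked numbers times the last drawn number), either for the
    # first win or, with until='last win', for the last board to win.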
wins = 0
for n in numbers_drawn:
for i, board in enumerate(boards):
if board is None:
continue
board[board == n] = -1
if (s := check(board)):
wins += 1
score = s*n
if until == 'first win' or wins == len(boards):
return score
boards[i] = None
score = play()
print('part one:', score)
# Part two
score = play('last win')
print('part two:', score)
| [
"numpy.array"
] | [((384, 399), 'numpy.array', 'np.array', (['board'], {}), '(board)\n', (392, 399), True, 'import numpy as np\n'), ((326, 341), 'numpy.array', 'np.array', (['board'], {}), '(board)\n', (334, 341), True, 'import numpy as np\n')] |
import click
def role_id_arg(f):
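    """Decorator that adds a ROLE_ID argument to a click command."""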
return click.argument("role_id")(f)
| [
"click.argument"
] | [((46, 71), 'click.argument', 'click.argument', (['"""role_id"""'], {}), "('role_id')\n", (60, 71), False, 'import click\n')] |
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import PoseStamped
from styx_msgs.msg import Lane, Waypoint
from std_msgs.msg import Int32
from scipy.spatial import KDTree
import numpy as np
import math
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
#LOOKAHEAD_WPS = 200 # Number of waypoints we will publish. You can change this number
LOOKAHEAD_WPS = 100 # Number of waypoints we will publish. You can change this number
MAX_DECEL = 0.5
class WaypointUpdater(object):
def __init__(self):
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        # base_waypoints are static and will not change, so the topic
        # is subscribed to only once
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
# get the traffic light info to adjust final published way points
# in case need to slow down and stop at the traffic light stop line
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# TODO: Add other member variables you need below
self.pose = None
self.base_lane = None
self.waypoints_2d = None
self.waypoint_tree = None
self.stopline_wp_idx = -1
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
self.loop()
def loop(self):
rate = rospy.Rate(20)
while not rospy.is_shutdown():
if self.pose and self.base_lane:
# get the closest waypoints
closest_waypoint_idx = self.get_closest_waypoint_idx()
self.publish_waypoints(closest_waypoint_idx)
rate.sleep()
def get_closest_waypoint_idx(self):
# get the currenty position of the car
x = self.pose.pose.position.x
y = self.pose.pose.position.y
        # find the index of the waypoint that is closest to the current car position
        # we ask KDTree to return 1 closest point; it returns the position as well as the index of
        # the closest waypoint, and the index is in the same order as we constructed the KDTree
closest_idx = self.waypoint_tree.query([x,y],1)[1]
        # check whether the closest waypoint is ahead of or behind the car
closest_coord = self.waypoints_2d[closest_idx]
prev_coord = self.waypoints_2d[closest_idx-1]
        # equation for hyperplane through closest_coord:
        # test whether the vector from prev_vect to cl_vect points in the same
        # direction as the vector from cl_vect to the current position
cl_vect = np.array(closest_coord)
prev_vect = np.array(prev_coord)
pos_vect = np.array([x,y])
val = np.dot(cl_vect-prev_vect,pos_vect-cl_vect)
        if val > 0: # positive dot product: the closest point is behind the car
            # we need to use the next point instead; modulo len to wrap around
closest_idx = (closest_idx + 1) % len(self.waypoints_2d)
return closest_idx
def publish_waypoints(self, closest_idx):
final_lane = self.generate_lane()
self.final_waypoints_pub.publish(final_lane)
"""
lane = Lane()
lane.header = self.base_lane.header # same format we do not need hearder anyway
# no need to worry about the greater than the base waypoint len since python slice will
# just slice to the end if the lengths is greater
lane.waypoints = self.base_lane.waypoints[closest_idx: closest_idx + LOOKAHEAD_WPS]
self.final_waypoints_pub.publish(lane)
"""
def generate_lane(self):
lane = Lane()
closest_idx = self.get_closest_waypoint_idx()
farthest_idx = closest_idx + LOOKAHEAD_WPS
base_waypoints = self.base_lane.waypoints[closest_idx:farthest_idx]
# if there is no traffic light or the traffic light is further away
        # than the farthest planned waypoint, we just publish the base_waypoints ahead of the car
if self.stopline_wp_idx == -1 or (self.stopline_wp_idx >= farthest_idx):
lane.waypoints = base_waypoints
        # else there is a red traffic light in the planning route and we need to decelerate
# to stop at the traffic light stop line
else:
lane.waypoints = self.decelerate_waypoints(base_waypoints,closest_idx)
return lane
def decelerate_waypoints(self,waypoints, closest_idx):
temp = []
for i , wp in enumerate(waypoints):
p = Waypoint()
p.pose = wp.pose
# since the current waypoint of the car is based on the centre of the car
            # we want to go back 2 waypoints so that the front of the car stops at the
# stop line instead of the centre of the car
stop_idx = max(self.stopline_wp_idx - closest_idx - 2,0)
            # calculate the distance from the current waypoint to the stopping point
dist = self.distance(waypoints,i,stop_idx)
            # based on the distance to the stopping point we fit a sqrt curve
            # for smooth deceleration
# could use just a linear factor as well
vel = math.sqrt(2*MAX_DECEL * dist)
if vel < 1:
vel = 0.0
# when the distance is large the sqrt computed velocity could be
# large as well, so we need to cap it with the original velocity
p.twist.twist.linear.x = min(vel, wp.twist.twist.linear.x)
temp.append(p)
return temp
def pose_cb(self, msg):
# TODO: Implement
        self.pose = msg # store the car's pose (published at about 50 Hz)
def waypoints_cb(self, waypoints):
# TODO: Implement
        # base_waypoints will be received only once since the base waypoints
        # would not change, so they are stored in the class
self.base_lane = waypoints
if not self.waypoints_2d:
# just to get the coordinates of the waypoints (x,y)
self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] \
for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d) # construct a KDTree using the 2d waypoints
def traffic_cb(self, msg):
# TODO: Callback for /traffic_waypoint message. Implement
self.stopline_wp_idx = msg.data
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| [
"rospy.logerr",
"rospy.Subscriber",
"rospy.is_shutdown",
"rospy.init_node",
"scipy.spatial.KDTree",
"math.sqrt",
"numpy.array",
"numpy.dot",
"rospy.Rate",
"styx_msgs.msg.Waypoint",
"rospy.Publisher",
"styx_msgs.msg.Lane"
] | [((1102, 1137), 'rospy.init_node', 'rospy.init_node', (['"""waypoint_updater"""'], {}), "('waypoint_updater')\n", (1117, 1137), False, 'import rospy\n'), ((1147, 1207), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/current_pose"""', 'PoseStamped', 'self.pose_cb'], {}), "('/current_pose', PoseStamped, self.pose_cb)\n", (1163, 1207), False, 'import rospy\n'), ((1328, 1388), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/base_waypoints"""', 'Lane', 'self.waypoints_cb'], {}), "('/base_waypoints', Lane, self.waypoints_cb)\n", (1344, 1388), False, 'import rospy\n'), ((1632, 1693), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/traffic_waypoint"""', 'Int32', 'self.traffic_cb'], {}), "('/traffic_waypoint', Int32, self.traffic_cb)\n", (1648, 1693), False, 'import rospy\n'), ((1946, 2000), 'rospy.Publisher', 'rospy.Publisher', (['"""final_waypoints"""', 'Lane'], {'queue_size': '(1)'}), "('final_waypoints', Lane, queue_size=1)\n", (1961, 2000), False, 'import rospy\n'), ((2058, 2072), 'rospy.Rate', 'rospy.Rate', (['(20)'], {}), '(20)\n', (2068, 2072), False, 'import rospy\n'), ((3209, 3232), 'numpy.array', 'np.array', (['closest_coord'], {}), '(closest_coord)\n', (3217, 3232), True, 'import numpy as np\n'), ((3253, 3273), 'numpy.array', 'np.array', (['prev_coord'], {}), '(prev_coord)\n', (3261, 3273), True, 'import numpy as np\n'), ((3293, 3309), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (3301, 3309), True, 'import numpy as np\n'), ((3324, 3371), 'numpy.dot', 'np.dot', (['(cl_vect - prev_vect)', '(pos_vect - cl_vect)'], {}), '(cl_vect - prev_vect, pos_vect - cl_vect)\n', (3330, 3371), True, 'import numpy as np\n'), ((4244, 4250), 'styx_msgs.msg.Lane', 'Lane', ([], {}), '()\n', (4248, 4250), False, 'from styx_msgs.msg import Lane, Waypoint\n'), ((2091, 2110), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2108, 2110), False, 'import rospy\n'), ((5132, 5142), 'styx_msgs.msg.Waypoint', 'Waypoint', ([], {}), '()\n', (5140, 5142), False, 'from styx_msgs.msg import Lane, Waypoint\n'), ((5822, 5853), 'math.sqrt', 'math.sqrt', (['(2 * MAX_DECEL * dist)'], {}), '(2 * MAX_DECEL * dist)\n', (5831, 5853), False, 'import math\n'), ((6843, 6868), 'scipy.spatial.KDTree', 'KDTree', (['self.waypoints_2d'], {}), '(self.waypoints_2d)\n', (6849, 6868), False, 'from scipy.spatial import KDTree\n'), ((7491, 7556), 'math.sqrt', 'math.sqrt', (['((a.x - b.x) ** 2 + (a.y - b.y) ** 2 + (a.z - b.z) ** 2)'], {}), '((a.x - b.x) ** 2 + (a.y - b.y) ** 2 + (a.z - b.z) ** 2)\n', (7500, 7556), False, 'import math\n'), ((7825, 7879), 'rospy.logerr', 'rospy.logerr', (['"""Could not start waypoint updater node."""'], {}), "('Could not start waypoint updater node.')\n", (7837, 7879), False, 'import rospy\n')] |
import os
import logging
import pandas as pd
import src.tests.testconfig as tc
from collections import namedtuple
from src.parsedata import PDFParser
from src.capitalgains import CapitalGains
from src.initialize import Logger
class MFTestError(Exception):
pass
class UnitTest():
def __init__(self, show_logs:bool = False):
logger = Logger()
logger.initilaze_logging('no')
logging.info(f'{"*" * 50}Start Of Unit Test{"*" * 50}')
def _is_hdr_output_ok(self, file_path_cas:str, file_path_hdr_out:str, password:str )->bool:
'''
        Compares the sum of units to sell from the csv output file
'''
dp = PDFParser()
mf_trans_df, mf_hdr_df = dp.parse_mf_data(file_path_cas, password)
target_ltcg = 100000
cg = CapitalGains(mf_hdr_df, mf_trans_df)
cg.prepare_final_data(target_ltcg)
hdr_df_act = cg.output_hdr_df
#convert from decimal to Float
target_units_sum_act = float(hdr_df_act.target_units.sum())
# Read expected data
hdr_df_exp = pd.read_csv(file_path_hdr_out)
target_units_sum_exp = round(hdr_df_exp.target_units.sum(), 3)
if target_units_sum_act == target_units_sum_exp:
return True
else:
logging.error(f"Target Units Do not Match: Expected {target_units_sum_exp}, Got {target_units_sum_act}")
return False
def run_basic_test(self):
print("running Basic Test:", end = " ")
file_path_cas = os.path.join( tc.unit_test_base_dir, 'cas_jul_29_21.pdf')
file_path_actual = os.path.join( tc.unit_test_base_dir, 'output_mf_totals_jul_29_21.csv')
logging.info(f"running Basic Test for input file {file_path_cas}")
ok = self._is_hdr_output_ok(file_path_cas,file_path_actual, tc.password)
res = tc.PASS if ok else tc.FAIL
print(f"{res}")
logging.info(f"Result:{res}\n")
if not ok:
msg = f"Test Failed for input file {file_path_cas}, output file {file_path_actual}"
logging.error(msg)
raise MFTestError(msg)
def test_redemption(self):
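        """Verify that the redeemed units per scheme match the expected totals."""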
print("running test for rdemeption", end = " ")
dp = PDFParser()
file_path_cas = os.path.join( tc.unit_test_base_dir, 'cas_jul_19_21.pdf')
logging.info(f"running test for rdemeption for file {file_path_cas}")
mf_trans_df, mf_hdr_df = dp.parse_mf_data(file_path_cas, tc.password)
cg = CapitalGains(mf_hdr_df, mf_trans_df)
test_data = []
Data = namedtuple('Data', ['scheme_name', 'redeemed_units'])
icici_nf = Data('ICICI Prudential Nifty Next 50 Index Fund - Direct Plan - Growth', 11130.000 )
test_data.append(icici_nf)
dsp_scf = Data('DSP Small Cap Fund - Direct Plan - Growth', 0)
test_data.append(dsp_scf)
for data in test_data:
mf_trans_single_df = cg.calc_redeemed_units(data.scheme_name, mf_trans_df)
redeemed_units = mf_trans_single_df.units_redeemed.sum()
if not redeemed_units == data.redeemed_units:
res = tc.FAIL
print(f"{res}")
msg = f"Test Failed for input file { file_path_cas}, scheme {data.scheme_name }: Expected Redeemmed units {data.redeemed_units} , Got {redeemed_units}"
logging.error(msg)
raise MFTestError(msg)
res = tc.PASS
print(f"{res}")
logging.info(f"Result:{res}\n")
def test_invested_current_amounts(self):
print("running test for Current Amount and Invested Amount :", end = " ")
dp = PDFParser()
file_path_cas = os.path.join( tc.unit_test_base_dir, 'cas_jul_19_21.pdf')
logging.info(f"running test for Current Amount and Invested Amount for file {file_path_cas}")
mf_trans_df, mf_hdr_df = dp.parse_mf_data(file_path_cas, tc.password)
cg = CapitalGains(mf_hdr_df, mf_trans_df)
cg._set_gf_nav()
mf_trans_df = cg.mf_trans_df
test_data = []
Data = namedtuple('Data', ['scheme_name', 'invested_amt', 'current_amt'])
icici_nf = Data('ICICI Prudential Nifty Next 50 Index Fund - Direct Plan - Growth', 277741, 411315 )
test_data.append(icici_nf)
ft_ldf = Data('Franklin India Low Duration Fund - Direct Plan - Growth', 33068, 42704 )
test_data.append(ft_ldf)
dsp_scf = Data('DSP Small Cap Fund - Direct Plan - Growth', 759990, 1405390 )
test_data.append(dsp_scf)
# Method to test
mf_trans_df = cg.set_units_amts_for_redemptions(mf_trans_df)
ok = True
for data in test_data:
mf_trans_single = mf_trans_df[mf_trans_df.scheme_name == data.scheme_name]
total_invested_amt = round(mf_trans_single.invested_amt.sum(), 0)
total_current_amt = round(mf_trans_single.current_amt.sum(), 0)
if total_invested_amt != data.invested_amt:
msg = ( f"Test Failed for input file { file_path_cas}, scheme {data.scheme_name }:\n"
f"expected invested amount: {data.invested_amt}, got {total_invested_amt}" )
ok = False
break
if total_current_amt != data.current_amt:
msg = ( f"Test Failed for input file { file_path_cas}, scheme {data.scheme_name }:\n"
f"expected current amount: {data.current_amt}, got {total_current_amt}" )
ok = False
break
res = tc.PASS if ok else tc.FAIL
print(res)
logging.info(f"Result:{res}\n")
if not ok:
logging.error(msg)
raise MFTestError(msg)
def test_hdr_amounts(self):
'''
Test Aggregate Invested and Current Amount for all MF schemes
This has been validated against VRO portfolio on 30-07-2021
'''
print("running test for Aggregate Current Amount and Invested Amount :", end = " ")
file_path_cas = os.path.join( tc.unit_test_base_dir, 'cas_jul_29_21.pdf')
logging.info(f"running test for Aggregate Current Amount and Invested Amount for file {file_path_cas}")
dp =PDFParser()
mf_trans_df, mf_hdr_df = dp.parse_mf_data(file_path_cas, password=tc.password)
cg = CapitalGains(mf_hdr_df, mf_trans_df)
cg._set_gf_nav()
cg.mf_trans_df = cg.set_units_amts_for_redemptions(cg.mf_trans_df)
cg.mf_trans_df= cg._calculate_capital_gains(cg.mf_trans_df)
# Method to be tested
mf_hdr_df = cg.prepare_hdr_data(cg.mf_hdr_df, cg.mf_trans_df)
invested_amt_act = round(mf_hdr_df.invested_amt.sum(), 0)
invested_amt_exp = 8666344.0
current_amt_act = round(mf_hdr_df.current_amt.sum(), 0)
current_amt_exp = 12911991.0
ok = True
if invested_amt_act != invested_amt_exp:
msg = ( f"Test Failed for input file { file_path_cas}: "
f"expected invested amount: { invested_amt_exp}, got: {invested_amt_act}" )
ok = False
if current_amt_act != current_amt_exp:
msg = ( f"Test Failed for input file { file_path_cas}: "
f"expected invested amount: { current_amt_exp}, got: {current_amt_act}" )
ok = False
res = tc.PASS if ok else tc.FAIL
print(res)
logging.info(f"Result:{res}\n")
if not ok:
logging.error(msg)
raise MFTestError(msg)
| [
"collections.namedtuple",
"src.initialize.Logger",
"pandas.read_csv",
"os.path.join",
"logging.info",
"src.parsedata.PDFParser",
"src.capitalgains.CapitalGains",
"logging.error"
] | [((358, 366), 'src.initialize.Logger', 'Logger', ([], {}), '()\n', (364, 366), False, 'from src.initialize import Logger\n'), ((418, 473), 'logging.info', 'logging.info', (['f"""{\'*\' * 50}Start Of Unit Test{\'*\' * 50}"""'], {}), '(f"{\'*\' * 50}Start Of Unit Test{\'*\' * 50}")\n', (430, 473), False, 'import logging\n'), ((675, 686), 'src.parsedata.PDFParser', 'PDFParser', ([], {}), '()\n', (684, 686), False, 'from src.parsedata import PDFParser\n'), ((813, 849), 'src.capitalgains.CapitalGains', 'CapitalGains', (['mf_hdr_df', 'mf_trans_df'], {}), '(mf_hdr_df, mf_trans_df)\n', (825, 849), False, 'from src.capitalgains import CapitalGains\n'), ((1095, 1125), 'pandas.read_csv', 'pd.read_csv', (['file_path_hdr_out'], {}), '(file_path_hdr_out)\n', (1106, 1125), True, 'import pandas as pd\n'), ((1566, 1622), 'os.path.join', 'os.path.join', (['tc.unit_test_base_dir', '"""cas_jul_29_21.pdf"""'], {}), "(tc.unit_test_base_dir, 'cas_jul_29_21.pdf')\n", (1578, 1622), False, 'import os\n'), ((1651, 1720), 'os.path.join', 'os.path.join', (['tc.unit_test_base_dir', '"""output_mf_totals_jul_29_21.csv"""'], {}), "(tc.unit_test_base_dir, 'output_mf_totals_jul_29_21.csv')\n", (1663, 1720), False, 'import os\n'), ((1732, 1798), 'logging.info', 'logging.info', (['f"""running Basic Test for input file {file_path_cas}"""'], {}), "(f'running Basic Test for input file {file_path_cas}')\n", (1744, 1798), False, 'import logging\n'), ((1962, 1993), 'logging.info', 'logging.info', (['f"""Result:{res}\n"""'], {}), "(f'Result:{res}\\n')\n", (1974, 1993), False, 'import logging\n'), ((2307, 2318), 'src.parsedata.PDFParser', 'PDFParser', ([], {}), '()\n', (2316, 2318), False, 'from src.parsedata import PDFParser\n'), ((2347, 2403), 'os.path.join', 'os.path.join', (['tc.unit_test_base_dir', '"""cas_jul_19_21.pdf"""'], {}), "(tc.unit_test_base_dir, 'cas_jul_19_21.pdf')\n", (2359, 2403), False, 'import os\n'), ((2414, 2483), 'logging.info', 'logging.info', (['f"""running test for rdemeption for file {file_path_cas}"""'], {}), "(f'running test for rdemeption for file {file_path_cas}')\n", (2426, 2483), False, 'import logging\n'), ((2576, 2612), 'src.capitalgains.CapitalGains', 'CapitalGains', (['mf_hdr_df', 'mf_trans_df'], {}), '(mf_hdr_df, mf_trans_df)\n', (2588, 2612), False, 'from src.capitalgains import CapitalGains\n'), ((2652, 2705), 'collections.namedtuple', 'namedtuple', (['"""Data"""', "['scheme_name', 'redeemed_units']"], {}), "('Data', ['scheme_name', 'redeemed_units'])\n", (2662, 2705), False, 'from collections import namedtuple\n'), ((3592, 3623), 'logging.info', 'logging.info', (['f"""Result:{res}\n"""'], {}), "(f'Result:{res}\\n')\n", (3604, 3623), False, 'import logging\n'), ((3774, 3785), 'src.parsedata.PDFParser', 'PDFParser', ([], {}), '()\n', (3783, 3785), False, 'from src.parsedata import PDFParser\n'), ((3814, 3870), 'os.path.join', 'os.path.join', (['tc.unit_test_base_dir', '"""cas_jul_19_21.pdf"""'], {}), "(tc.unit_test_base_dir, 'cas_jul_19_21.pdf')\n", (3826, 3870), False, 'import os\n'), ((3881, 3984), 'logging.info', 'logging.info', (['f"""running test for Current Amount and Invested Amount for file {file_path_cas}"""'], {}), "(\n f'running test for Current Amount and Invested Amount for file {file_path_cas}'\n )\n", (3893, 3984), False, 'import logging\n'), ((4067, 4103), 'src.capitalgains.CapitalGains', 'CapitalGains', (['mf_hdr_df', 'mf_trans_df'], {}), '(mf_hdr_df, mf_trans_df)\n', (4079, 4103), False, 'from src.capitalgains import CapitalGains\n'), ((4206, 4272), 
'collections.namedtuple', 'namedtuple', (['"""Data"""', "['scheme_name', 'invested_amt', 'current_amt']"], {}), "('Data', ['scheme_name', 'invested_amt', 'current_amt'])\n", (4216, 4272), False, 'from collections import namedtuple\n'), ((5809, 5840), 'logging.info', 'logging.info', (['f"""Result:{res}\n"""'], {}), "(f'Result:{res}\\n')\n", (5821, 5840), False, 'import logging\n'), ((6240, 6296), 'os.path.join', 'os.path.join', (['tc.unit_test_base_dir', '"""cas_jul_29_21.pdf"""'], {}), "(tc.unit_test_base_dir, 'cas_jul_29_21.pdf')\n", (6252, 6296), False, 'import os\n'), ((6307, 6420), 'logging.info', 'logging.info', (['f"""running test for Aggregate Current Amount and Invested Amount for file {file_path_cas}"""'], {}), "(\n f'running test for Aggregate Current Amount and Invested Amount for file {file_path_cas}'\n )\n", (6319, 6420), False, 'import logging\n'), ((6424, 6435), 'src.parsedata.PDFParser', 'PDFParser', ([], {}), '()\n', (6433, 6435), False, 'from src.parsedata import PDFParser\n'), ((6542, 6578), 'src.capitalgains.CapitalGains', 'CapitalGains', (['mf_hdr_df', 'mf_trans_df'], {}), '(mf_hdr_df, mf_trans_df)\n', (6554, 6578), False, 'from src.capitalgains import CapitalGains\n'), ((7639, 7670), 'logging.info', 'logging.info', (['f"""Result:{res}\n"""'], {}), "(f'Result:{res}\\n')\n", (7651, 7670), False, 'import logging\n'), ((1314, 1428), 'logging.error', 'logging.error', (['f"""Target Units Do not Match: Expected {target_units_sum_exp}, Got {target_units_sum_act}"""'], {}), "(\n f'Target Units Do not Match: Expected {target_units_sum_exp}, Got {target_units_sum_act}'\n )\n", (1327, 1428), False, 'import logging\n'), ((2134, 2152), 'logging.error', 'logging.error', (['msg'], {}), '(msg)\n', (2147, 2152), False, 'import logging\n'), ((5872, 5890), 'logging.error', 'logging.error', (['msg'], {}), '(msg)\n', (5885, 5890), False, 'import logging\n'), ((7702, 7720), 'logging.error', 'logging.error', (['msg'], {}), '(msg)\n', (7715, 7720), False, 'import logging\n'), ((3467, 3485), 'logging.error', 'logging.error', (['msg'], {}), '(msg)\n', (3480, 3485), False, 'import logging\n')] |
import pygame
import cmg
from cmg.color import Colors
from study_tool.config import Config
from study_tool.entities.entity import Entity
class StudyProficiencyBar(Entity):
"""
Bar that displays the proficiency histogram of a study set.
"""
def __init__(self, study_set, center_y=0, left=0, right=0):
super().__init__()
self.center_y = center_y
self.left = left
self.right = right
self.study_set = study_set
self.__proficiency_counts = {}
self.__score = 0
self.__total_cards = 0
self.__font_bar_text = pygame.font.Font(None, 30)
def on_create(self):
"""Called when the entity is created."""
self.recalculate()
def recalculate(self):
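        """Recompute the per-level proficiency counts and the aggregate score (0-100)."""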
cards = []
if isinstance(self.study_set, list):
cards = self.study_set
else:
cards = list(self.study_set.cards)
self.__total_cards = len(cards)
self.__proficiency_counts = {}
self.__score = 0
for level in range(Config.proficiency_levels, -1, -1):
self.__proficiency_counts[level] = 0
for card in cards:
study_data = self.context.study_database.get_card_study_data(card)
self.__proficiency_counts[study_data.get_proficiency_level()] += 1
for level in range(Config.proficiency_levels, -1, -1):
count = self.__proficiency_counts[level]
self.__score += count * max(0, level - 1)
#for level in range(Config.proficiency_levels, -1, -1):
# count = len([c for c in cards if c.proficiency_level == level])
# self.__proficiency_counts[level] = count
# if count > 0:
# self.__score += count * max(0, level - 1)
self.__score /= max(1.0, float((Config.proficiency_levels - 1) * len(cards)))
self.__score = int(round(self.__score * 100))
def update(self, dt):
"""Update the entity."""
def draw(self, g):
"""Draw the entity."""
cards = []
if isinstance(self.study_set, list):
cards = self.study_set
else:
cards = list(self.study_set.cards)
total_cards = len(cards)
font = self.__font_bar_text
left_margin = g.measure_text("100%", font=font)[0] + 4
right_margin = g.measure_text(str(9999), font=font)[0] + 4
bar_height = g.measure_text("1", font=font)[1]
bar_width = self.right - self.left - left_margin - right_margin
top = self.center_y - (bar_height / 2)
if False:
cards = sorted(
cards, key=lambda x: x.get_history_score(), reverse=True)
for index, card in enumerate(cards):
score = card.get_history_score()
x = self.left + left_margin + bar_width * \
(float(index) / len(cards))
w = max(1, math.ceil(float(bar_width) / len(cards)))
c = math.lerp(Colors.RED, Colors.GREEN, score)
h = math.ceil(score * bar_height)
g.fill_rect(x, top + bar_height - h, w, h, color=c)
else:
x = self.left + left_margin
for level in range(Config.proficiency_levels, -1, -1):
count = self.__proficiency_counts[level]
if count > 0:
level_width = int(
round(bar_width * (float(count) / self.__total_cards)))
if x + level_width > self.left + left_margin + bar_width:
level_width = (self.left + left_margin + bar_width) - x
g.fill_rect(x, top, level_width, bar_height,
color=Config.proficiency_level_colors[level])
x += level_width
g.draw_text(self.left + left_margin - 4, self.center_y, text="{}%".format(self.__score),
color=cmg.Theme.color_text, align=cmg.Align.MiddleRight, font=font)
g.draw_text(self.right - right_margin + 4, self.center_y, text=str(self.__total_cards),
color=cmg.Theme.color_text, align=cmg.Align.MiddleLeft, font=font)
| [
"pygame.font.Font"
] | [((591, 617), 'pygame.font.Font', 'pygame.font.Font', (['None', '(30)'], {}), '(None, 30)\n', (607, 617), False, 'import pygame\n')] |
import os
import sys
import numpy as np
from shapeworks import *
success = True
def pointsTest():
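    # Load the simple ellipsoid mesh and check the vertex count plus the
    # first and last points against known reference values.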
mesh = Mesh(os.environ["DATA"] + "/simple_ellipsoid.ply")
points = mesh.points()
v0 = points[0]
g0 = [1.12801208e+01, 1.84252377e+01, 2.66504917e+01]
vn = points[points.shape[0]-1]
gn = [3.35370102e+01, 1.25301433e+00, 3.71165695e+01]
return points.shape[0] == 14 and points.shape[1] == 3 and np.linalg.norm(v0-g0) < 1e-4 and np.linalg.norm(vn-gn) < 1e-4
success &= utils.test(pointsTest)
sys.exit(not success)
| [
"numpy.linalg.norm",
"sys.exit"
] | [((510, 531), 'sys.exit', 'sys.exit', (['(not success)'], {}), '(not success)\n', (518, 531), False, 'import sys\n'), ((412, 435), 'numpy.linalg.norm', 'np.linalg.norm', (['(v0 - g0)'], {}), '(v0 - g0)\n', (426, 435), True, 'import numpy as np\n'), ((445, 468), 'numpy.linalg.norm', 'np.linalg.norm', (['(vn - gn)'], {}), '(vn - gn)\n', (459, 468), True, 'import numpy as np\n')] |
import cv2
import numpy
def grayscale_smooth(np_image):
"""
    Convert an image to grayscale, equalise contrast with CLAHE and apply a bilateral smoothing filter
"""
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
gray_img = cv2.cvtColor(np_image, cv2.COLOR_RGB2GRAY)
norm_img = clahe.apply(gray_img)
smooth_img = cv2.bilateralFilter(norm_img, 3, 75, 75)
return smooth_img
# Takes 4 corner points and uses them to try to unwarp a rectangular image
def four_point_transform(image, pts):
"""
transforms an image using four given points to flatten and transform into a rectangle
Magic taken from http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/
"""
# obtain a consistent order of the points and unpack them
# individually
rect = order_points(pts)
(tl, tr, br, bl) = rect
# compute the width of the new image, which will be the
# maximum distance between bottom-right and bottom-left
    # x-coordinates or the top-right and top-left x-coordinates
width_a = numpy.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
width_b = numpy.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
max_width = max(int(width_a), int(width_b))
# compute the height of the new image, which will be the
# maximum distance between the top-right and bottom-right
# y-coordinates or the top-left and bottom-left y-coordinates
height_a = numpy.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
height_b = numpy.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
max_height = max(int(height_a), int(height_b))
# now that we have the dimensions of the new image, construct
# the set of destination points to obtain a "birds eye view",
# (i.e. top-down view) of the image, again specifying points
# in the top-left, top-right, bottom-right, and bottom-left
# order
dst = numpy.array([
[0, 0],
[max_width - 1, 0],
[max_width - 1, max_height - 1],
[0, max_height - 1]], dtype="float32")
# compute the perspective transform matrix and then apply it
transform_matrix = cv2.getPerspectiveTransform(rect, dst)
warped = cv2.warpPerspective(image, transform_matrix, (max_width, max_height))
# return the warped image
return warped
def order_points(pts):
"""
takes a tuple of four point tuples and order then in the following order;
[topLeft, topRight, bottomRight, bottomLeft]
Magic taken from http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/
"""
    # initialize a list of coordinates that will be ordered
# such that the first entry in the list is the top-left,
# the second entry is the top-right, the third is the
# bottom-right, and the fourth is the bottom-left
rect = numpy.zeros((4, 2), dtype="float32")
# the top-left point will have the smallest sum, whereas
# the bottom-right point will have the largest sum
s = pts.sum(axis=1)
rect[0] = pts[numpy.argmin(s)]
rect[2] = pts[numpy.argmax(s)]
# now, compute the difference between the points, the
# top-right point will have the smallest difference,
# whereas the bottom-left will have the largest difference
diff = numpy.diff(pts, axis=1)
rect[1] = pts[numpy.argmin(diff)]
rect[3] = pts[numpy.argmax(diff)]
# return the ordered coordinates
return rect
def guess_colour(r, g, b):
    # Use the average RGB values of a stickyNote, then use the differences
    # between the channels to establish which of the four supported colours it is.
r = int(r)
g = int(g)
b = int(b)
rg = r - g
rb = r - b
gb = g - b
colour_thresholds = {
"ORANGE": {
"min_rg": 20,
"max_rg": 110,
"min_rb": 60,
"max_rb": 180,
"min_gb": 25,
"max_gb": 100
},
"YELLOW": {
"min_rg": -30,
"max_rg": 15,
"min_rb": 35,
"max_rb": 150,
"min_gb": 40,
"max_gb": 150
},
"BLUE": {
"min_rg": -110,
"max_rg": -20,
"min_rb": -140,
"max_rb": -40,
"min_gb": -45,
"max_gb": 0
},
"MAGENTA": {
"min_rg": 40,
"max_rg": 135,
"min_rb": 25,
"max_rb": 100,
"min_gb": -55,
"max_gb": -10
},
}
for colour in colour_thresholds:
if ((rg >= colour_thresholds[colour]["min_rg"]) and
(rg <= colour_thresholds[colour]["max_rg"]) and
(rb >= colour_thresholds[colour]["min_rb"]) and
(rb <= colour_thresholds[colour]["max_rb"]) and
(gb >= colour_thresholds[colour]["min_gb"]) and
(gb <= colour_thresholds[colour]["max_gb"])):
return colour
return None
def binarize(image, lightest=True):
    # Converts an image to black and white by k-means clustering towards the lightest and darkest areas
Z = image.reshape((-1,3))
# convert to np.float32
Z = numpy.float32(Z)
# define criteria, number of clusters(K) and apply kmeans()
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
if lightest:
K = 3
else:
K = 2
ret,label,center = cv2.kmeans(data=Z,
K=K,
bestLabels=None,
criteria=criteria,
attempts=10,
flags=cv2.KMEANS_RANDOM_CENTERS)
# Now convert back into uint8, and make original image
center = numpy.uint8(center)
res = center[label.flatten()]
res2 = res.reshape((image.shape))
bmin = res2[..., 0].min()
gmin = res2[..., 1].min()
rmin = res2[..., 2].min()
bmax = res2[..., 0].max()
gmax = res2[..., 1].max()
rmax = res2[..., 2].max()
if lightest:
image[numpy.where((res2 < [bmax, gmax, rmax]).all(axis=2))] = [0, 0, 0]
image[numpy.where((res2 == [bmax, gmax, rmax ]).all(axis=2))] = [255, 255, 255]
else:
image[numpy.where((res2 > [0, 0, 0]).all(axis=2))] = [0, 0, 0]
image[numpy.where((res2 > [bmin, gmin, rmin]).all(axis=2))] = [255, 255, 255]
return image
def edge(img):
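    """Return a Canny edge map of the image after bilateral smoothing."""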
kernel = numpy.ones((5, 5), numpy.uint8)
#img = cv2.medianBlur(img, 9)
img = cv2.bilateralFilter(img,9,75,75)
gray_image = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
edged = cv2.Canny(gray_image, 1, 30)
return edged
def get_area(points):
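    """Compute the polygon area from its corner points (shoelace formula)."""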
#if len(points) == 4:
# points = self.order_points(points)
pointsum = 0
for index in range(-1, len(points) - 1):
pointsum = pointsum + (points[index][0] * points[index + 1][1] - points[index][1] * points[index + 1][0])
area = abs(pointsum / 2)
return area | [
"numpy.uint8",
"numpy.sqrt",
"numpy.ones",
"cv2.bilateralFilter",
"cv2.getPerspectiveTransform",
"cv2.kmeans",
"numpy.diff",
"numpy.argmax",
"cv2.createCLAHE",
"numpy.array",
"cv2.warpPerspective",
"numpy.zeros",
"cv2.cvtColor",
"numpy.argmin",
"cv2.Canny",
"numpy.float32"
] | [((159, 210), 'cv2.createCLAHE', 'cv2.createCLAHE', ([], {'clipLimit': '(2.0)', 'tileGridSize': '(8, 8)'}), '(clipLimit=2.0, tileGridSize=(8, 8))\n', (174, 210), False, 'import cv2\n'), ((226, 268), 'cv2.cvtColor', 'cv2.cvtColor', (['np_image', 'cv2.COLOR_RGB2GRAY'], {}), '(np_image, cv2.COLOR_RGB2GRAY)\n', (238, 268), False, 'import cv2\n'), ((323, 363), 'cv2.bilateralFilter', 'cv2.bilateralFilter', (['norm_img', '(3)', '(75)', '(75)'], {}), '(norm_img, 3, 75, 75)\n', (342, 363), False, 'import cv2\n'), ((1059, 1114), 'numpy.sqrt', 'numpy.sqrt', (['((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)'], {}), '((br[0] - bl[0]) ** 2 + (br[1] - bl[1]) ** 2)\n', (1069, 1114), False, 'import numpy\n'), ((1133, 1188), 'numpy.sqrt', 'numpy.sqrt', (['((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)'], {}), '((tr[0] - tl[0]) ** 2 + (tr[1] - tl[1]) ** 2)\n', (1143, 1188), False, 'import numpy\n'), ((1446, 1501), 'numpy.sqrt', 'numpy.sqrt', (['((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)'], {}), '((tr[0] - br[0]) ** 2 + (tr[1] - br[1]) ** 2)\n', (1456, 1501), False, 'import numpy\n'), ((1521, 1576), 'numpy.sqrt', 'numpy.sqrt', (['((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)'], {}), '((tl[0] - bl[0]) ** 2 + (tl[1] - bl[1]) ** 2)\n', (1531, 1576), False, 'import numpy\n'), ((1916, 2033), 'numpy.array', 'numpy.array', (['[[0, 0], [max_width - 1, 0], [max_width - 1, max_height - 1], [0, \n max_height - 1]]'], {'dtype': '"""float32"""'}), "([[0, 0], [max_width - 1, 0], [max_width - 1, max_height - 1], [\n 0, max_height - 1]], dtype='float32')\n", (1927, 2033), False, 'import numpy\n'), ((2151, 2189), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['rect', 'dst'], {}), '(rect, dst)\n', (2178, 2189), False, 'import cv2\n'), ((2203, 2272), 'cv2.warpPerspective', 'cv2.warpPerspective', (['image', 'transform_matrix', '(max_width, max_height)'], {}), '(image, transform_matrix, (max_width, max_height))\n', (2222, 2272), False, 'import cv2\n'), ((2844, 2880), 'numpy.zeros', 'numpy.zeros', (['(4, 2)'], {'dtype': '"""float32"""'}), "((4, 2), dtype='float32')\n", (2855, 2880), False, 'import numpy\n'), ((3283, 3306), 'numpy.diff', 'numpy.diff', (['pts'], {'axis': '(1)'}), '(pts, axis=1)\n', (3293, 3306), False, 'import numpy\n'), ((5167, 5183), 'numpy.float32', 'numpy.float32', (['Z'], {}), '(Z)\n', (5180, 5183), False, 'import numpy\n'), ((5403, 5512), 'cv2.kmeans', 'cv2.kmeans', ([], {'data': 'Z', 'K': 'K', 'bestLabels': 'None', 'criteria': 'criteria', 'attempts': '(10)', 'flags': 'cv2.KMEANS_RANDOM_CENTERS'}), '(data=Z, K=K, bestLabels=None, criteria=criteria, attempts=10,\n flags=cv2.KMEANS_RANDOM_CENTERS)\n', (5413, 5512), False, 'import cv2\n'), ((5751, 5770), 'numpy.uint8', 'numpy.uint8', (['center'], {}), '(center)\n', (5762, 5770), False, 'import numpy\n'), ((6423, 6454), 'numpy.ones', 'numpy.ones', (['(5, 5)', 'numpy.uint8'], {}), '((5, 5), numpy.uint8)\n', (6433, 6454), False, 'import numpy\n'), ((6499, 6534), 'cv2.bilateralFilter', 'cv2.bilateralFilter', (['img', '(9)', '(75)', '(75)'], {}), '(img, 9, 75, 75)\n', (6518, 6534), False, 'import cv2\n'), ((6549, 6586), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2GRAY'], {}), '(img, cv2.COLOR_RGB2GRAY)\n', (6561, 6586), False, 'import cv2\n'), ((6600, 6628), 'cv2.Canny', 'cv2.Canny', (['gray_image', '(1)', '(30)'], {}), '(gray_image, 1, 30)\n', (6609, 6628), False, 'import cv2\n'), ((3041, 3056), 'numpy.argmin', 'numpy.argmin', (['s'], {}), '(s)\n', (3053, 3056), False, 'import numpy\n'), ((3076, 3091), 'numpy.argmax', 'numpy.argmax', 
(['s'], {}), '(s)\n', (3088, 3091), False, 'import numpy\n'), ((3325, 3343), 'numpy.argmin', 'numpy.argmin', (['diff'], {}), '(diff)\n', (3337, 3343), False, 'import numpy\n'), ((3363, 3381), 'numpy.argmax', 'numpy.argmax', (['diff'], {}), '(diff)\n', (3375, 3381), False, 'import numpy\n')] |
from unittest import mock
import pytest
from mopidy_radionet import backend
from mopidy_radionet.radionet import RadioNetClient
from mopidy_radionet.library import RadioNetLibraryProvider
@pytest.fixture
def backend_mock():
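    """RadioNetBackend mock wired up with a real client and library provider."""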
backend_mock = mock.Mock(spec=backend.RadioNetBackend)
backend_mock.radionet = RadioNetClient(proxy_config=None)
backend_mock.library = RadioNetLibraryProvider(backend=backend_mock)
backend_mock.radionet.set_apikey('test')
backend_mock.radionet.set_favorites({'lush'})
return backend_mock
@pytest.fixture
def library(backend_mock):
return backend_mock.library
@pytest.fixture
def radionet(backend_mock):
return backend_mock.radionet
| [
"mopidy_radionet.radionet.RadioNetClient",
"unittest.mock.Mock",
"mopidy_radionet.library.RadioNetLibraryProvider"
] | [((246, 285), 'unittest.mock.Mock', 'mock.Mock', ([], {'spec': 'backend.RadioNetBackend'}), '(spec=backend.RadioNetBackend)\n', (255, 285), False, 'from unittest import mock\n'), ((314, 347), 'mopidy_radionet.radionet.RadioNetClient', 'RadioNetClient', ([], {'proxy_config': 'None'}), '(proxy_config=None)\n', (328, 347), False, 'from mopidy_radionet.radionet import RadioNetClient\n'), ((375, 420), 'mopidy_radionet.library.RadioNetLibraryProvider', 'RadioNetLibraryProvider', ([], {'backend': 'backend_mock'}), '(backend=backend_mock)\n', (398, 420), False, 'from mopidy_radionet.library import RadioNetLibraryProvider\n')] |
# Generated by Django 3.0.5 on 2020-04-15 10:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('academics', '0002_semester'),
]
operations = [
migrations.AddField(
model_name='program',
name='desc',
field=models.TextField(default=''),
),
]
| [
"django.db.models.TextField"
] | [((325, 353), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""'}), "(default='')\n", (341, 353), False, 'from django.db import migrations, models\n')] |
# Generated by Django 2.0.9 on 2019-02-05 11:45
from django.db import migrations, models
import fadderanmalan.models
class Migration(migrations.Migration):
dependencies = [
('fadderanmalan', '0003_auto_20190205_1121'),
]
operations = [
migrations.AlterField(
model_name='job',
name='hidden',
field=models.BooleanField(help_text='Om jobbet ska döljas från frontend:en. Överskrider datumen nedan'),
),
migrations.AlterField(
model_name='job',
name='hidden_after',
field=models.DateField(default=fadderanmalan.models.default_hidden_after, help_text='EFTER detta datum kommer jobbet att döljas.'),
),
migrations.AlterField(
model_name='job',
name='hidden_until',
field=models.DateField(default=fadderanmalan.models.default_hidden_until, help_text='Jobbet kommer att visas PÅ detta datum.'),
),
migrations.AlterField(
model_name='job',
name='locked',
field=models.BooleanField(help_text='Om jobbet ska vara låst. Överskrider datumen nedan.'),
),
migrations.AlterField(
model_name='job',
name='locked_after',
field=models.DateField(default=fadderanmalan.models.default_locked_after, help_text='EFTER detta datum kommer jobbet att låsas.'),
),
migrations.AlterField(
model_name='job',
name='locked_until',
field=models.DateField(default=fadderanmalan.models.default_locked_until, help_text='Jobbet kommer att låsas upp PÅ detta datum.'),
),
]
| [
"django.db.models.DateField",
"django.db.models.BooleanField"
] | [((367, 469), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'help_text': '"""Om jobbet ska döljas från frontend:en. Överskrider datumen nedan"""'}), "(help_text=\n 'Om jobbet ska döljas från frontend:en. Överskrider datumen nedan')\n", (386, 469), False, 'from django.db import migrations, models\n'), ((589, 717), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'fadderanmalan.models.default_hidden_after', 'help_text': '"""EFTER detta datum kommer jobbet att döljas."""'}), "(default=fadderanmalan.models.default_hidden_after,\n help_text='EFTER detta datum kommer jobbet att döljas.')\n", (605, 717), False, 'from django.db import migrations, models\n'), ((838, 962), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'fadderanmalan.models.default_hidden_until', 'help_text': '"""Jobbet kommer att visas PÅ detta datum."""'}), "(default=fadderanmalan.models.default_hidden_until,\n help_text='Jobbet kommer att visas PÅ detta datum.')\n", (854, 962), False, 'from django.db import migrations, models\n'), ((1077, 1166), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'help_text': '"""Om jobbet ska vara låst. Överskrider datumen nedan."""'}), "(help_text=\n 'Om jobbet ska vara låst. Överskrider datumen nedan.')\n", (1096, 1166), False, 'from django.db import migrations, models\n'), ((1286, 1413), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'fadderanmalan.models.default_locked_after', 'help_text': '"""EFTER detta datum kommer jobbet att låsas."""'}), "(default=fadderanmalan.models.default_locked_after,\n help_text='EFTER detta datum kommer jobbet att låsas.')\n", (1302, 1413), False, 'from django.db import migrations, models\n'), ((1534, 1662), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'fadderanmalan.models.default_locked_until', 'help_text': '"""Jobbet kommer att låsas upp PÅ detta datum."""'}), "(default=fadderanmalan.models.default_locked_until,\n help_text='Jobbet kommer att låsas upp PÅ detta datum.')\n", (1550, 1662), False, 'from django.db import migrations, models\n')] |
from __future__ import division
from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy
import numpy as np
from scipy.sparse import csr_matrix
B = np.array([
[1,1,1,1],
[1,1,1,0],
[1,1,0,0],
[1,0,0,0],
])
N = np.array([
[2,3,4,2],
[2,3,4,2],
[2,3,3,2],
[2,1,3,4]
])
W = np.array([
[4,3,2,1],
[3,2,1,0],
[2,1,0,0],
[1,0,0,0],
])
def test_jaccard_scipy():
""" Test Jaccard: scipy.spatial.dist.jaccard """
u = np.array([2,3,4,5])
v = np.array([2,3,4,2])
d = _jaccard_coef_scipy(u,v,min_support=1)
assert (d == 0.75)
def test_jaccard_binary():
""" Test Jaccard: binary (bitwise) coef """
u = np.array([1,1,1,1])
v = np.array([1,1,1,0])
d = _jaccard_coef_binary(u,v,min_support=1)
assert (d == 0.75)
def test_jaccard_set():
""" Test Jaccard: set coef """
u = np.array([4,3,2,1])
v = np.array([3,2,1,0])
d = _jaccard_coef_set(u,v,min_support=1)
assert (d == 0.6)
def test_jaccard_weighted():
""" Test Jaccard: weighted coef """
u = np.array([4,3,2,1])
v = np.array([3,2,1,0])
d = _jaccard_coef_weighted_numpy(u,v,min_support=1)
assert (d == 0.6)
def test_pairwise_distance_numpy_scipy():
""" Test pairwise distance: using the Numpy (dense matrix) implemmentation for numer jaccard (scipy) coef """
D = pairwise_proximity(N, metric='jaccard')
true = np.array([
[ 1. , 1. , 0.75, 0.25],
[ 1. , 1. , 0.75, 0.25],
[ 0.75, 0.75, 1. , 0.5 ],
[ 0.25, 0.25, 0.5 , 1. ],
], dtype=float)
assert np.isclose(D, true). all()
def test_pairwise_distance_numpy_binary():
""" Test pairwise distance: using the Numpy (dense matrix) implementation for jaccard binary coef """
D = pairwise_proximity(B, metric='jaccard_binary', min_support=1, verbose=True)
true = np.array([
[ 1., 0.75, 0.5, 0.25 ],
[ 0.75, 1., 0.66666667, 0.33333333],
[ 0.5, 0.66666667, 1., 0.5 ],
[ 0.25, 0.33333333, 0.5, 1. ],
], dtype=float)
assert np.isclose(D, true).all()
def test_pairwise_distance_numpy_set():
""" Test pairwise distance: using the Numpy (dense matrix) implementation for jaccard set coef """
D = pairwise_proximity(W, metric='jaccard_set', min_support=1)
true = np.array([
[ 1., 0.6, 0.4, 0.2, ],
[ 0.6, 1., 0.75, 0.5, ],
[ 0.4, 0.75, 1., 0.66666667,],
[ 0.2, 0.5, 0.66666667, 1., ],
], dtype=float)
assert np.isclose(D, true).all()
def test_pairwise_distance_numpy_weighted():
""" Test pairwise distance: using Numpy (dense matrix) using weighted jaccard """
D = pairwise_proximity(W, metric='weighted_jaccard', min_support=10)
true = np.array([
[ 1., 0.6, 0.3, 0.1],
[ 0.6, 1., 0., 0. ],
[ 0.3, 0., 1., 0. ],
[ 0.1, 0., 0., 1. ],
], dtype=float)
assert np.isclose(D, true).all()
def test_pairwise_distance_sparse_scipy():
""" Test pairwise distance: using the Scipy (sparse matrix) implemmentation for jaccard scipy coef """
N_sparse = csr_matrix(N)
D = pairwise_proximity(N_sparse, metric='jaccard', min_support=1)
true = np.array([
[ 1. , 1. , 0.75, 0.25],
[ 1. , 1. , 0.75, 0.25],
[ 0.75, 0.75, 1. , 0.5 ],
[ 0.25, 0.25, 0.5 , 1. ],
], dtype=float)
assert np.isclose(D.todense(), true). all()
def test_pairwise_distance_sparse_binary():
""" Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard bitwise coef """
B_sparse = csr_matrix(B)
D = pairwise_proximity(B_sparse, metric='jaccard_binary', min_support=1)
#print D.todense()
true = np.array([
[ 1., 0.75, 0.5, 0.25 ],
[ 0.75, 1., 0.66666667, 0.33333333],
[ 0.5, 0.66666667, 1., 0.5 ],
[ 0.25, 0.33333333, 0.5, 1. ],
], dtype=float)
assert np.isclose(D.todense(), true).all()
def test_pairwise_distance_sparse_set():
""" Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard set coef """
W_sparse = csr_matrix(W)
D = pairwise_proximity(W_sparse, metric='jaccard_set', min_support=1)
true = np.array([
[ 1., 0.75, 0.5, 0.25 ],
[ 0.75, 1., 0.66666667, 0.33333333],
[ 0.5, 0.66666667, 1., 0.5 ],
[ 0.25, 0.33333333, 0.5, 1. ],
], dtype=float)
assert np.isclose(D.todense(), true).all()
def test_pairwise_distance_sparse_weighted():
""" Test pairwise distance: using the Scipy (sparse matrix) implementation for jaccard weighted coef """
W_sparse = csr_matrix(W)
D = pairwise_proximity(W_sparse, metric='jaccard_weighted', min_support=1)
true = np.array([
[ 1., 0.6, 0.3, 0.1],
[ 0.6, 1., 0., 0. ],
[ 0.3, 0., 1., 0. ],
[ 0.1, 0., 0., 1. ],
], dtype=float)
assert np.isclose(D.todense(), true).all()
def test_pairwise_distance_dense_my_own_metric():
""" Test pairwise distance: using the numpy (dense matrix) implementation and my own metric function """
def my_coef(u,v):
return 0.25
D = pairwise_proximity(W, metric=my_coef, verbose=True)
true = np.array([
[1., .25, .25, .25],
[ .25, 1., .25, .25],
[ .25, .25, 1., .25],
[ .25, .25, .25, 1. ],
], dtype=float)
assert np.isclose(D, true).all()
def test_pairwise_distance_sparse_my_own_metric():
""" Test pairwise distance: using the Scipy (sparse matrix) implementation and my own metric function """
def my_coef(u,v):
return 0.25
W_sparse = csr_matrix(W)
D = pairwise_proximity(W_sparse, metric=('indices',my_coef), verbose=True)
true = np.array([
[1., .25, .25, .25],
[ .25, 1., .25, .25],
[ .25, .25, 1., .25],
[ .25, .25, .25, 1. ],
], dtype=float)
assert np.isclose(D.todense(), true).all()
| [
"distanceclosure.distance._jaccard_coef_set",
"distanceclosure.distance._jaccard_coef_weighted_numpy",
"numpy.isclose",
"distanceclosure.distance.pairwise_proximity",
"numpy.array",
"distanceclosure.distance._jaccard_coef_scipy",
"distanceclosure.distance._jaccard_coef_binary",
"scipy.sparse.csr_matrix"
] | [((240, 306), 'numpy.array', 'np.array', (['[[1, 1, 1, 1], [1, 1, 1, 0], [1, 1, 0, 0], [1, 0, 0, 0]]'], {}), '([[1, 1, 1, 1], [1, 1, 1, 0], [1, 1, 0, 0], [1, 0, 0, 0]])\n', (248, 306), True, 'import numpy as np\n'), ((307, 373), 'numpy.array', 'np.array', (['[[2, 3, 4, 2], [2, 3, 4, 2], [2, 3, 3, 2], [2, 1, 3, 4]]'], {}), '([[2, 3, 4, 2], [2, 3, 4, 2], [2, 3, 3, 2], [2, 1, 3, 4]])\n', (315, 373), True, 'import numpy as np\n'), ((373, 439), 'numpy.array', 'np.array', (['[[4, 3, 2, 1], [3, 2, 1, 0], [2, 1, 0, 0], [1, 0, 0, 0]]'], {}), '([[4, 3, 2, 1], [3, 2, 1, 0], [2, 1, 0, 0], [1, 0, 0, 0]])\n', (381, 439), True, 'import numpy as np\n'), ((517, 539), 'numpy.array', 'np.array', (['[2, 3, 4, 5]'], {}), '([2, 3, 4, 5])\n', (525, 539), True, 'import numpy as np\n'), ((542, 564), 'numpy.array', 'np.array', (['[2, 3, 4, 2]'], {}), '([2, 3, 4, 2])\n', (550, 564), True, 'import numpy as np\n'), ((567, 607), 'distanceclosure.distance._jaccard_coef_scipy', '_jaccard_coef_scipy', (['u', 'v'], {'min_support': '(1)'}), '(u, v, min_support=1)\n', (586, 607), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((704, 726), 'numpy.array', 'np.array', (['[1, 1, 1, 1]'], {}), '([1, 1, 1, 1])\n', (712, 726), True, 'import numpy as np\n'), ((729, 751), 'numpy.array', 'np.array', (['[1, 1, 1, 0]'], {}), '([1, 1, 1, 0])\n', (737, 751), True, 'import numpy as np\n'), ((754, 795), 'distanceclosure.distance._jaccard_coef_binary', '_jaccard_coef_binary', (['u', 'v'], {'min_support': '(1)'}), '(u, v, min_support=1)\n', (774, 795), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((877, 899), 'numpy.array', 'np.array', (['[4, 3, 2, 1]'], {}), '([4, 3, 2, 1])\n', (885, 899), True, 'import numpy as np\n'), ((902, 924), 'numpy.array', 'np.array', (['[3, 2, 1, 0]'], {}), '([3, 2, 1, 0])\n', (910, 924), True, 'import numpy as np\n'), ((927, 965), 'distanceclosure.distance._jaccard_coef_set', '_jaccard_coef_set', (['u', 'v'], {'min_support': '(1)'}), '(u, v, min_support=1)\n', (944, 965), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((1055, 1077), 'numpy.array', 'np.array', (['[4, 3, 2, 1]'], {}), '([4, 3, 2, 1])\n', (1063, 1077), True, 'import numpy as np\n'), ((1080, 1102), 'numpy.array', 'np.array', (['[3, 2, 1, 0]'], {}), '([3, 2, 1, 0])\n', (1088, 1102), True, 'import numpy as np\n'), ((1105, 1154), 'distanceclosure.distance._jaccard_coef_weighted_numpy', '_jaccard_coef_weighted_numpy', (['u', 'v'], {'min_support': '(1)'}), '(u, v, min_support=1)\n', (1133, 1154), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((1338, 1377), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['N'], {'metric': '"""jaccard"""'}), "(N, metric='jaccard')\n", (1356, 1377), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((1386, 1509), 'numpy.array', 'np.array', (['[[1.0, 1.0, 0.75, 0.25], [1.0, 1.0, 0.75, 0.25], [0.75, 0.75, 1.0, 0.5], [\n 0.25, 0.25, 0.5, 1.0]]'], {'dtype': 'float'}), '([[1.0, 1.0, 0.75, 0.25], [1.0, 1.0, 0.75, 0.25], [0.75, 0.75, 1.0,\n 0.5], 
[0.25, 0.25, 0.5, 1.0]], dtype=float)\n', (1394, 1509), True, 'import numpy as np\n'), ((1730, 1805), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['B'], {'metric': '"""jaccard_binary"""', 'min_support': '(1)', 'verbose': '(True)'}), "(B, metric='jaccard_binary', min_support=1, verbose=True)\n", (1748, 1805), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((1814, 1961), 'numpy.array', 'np.array', (['[[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5, \n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5,\n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]], dtype=float)\n', (1822, 1961), True, 'import numpy as np\n'), ((2247, 2305), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W'], {'metric': '"""jaccard_set"""', 'min_support': '(1)'}), "(W, metric='jaccard_set', min_support=1)\n", (2265, 2305), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((2314, 2446), 'numpy.array', 'np.array', (['[[1.0, 0.6, 0.4, 0.2], [0.6, 1.0, 0.75, 0.5], [0.4, 0.75, 1.0, 0.66666667],\n [0.2, 0.5, 0.66666667, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.6, 0.4, 0.2], [0.6, 1.0, 0.75, 0.5], [0.4, 0.75, 1.0, \n 0.66666667], [0.2, 0.5, 0.66666667, 1.0]], dtype=float)\n', (2322, 2446), True, 'import numpy as np\n'), ((2740, 2804), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W'], {'metric': '"""weighted_jaccard"""', 'min_support': '(10)'}), "(W, metric='weighted_jaccard', min_support=10)\n", (2758, 2804), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((2813, 2928), 'numpy.array', 'np.array', (['[[1.0, 0.6, 0.3, 0.1], [0.6, 1.0, 0.0, 0.0], [0.3, 0.0, 1.0, 0.0], [0.1, \n 0.0, 0.0, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.6, 0.3, 0.1], [0.6, 1.0, 0.0, 0.0], [0.3, 0.0, 1.0, 0.0],\n [0.1, 0.0, 0.0, 1.0]], dtype=float)\n', (2821, 2928), True, 'import numpy as np\n'), ((3148, 3161), 'scipy.sparse.csr_matrix', 'csr_matrix', (['N'], {}), '(N)\n', (3158, 3161), False, 'from scipy.sparse import csr_matrix\n'), ((3167, 3228), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['N_sparse'], {'metric': '"""jaccard"""', 'min_support': '(1)'}), "(N_sparse, metric='jaccard', min_support=1)\n", (3185, 3228), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((3237, 3360), 'numpy.array', 'np.array', (['[[1.0, 1.0, 0.75, 0.25], [1.0, 1.0, 0.75, 0.25], [0.75, 0.75, 1.0, 0.5], [\n 0.25, 0.25, 0.5, 1.0]]'], {'dtype': 'float'}), '([[1.0, 1.0, 0.75, 0.25], [1.0, 1.0, 0.75, 0.25], [0.75, 0.75, 1.0,\n 0.5], [0.25, 0.25, 0.5, 1.0]], dtype=float)\n', (3245, 3360), True, 'import numpy as np\n'), ((3601, 3614), 'scipy.sparse.csr_matrix', 'csr_matrix', (['B'], {}), '(B)\n', (3611, 3614), False, 'from scipy.sparse import csr_matrix\n'), ((3620, 3688), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['B_sparse'], {'metric': '"""jaccard_binary"""', 'min_support': '(1)'}), "(B_sparse, metric='jaccard_binary', min_support=1)\n", (3638, 3688), False, 'from distanceclosure.distance 
import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((3717, 3864), 'numpy.array', 'np.array', (['[[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5, \n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5,\n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]], dtype=float)\n', (3725, 3864), True, 'import numpy as np\n'), ((4169, 4182), 'scipy.sparse.csr_matrix', 'csr_matrix', (['W'], {}), '(W)\n', (4179, 4182), False, 'from scipy.sparse import csr_matrix\n'), ((4188, 4253), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W_sparse'], {'metric': '"""jaccard_set"""', 'min_support': '(1)'}), "(W_sparse, metric='jaccard_set', min_support=1)\n", (4206, 4253), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((4262, 4409), 'numpy.array', 'np.array', (['[[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5, \n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.75, 0.5, 0.25], [0.75, 1.0, 0.66666667, 0.33333333], [0.5,\n 0.66666667, 1.0, 0.5], [0.25, 0.33333333, 0.5, 1.0]], dtype=float)\n', (4270, 4409), True, 'import numpy as np\n'), ((4724, 4737), 'scipy.sparse.csr_matrix', 'csr_matrix', (['W'], {}), '(W)\n', (4734, 4737), False, 'from scipy.sparse import csr_matrix\n'), ((4743, 4813), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W_sparse'], {'metric': '"""jaccard_weighted"""', 'min_support': '(1)'}), "(W_sparse, metric='jaccard_weighted', min_support=1)\n", (4761, 4813), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((4822, 4937), 'numpy.array', 'np.array', (['[[1.0, 0.6, 0.3, 0.1], [0.6, 1.0, 0.0, 0.0], [0.3, 0.0, 1.0, 0.0], [0.1, \n 0.0, 0.0, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.6, 0.3, 0.1], [0.6, 1.0, 0.0, 0.0], [0.3, 0.0, 1.0, 0.0],\n [0.1, 0.0, 0.0, 1.0]], dtype=float)\n', (4830, 4937), True, 'import numpy as np\n'), ((5204, 5255), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W'], {'metric': 'my_coef', 'verbose': '(True)'}), '(W, metric=my_coef, verbose=True)\n', (5222, 5255), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((5264, 5392), 'numpy.array', 'np.array', (['[[1.0, 0.25, 0.25, 0.25], [0.25, 1.0, 0.25, 0.25], [0.25, 0.25, 1.0, 0.25],\n [0.25, 0.25, 0.25, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.25, 0.25, 0.25], [0.25, 1.0, 0.25, 0.25], [0.25, 0.25, \n 1.0, 0.25], [0.25, 0.25, 0.25, 1.0]], dtype=float)\n', (5272, 5392), True, 'import numpy as np\n'), ((5652, 5665), 'scipy.sparse.csr_matrix', 'csr_matrix', (['W'], {}), '(W)\n', (5662, 5665), False, 'from scipy.sparse import csr_matrix\n'), ((5671, 5742), 'distanceclosure.distance.pairwise_proximity', 'pairwise_proximity', (['W_sparse'], {'metric': "('indices', my_coef)", 'verbose': '(True)'}), "(W_sparse, metric=('indices', my_coef), verbose=True)\n", (5689, 5742), False, 'from distanceclosure.distance import pairwise_proximity, _jaccard_coef_scipy, _jaccard_coef_binary, _jaccard_coef_set, _jaccard_coef_weighted_numpy\n'), ((5750, 5878), 'numpy.array', 'np.array', (['[[1.0, 0.25, 0.25, 0.25], 
[0.25, 1.0, 0.25, 0.25], [0.25, 0.25, 1.0, 0.25],\n [0.25, 0.25, 0.25, 1.0]]'], {'dtype': 'float'}), '([[1.0, 0.25, 0.25, 0.25], [0.25, 1.0, 0.25, 0.25], [0.25, 0.25, \n 1.0, 0.25], [0.25, 0.25, 0.25, 1.0]], dtype=float)\n', (5758, 5878), True, 'import numpy as np\n'), ((1551, 1570), 'numpy.isclose', 'np.isclose', (['D', 'true'], {}), '(D, true)\n', (1561, 1570), True, 'import numpy as np\n'), ((2075, 2094), 'numpy.isclose', 'np.isclose', (['D', 'true'], {}), '(D, true)\n', (2085, 2094), True, 'import numpy as np\n'), ((2580, 2599), 'numpy.isclose', 'np.isclose', (['D', 'true'], {}), '(D, true)\n', (2590, 2599), True, 'import numpy as np\n'), ((2962, 2981), 'numpy.isclose', 'np.isclose', (['D', 'true'], {}), '(D, true)\n', (2972, 2981), True, 'import numpy as np\n'), ((5420, 5439), 'numpy.isclose', 'np.isclose', (['D', 'true'], {}), '(D, true)\n', (5430, 5439), True, 'import numpy as np\n')] |
# Generated by Django 3.0.5 on 2020-06-07 11:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('homework_app', '0005_auto_20200607_1456'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='importance',
field=models.CharField(choices=[('высочайшая', '0'), ('высокая', '1'), ('средняя', '2'), ('низкая', '3')], max_length=10),
),
]
| [
"django.db.models.CharField"
] | [((346, 465), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('высочайшая', '0'), ('высокая', '1'), ('средняя', '2'), ('низкая', '3')]", 'max_length': '(10)'}), "(choices=[('высочайшая', '0'), ('высокая', '1'), ('средняя',\n '2'), ('низкая', '3')], max_length=10)\n", (362, 465), False, 'from django.db import migrations, models\n')] |
from django.contrib import admin
from auditlog.registry import auditlog
for model in auditlog.get_models():
admin.site.register(model)
| [
"django.contrib.admin.site.register",
"auditlog.registry.auditlog.get_models"
] | [((87, 108), 'auditlog.registry.auditlog.get_models', 'auditlog.get_models', ([], {}), '()\n', (106, 108), False, 'from auditlog.registry import auditlog\n'), ((114, 140), 'django.contrib.admin.site.register', 'admin.site.register', (['model'], {}), '(model)\n', (133, 140), False, 'from django.contrib import admin\n')] |
import os
import logging
import asyncio
from aiologger.loggers.json import JsonLogger
from aioelasticsearch import Elasticsearch
from asgard.sdk.options import get_option
LOGLEVEL_CONF = os.getenv("ASGARD_COUNTS_LOGLEVEL", "INFO")
loglevel = getattr(logging, LOGLEVEL_CONF, logging.INFO)
logger = None
async def init_logger():
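    """Initialise the module-level JSON logger with default handlers."""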
global logger
logger = await JsonLogger.with_default_handlers(level=loglevel, flatten=True)
loop = asyncio.get_event_loop()
init_logger_task = loop.create_task(init_logger())
ELASTIC_SEARCH_ADDRESSES = get_option("ELASTICSEARCH", "ADDRESS")
elasticsearch = Elasticsearch(hosts=ELASTIC_SEARCH_ADDRESSES)
RABBITMQ_HOST = os.getenv("COUNTS_RABBITMQ_HOST", "127.0.0.1")
RABBITMQ_USER = os.getenv("COUNTS_RABBITMQ_USER", "guest")
RABBITMQ_PWD = os.getenv("COUNTS_RABBITMQ_PWD", "<PASSWORD>")
RABBITMQ_PREFETCH = int(os.getenv("COUNTS_RABBITMQ_PREFETCH", 32))
RABBITMQ_VHOST = os.getenv("COUNTS_RABBITMQ_VHOST", "/")
COUNTS_QUEUE_NAMES = [item.strip() for item in os.getenv("COUNTS_QUEUE_NAMES", "").split(",")]
| [
"os.getenv",
"aioelasticsearch.Elasticsearch",
"asgard.sdk.options.get_option",
"aiologger.loggers.json.JsonLogger.with_default_handlers",
"asyncio.get_event_loop"
] | [((190, 233), 'os.getenv', 'os.getenv', (['"""ASGARD_COUNTS_LOGLEVEL"""', '"""INFO"""'], {}), "('ASGARD_COUNTS_LOGLEVEL', 'INFO')\n", (199, 233), False, 'import os\n'), ((440, 464), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (462, 464), False, 'import asyncio\n'), ((544, 582), 'asgard.sdk.options.get_option', 'get_option', (['"""ELASTICSEARCH"""', '"""ADDRESS"""'], {}), "('ELASTICSEARCH', 'ADDRESS')\n", (554, 582), False, 'from asgard.sdk.options import get_option\n'), ((600, 645), 'aioelasticsearch.Elasticsearch', 'Elasticsearch', ([], {'hosts': 'ELASTIC_SEARCH_ADDRESSES'}), '(hosts=ELASTIC_SEARCH_ADDRESSES)\n', (613, 645), False, 'from aioelasticsearch import Elasticsearch\n'), ((663, 709), 'os.getenv', 'os.getenv', (['"""COUNTS_RABBITMQ_HOST"""', '"""127.0.0.1"""'], {}), "('COUNTS_RABBITMQ_HOST', '127.0.0.1')\n", (672, 709), False, 'import os\n'), ((726, 768), 'os.getenv', 'os.getenv', (['"""COUNTS_RABBITMQ_USER"""', '"""guest"""'], {}), "('COUNTS_RABBITMQ_USER', 'guest')\n", (735, 768), False, 'import os\n'), ((784, 830), 'os.getenv', 'os.getenv', (['"""COUNTS_RABBITMQ_PWD"""', '"""<PASSWORD>"""'], {}), "('COUNTS_RABBITMQ_PWD', '<PASSWORD>')\n", (793, 830), False, 'import os\n'), ((915, 954), 'os.getenv', 'os.getenv', (['"""COUNTS_RABBITMQ_VHOST"""', '"""/"""'], {}), "('COUNTS_RABBITMQ_VHOST', '/')\n", (924, 954), False, 'import os\n'), ((855, 896), 'os.getenv', 'os.getenv', (['"""COUNTS_RABBITMQ_PREFETCH"""', '(32)'], {}), "('COUNTS_RABBITMQ_PREFETCH', 32)\n", (864, 896), False, 'import os\n'), ((369, 431), 'aiologger.loggers.json.JsonLogger.with_default_handlers', 'JsonLogger.with_default_handlers', ([], {'level': 'loglevel', 'flatten': '(True)'}), '(level=loglevel, flatten=True)\n', (401, 431), False, 'from aiologger.loggers.json import JsonLogger\n'), ((1002, 1037), 'os.getenv', 'os.getenv', (['"""COUNTS_QUEUE_NAMES"""', '""""""'], {}), "('COUNTS_QUEUE_NAMES', '')\n", (1011, 1037), False, 'import os\n')] |
from django.http import HttpResponse
from django.shortcuts import redirect
from hostelapp.models import student_room
from django.contrib import messages
def unauthenticated_user(view_func):
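    """Redirect users who are already logged in away from the wrapped view."""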
def wrapper_func(request, *args, **kwargs):
if request.user.is_authenticated:
return redirect('default_home_name')
else:
return view_func(request, *args, **kwargs)
return wrapper_func
def is_student_booked(view_func):
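    """Block the wrapped view if the student has already booked a room."""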
def wrapper_func(request, *args, **kwargs):
student = student_room.objects.filter(user=request.user)
if len(student) > 0:
message = "You already Booked the Room One can Book Only one Room"
messages.error(request, message)
return redirect('default_home_name')
else:
return view_func(request, *args, **kwargs)
return wrapper_func
def allowed_users(allowed_roles=[]):
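    """Restrict the wrapped view to users whose group is in allowed_roles."""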
def decorator(view_func):
def wrapper_func(request, *args, **kwargs):
group = None
if request.user.groups.exists():
group = request.user.groups.all()[0].name
if group in allowed_roles:
return view_func(request, *args, **kwargs)
else:
message = "You are not authorized to view this page"
messages.success(request, message)
return redirect('default_home_name')
# return HttpResponse('')
return wrapper_func
return decorator
# def student_wanden_cheif_only(view_func):
# def wrapper_function(request, *args, **kwargs):
# group = None
# if request.user.groups.exists():
# group = request.user.groups.all()[0].name
#
# if group == 'student':
# return redirect('home')
#
# if group == 'wanden':
# return redirect('home')
# # return view_func(request, *args, **kwargs)
# if group == 'chief warden':
# return redirect('cheif_warden_home')
#
# return wrapper_function
| [
"django.contrib.messages.error",
"hostelapp.models.student_room.objects.filter",
"django.shortcuts.redirect",
"django.contrib.messages.success"
] | [((526, 572), 'hostelapp.models.student_room.objects.filter', 'student_room.objects.filter', ([], {'user': 'request.user'}), '(user=request.user)\n', (553, 572), False, 'from hostelapp.models import student_room\n'), ((300, 329), 'django.shortcuts.redirect', 'redirect', (['"""default_home_name"""'], {}), "('default_home_name')\n", (308, 329), False, 'from django.shortcuts import redirect\n'), ((693, 725), 'django.contrib.messages.error', 'messages.error', (['request', 'message'], {}), '(request, message)\n', (707, 725), False, 'from django.contrib import messages\n'), ((745, 774), 'django.shortcuts.redirect', 'redirect', (['"""default_home_name"""'], {}), "('default_home_name')\n", (753, 774), False, 'from django.shortcuts import redirect\n'), ((1321, 1355), 'django.contrib.messages.success', 'messages.success', (['request', 'message'], {}), '(request, message)\n', (1337, 1355), False, 'from django.contrib import messages\n'), ((1379, 1408), 'django.shortcuts.redirect', 'redirect', (['"""default_home_name"""'], {}), "('default_home_name')\n", (1387, 1408), False, 'from django.shortcuts import redirect\n')] |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation
import json
from tqdm import tqdm
from collections import deque
from fourier_transform import FourierTransform
from epicycle_frame import EpicycleFrame
from draw_it_yourself import draw_it_yourself
# Draw it yourself
diy_or_not = input("Draw image by yourself ? (y/n) ")
if diy_or_not == 'y':
draw_it_yourself()
# Show original sample points
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_axis_off()
ax.set_aspect('equal') # To have symmetric axes
# Load coords
f = open('input/coords', 'r')
coords = json.load(f)
f.close()
x_list = [coord[0] for coord in coords]
x_list = x_list - np.mean(x_list)
y_list = [-coord[1] for coord in coords]
y_list = y_list - np.mean(y_list)
ax.plot(x_list, y_list)
xlim = plt.xlim()
ylim = plt.ylim()
plt.show(block=False)
plt.pause(2)
plt.close()
# Ask for settings
order = min(int(input("Max order: ")), int(np.ceil((len(coords)-1)/2)))
mode = input("Fourier Transform type (fft/mydft/myfft/myfftplus): ")
suffix = input("Save file type (mp4/gif): ")
# Compute fourier coeffients
ft = FourierTransform(x_list, y_list, order, mode=mode)
# Draw animation
fig, ax = plt.subplots()
ax.set_xlim(xlim[0]-100, xlim[1]+100)
ax.set_ylim(ylim[0]-100, ylim[1]+100)
ax.set_axis_off()
ax.set_aspect('equal')
# Frame params
frames = 300
original_drawing, = ax.plot([], [], '-', color='mediumaquamarine', linewidth=0.5)
circles = [ax.plot([], [], '-', color='pink', alpha=0.3, linewidth=0.75)[0] for i in range(-order, order+1)]
lines = [ax.plot([], [], '-', color='mediumpurple', alpha=0.7, linewidth=0.75)[0] for i in range(-order, order+1)]
paintbrush_x = deque()
paintbrush_y = deque()
drawing, = ax.plot([], [], '-', color='plum', linewidth=2)
# Generate animation
print("Generating animation ...")
pbar = tqdm(total=frames, desc='Progress') # Progress bar
# Draw frame at time t (t goes from 0 to 1 over one complete cycle)
def generate_frame(k, ft, t_list):
global pbar
t = t_list[k]
# Draw original image
original_drawing.set_data(x_list, y_list)
epicycle_frame = EpicycleFrame(ft, t)
for i in range(-order, order+1):
# Draw circles
circles[i].set_data(*epicycle_frame.circles[i])
# Draw lines
lines[i].set_data(*epicycle_frame.lines[i])
# Draw paintbrush
paintbrush_x.append(epicycle_frame.paintbrush[0])
paintbrush_y.append(epicycle_frame.paintbrush[1])
drawing.set_data(list(paintbrush_x), list(paintbrush_y))
# Update progress bar
pbar.update(1)
# Generate mp4 / gif
t_list = np.linspace(0, 1, num=frames)
anim = animation.FuncAnimation(fig, generate_frame, frames=frames, fargs=(ft, t_list), interval=1)
# Set up formatting for the video file
Writer = animation.writers['ffmpeg']
writer = Writer(fps=30, metadata=dict(artist='Tequila-Sunrise'), bitrate=7200)
anim.save('output/fourier-epicycle.'+suffix, writer=writer)
pbar.close()
print(f"Generating {suffix} file successfully!")
| [
"numpy.mean",
"epicycle_frame.EpicycleFrame",
"collections.deque",
"matplotlib.animation.FuncAnimation",
"tqdm.tqdm",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"fourier_transform.FourierTransform",
"numpy.linspace",
"draw_it_yourself.draw_it_yourself",
"json.load",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.pause",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((440, 452), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (450, 452), True, 'import matplotlib.pyplot as plt\n'), ((600, 612), 'json.load', 'json.load', (['f'], {}), '(f)\n', (609, 612), False, 'import json\n'), ((804, 814), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (812, 814), True, 'import matplotlib.pyplot as plt\n'), ((822, 832), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (830, 832), True, 'import matplotlib.pyplot as plt\n'), ((834, 855), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (842, 855), True, 'import matplotlib.pyplot as plt\n'), ((856, 868), 'matplotlib.pyplot.pause', 'plt.pause', (['(2)'], {}), '(2)\n', (865, 868), True, 'import matplotlib.pyplot as plt\n'), ((869, 880), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (878, 880), True, 'import matplotlib.pyplot as plt\n'), ((1122, 1172), 'fourier_transform.FourierTransform', 'FourierTransform', (['x_list', 'y_list', 'order'], {'mode': 'mode'}), '(x_list, y_list, order, mode=mode)\n', (1138, 1172), False, 'from fourier_transform import FourierTransform\n'), ((1201, 1215), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1213, 1215), True, 'import matplotlib.pyplot as plt\n'), ((1683, 1690), 'collections.deque', 'deque', ([], {}), '()\n', (1688, 1690), False, 'from collections import deque\n'), ((1706, 1713), 'collections.deque', 'deque', ([], {}), '()\n', (1711, 1713), False, 'from collections import deque\n'), ((1836, 1871), 'tqdm.tqdm', 'tqdm', ([], {'total': 'frames', 'desc': '"""Progress"""'}), "(total=frames, desc='Progress')\n", (1840, 1871), False, 'from tqdm import tqdm\n'), ((2597, 2626), 'numpy.linspace', 'np.linspace', (['(0)', '(1)'], {'num': 'frames'}), '(0, 1, num=frames)\n', (2608, 2626), True, 'import numpy as np\n'), ((2634, 2729), 'matplotlib.animation.FuncAnimation', 'animation.FuncAnimation', (['fig', 'generate_frame'], {'frames': 'frames', 'fargs': '(ft, t_list)', 'interval': '(1)'}), '(fig, generate_frame, frames=frames, fargs=(ft,\n t_list), interval=1)\n', (2657, 2729), False, 'from matplotlib import animation\n'), ((384, 402), 'draw_it_yourself.draw_it_yourself', 'draw_it_yourself', ([], {}), '()\n', (400, 402), False, 'from draw_it_yourself import draw_it_yourself\n'), ((682, 697), 'numpy.mean', 'np.mean', (['x_list'], {}), '(x_list)\n', (689, 697), True, 'import numpy as np\n'), ((757, 772), 'numpy.mean', 'np.mean', (['y_list'], {}), '(y_list)\n', (764, 772), True, 'import numpy as np\n'), ((2119, 2139), 'epicycle_frame.EpicycleFrame', 'EpicycleFrame', (['ft', 't'], {}), '(ft, t)\n', (2132, 2139), False, 'from epicycle_frame import EpicycleFrame\n')] |
from argparse import Namespace
def namespace(d):
assert isinstance(d, dict)
return Namespace(**d)
def feed_args_dict(func, args={}, force_return=None):
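    """Wrap func so the caller's arguments are ignored and func is called with
    `args` exposed as a Namespace; if force_return is set, it is returned instead."""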
assert callable(func)
args = namespace(args)
def _feed_args(*argsignore, **kwargsignore):
output = func(args)
if force_return is not None:
return force_return
return output
return _feed_args
| [
"argparse.Namespace"
] | [((93, 107), 'argparse.Namespace', 'Namespace', ([], {}), '(**d)\n', (102, 107), False, 'from argparse import Namespace\n')] |
import numpy as np
import os
import argparse
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
import umap
parser = argparse.ArgumentParser(description='AE model runner')
parser.add_argument('--name', '-n',
dest="model_name",
help= 'string of model name')
parser.add_argument('--data_path', '-d',
dest="data_path",
help= 'path to the latent variables')
args = parser.parse_args()
name = args.model_name
out_path = args.data_path
# Data
# data = np.load('vae_latents.npy')
data = np.load(os.path.join(out_path,'latents.npy'))
latents = data[:,:-1]
det_label = data[:,-1]
# UMAP Object
reducer = umap.UMAP()
scaled_latents = StandardScaler().fit_transform(latents)
embedding = reducer.fit_transform(scaled_latents)
print(embedding.shape)
# Plot
# plt.scatter(
# embedding[:, 0],
# embedding[:, 1],
# c=det_label,
# cmap='plasma'
# )
plt.scatter(
embedding[det_label==0, 0],
embedding[det_label==0, 1],
c='b',
label='Normal'
)
plt.scatter(
embedding[det_label==1, 0],
embedding[det_label==1, 1],
c='y',
label='Anomaly'
)
plt.legend(loc="upper left")
plt.gca().set_aspect('equal', 'datalim')
plt.title('UMAP projection of the '+name.upper()+' Latent Space', fontsize=24)
plt.savefig(os.path.join(out_path,name.lower()+'-latent-umap-scaled.png'),bbox_inches='tight') | [
"argparse.ArgumentParser",
"matplotlib.pyplot.gca",
"os.path.join",
"sklearn.preprocessing.StandardScaler",
"matplotlib.pyplot.scatter",
"umap.UMAP",
"matplotlib.pyplot.legend"
] | [((148, 202), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""AE model runner"""'}), "(description='AE model runner')\n", (171, 202), False, 'import argparse\n'), ((712, 723), 'umap.UMAP', 'umap.UMAP', ([], {}), '()\n', (721, 723), False, 'import umap\n'), ((970, 1069), 'matplotlib.pyplot.scatter', 'plt.scatter', (['embedding[det_label == 0, 0]', 'embedding[det_label == 0, 1]'], {'c': '"""b"""', 'label': '"""Normal"""'}), "(embedding[det_label == 0, 0], embedding[det_label == 0, 1], c=\n 'b', label='Normal')\n", (981, 1069), True, 'import matplotlib.pyplot as plt\n'), ((1083, 1183), 'matplotlib.pyplot.scatter', 'plt.scatter', (['embedding[det_label == 1, 0]', 'embedding[det_label == 1, 1]'], {'c': '"""y"""', 'label': '"""Anomaly"""'}), "(embedding[det_label == 1, 0], embedding[det_label == 1, 1], c=\n 'y', label='Anomaly')\n", (1094, 1183), True, 'import matplotlib.pyplot as plt\n'), ((1197, 1225), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""'}), "(loc='upper left')\n", (1207, 1225), True, 'import matplotlib.pyplot as plt\n'), ((604, 641), 'os.path.join', 'os.path.join', (['out_path', '"""latents.npy"""'], {}), "(out_path, 'latents.npy')\n", (616, 641), False, 'import os\n'), ((741, 757), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (755, 757), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1226, 1235), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1233, 1235), True, 'import matplotlib.pyplot as plt\n')] |
from argparse import ArgumentParser
def convert_vga(file_name: str) -> None:
"""
Open a BMP file and convert it into a
VGA data file.
:param file_name: The file name of the bmp file.
"""
bmp_data = []
with open(file_name, "rb") as bmp_file:
bmp_data = bmp_file.read()
    # Bytes 10-13 of the BMP header hold the pixel-data offset as a
    # 4-byte little-endian integer; read all four bytes so offsets
    # larger than 255 (e.g. palettised images) are handled correctly.
    vga_pointer = int.from_bytes(bmp_data[10:14], "little")
vga_data = bmp_data[vga_pointer:vga_pointer + 320 * 200]
with open(file_name.replace(".bmp", ".vga"), "wb") as output:
output.write(vga_data)
if __name__ == '__main__':
parser = ArgumentParser(prog="BMP2VGA Converter",
description="Convert a BMP file to a VGA data file.")
parser.add_argument("--file", action="store", help="The name of the BMP file")
args = parser.parse_args()
convert_vga(args.file)
| [
"argparse.ArgumentParser"
] | [((613, 712), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'prog': '"""BMP2VGA Converter"""', 'description': '"""Convert a BMP file to a VGA data file."""'}), "(prog='BMP2VGA Converter', description=\n 'Convert a BMP file to a VGA data file.')\n", (627, 712), False, 'from argparse import ArgumentParser\n')] |
# Copyright 2021 the Ithaca Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Text processing functions."""
import random
import re
import unicodedata
import numpy as np
def idx_to_text(idxs, alphabet, strip_sos=True, strip_pad=True):
"""Converts a list of indices to a string."""
idxs = np.array(idxs)
out = ''
for i in range(idxs.size):
idx = idxs[i]
if strip_pad and idx == alphabet.pad_idx:
break
elif strip_sos and idx == alphabet.sos_idx:
pass
else:
out += alphabet.idx2char[idx]
return out
def idx_to_text_batch(idxs, alphabet, lengths=None):
"""Converts batched lists of indices to strings."""
b = []
for i in range(idxs.shape[0]):
idxs_i = idxs[i]
if lengths:
idxs_i = idxs_i[:lengths[i]]
b.append(idx_to_text(idxs_i, alphabet))
return b
def random_mask_span(t, geometric_p=0.2, limit_chars=None):
"""Masks a span of sequential words."""
  # Obtain span indexes (start inclusive, end exclusive)
span_idx = [(ele.start(), ele.end()) for ele in re.finditer(r'[\w\s]+', t)]
if not span_idx:
return []
# Select a span to mask
span_start, span_end = random.choice(span_idx)
  # Sample a random span length using a geometric distribution
if geometric_p and limit_chars:
span_len = np.clip(
np.random.geometric(geometric_p),
1, min(limit_chars, span_end - span_start))
elif geometric_p:
span_len = np.clip(
np.random.geometric(geometric_p),
1, span_end - span_start)
elif limit_chars:
span_len = min(limit_chars, span_end - span_start)
else:
raise ValueError('geometric_p or limit_chars should be set.')
# Pick a random start index
span_start = np.random.randint(span_start, span_end - span_len + 1)
assert span_start + span_len <= span_end
# Clip to limit chars
if limit_chars is not None and span_len >= limit_chars:
span_len = limit_chars
# Create mask indices
mask_idx = list(range(span_start, span_start + span_len))
return mask_idx
def random_sentence_swap(sentences, p):
"""Swaps sentences with probability p."""
def swap_sentence(s):
idx_1 = random.randint(0, len(s) - 1)
idx_2 = idx_1
counter = 0
while idx_2 == idx_1:
idx_2 = random.randint(0, len(s) - 1)
counter += 1
if counter > 3:
return s
s[idx_1], s[idx_2] = s[idx_2], s[idx_1]
return s
new_sentences = sentences.copy()
n = int(p * len(sentences))
for _ in range(n):
new_sentences = swap_sentence(new_sentences)
return new_sentences
def random_word_delete(sentence, p):
"""Deletes a word from a sentence with probability p."""
words = sentence.split(' ')
# Return if one word.
if len(words) == 1:
return words[0]
# Randomly delete words.
new_words = []
for word in words:
if random.uniform(0, 1) > p:
new_words.append(word)
# If all words are removed return one.
if not new_words:
rand_int = random.randint(0, len(words) - 1)
return words[rand_int]
sentence = ' '.join(new_words)
return sentence
def random_word_swap(sentence, p):
"""Swaps words from a sentence with probability p."""
def swap_word(new_words):
idx_1 = random.randint(0, len(new_words) - 1)
idx_2 = idx_1
counter = 0
while idx_2 == idx_1:
idx_2 = random.randint(0, len(new_words) - 1)
counter += 1
if counter > 3:
return new_words
new_words[idx_1], new_words[idx_2] = new_words[idx_2], new_words[idx_1]
return new_words
words = sentence.split(' ')
new_words = words.copy()
n = int(p * len(words))
for _ in range(n):
new_words = swap_word(new_words)
sentence = ' '.join(new_words)
return sentence
def strip_accents(s):
return ''.join(
c for c in unicodedata.normalize('NFD', s)
if unicodedata.category(c) != 'Mn')
def text_to_idx(t, alphabet):
"""Converts a string to character indices."""
return np.array([alphabet.char2idx[c] for c in t], dtype=np.int32)
def text_to_word_idx(t, alphabet):
"""Converts a string to word indices."""
out = np.full(len(t), alphabet.word2idx[alphabet.unk], dtype=np.int32)
for m in re.finditer(r'\w+', t):
if m.group() in alphabet.word2idx:
out[m.start():m.end()] = alphabet.word2idx[m.group()]
return out
| [
"numpy.random.geometric",
"random.choice",
"random.uniform",
"numpy.array",
"numpy.random.randint",
"unicodedata.category",
"re.finditer",
"unicodedata.normalize"
] | [((804, 818), 'numpy.array', 'np.array', (['idxs'], {}), '(idxs)\n', (812, 818), True, 'import numpy as np\n'), ((1635, 1658), 'random.choice', 'random.choice', (['span_idx'], {}), '(span_idx)\n', (1648, 1658), False, 'import random\n'), ((2190, 2244), 'numpy.random.randint', 'np.random.randint', (['span_start', '(span_end - span_len + 1)'], {}), '(span_start, span_end - span_len + 1)\n', (2207, 2244), True, 'import numpy as np\n'), ((4419, 4478), 'numpy.array', 'np.array', (['[alphabet.char2idx[c] for c in t]'], {'dtype': 'np.int32'}), '([alphabet.char2idx[c] for c in t], dtype=np.int32)\n', (4427, 4478), True, 'import numpy as np\n'), ((4643, 4665), 're.finditer', 're.finditer', (['"""\\\\w+"""', 't'], {}), "('\\\\w+', t)\n", (4654, 4665), False, 'import re\n'), ((1522, 1549), 're.finditer', 're.finditer', (['"""[\\\\w\\\\s]+"""', 't'], {}), "('[\\\\w\\\\s]+', t)\n", (1533, 1549), False, 'import re\n'), ((1789, 1821), 'numpy.random.geometric', 'np.random.geometric', (['geometric_p'], {}), '(geometric_p)\n', (1808, 1821), True, 'import numpy as np\n'), ((3305, 3325), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (3319, 3325), False, 'import random\n'), ((1927, 1959), 'numpy.random.geometric', 'np.random.geometric', (['geometric_p'], {}), '(geometric_p)\n', (1946, 1959), True, 'import numpy as np\n'), ((4256, 4287), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFD"""', 's'], {}), "('NFD', s)\n", (4277, 4287), False, 'import unicodedata\n'), ((4297, 4320), 'unicodedata.category', 'unicodedata.category', (['c'], {}), '(c)\n', (4317, 4320), False, 'import unicodedata\n')] |
"""Release Granularity metric."""
import logging
import sqlalchemy
from typing import Text, Dict
from database import db
from database import models
from metrics import base
class ReleaseGranularityMetric(base.Metric):
"""A metric tracking the average number of commits between releases."""
UNIT = 'commits/release'
def _format_value(self, avg_commits: float) -> Text:
return '%d c/r' % round(avg_commits)
def _score_value(self, avg_commits: float) -> models.MetricScore:
if avg_commits > 1000:
return models.MetricScore.CRITICAL
elif avg_commits > 500:
return models.MetricScore.POOR
elif avg_commits > 100:
return models.MetricScore.MODERATE
elif avg_commits > 10:
return models.MetricScore.GOOD
else:
return models.MetricScore.EXCELLENT
def _compute_value(self) -> float:
"""Computes the average number of commits per release over the last 90 days.
Considering only production releases, we can just count all commits
committed between the first and last release of the 90-day window, and
    divide by the number of releases (excluding the last one).
Raises:
ValueError: if less than two releases exist.
Returns:
The average number of commits per release.
"""
logging.info('Counting commits per release')
session = db.Session()
releases = session.query(models.Release).filter(
models.Release.is_last_90_days(base_time=self.base_time)).order_by(
models.Release.published_at.desc()).all()
release_count = len(releases)
if release_count < 2:
raise ValueError('Not enough releases to determine a range of commits.')
last_release_date = releases[0].published_at
first_release_date = releases[-1].published_at
commits_count = session.query(models.Commit).filter(
models.Commit.committed_at.between(first_release_date,
last_release_date)).count()
session.close()
# Subtract one from release count since commits from the last release are
# not included.
return commits_count / (release_count - 1)
base.Metric.register(ReleaseGranularityMetric)
| [
"database.models.Commit.committed_at.between",
"database.models.Release.published_at.desc",
"database.db.Session",
"metrics.base.Metric.register",
"database.models.Release.is_last_90_days",
"logging.info"
] | [((2134, 2180), 'metrics.base.Metric.register', 'base.Metric.register', (['ReleaseGranularityMetric'], {}), '(ReleaseGranularityMetric)\n', (2154, 2180), False, 'from metrics import base\n'), ((1278, 1322), 'logging.info', 'logging.info', (['"""Counting commits per release"""'], {}), "('Counting commits per release')\n", (1290, 1322), False, 'import logging\n'), ((1337, 1349), 'database.db.Session', 'db.Session', ([], {}), '()\n', (1347, 1349), False, 'from database import db\n'), ((1491, 1525), 'database.models.Release.published_at.desc', 'models.Release.published_at.desc', ([], {}), '()\n', (1523, 1525), False, 'from database import models\n'), ((1840, 1913), 'database.models.Commit.committed_at.between', 'models.Commit.committed_at.between', (['first_release_date', 'last_release_date'], {}), '(first_release_date, last_release_date)\n', (1874, 1913), False, 'from database import models\n'), ((1411, 1467), 'database.models.Release.is_last_90_days', 'models.Release.is_last_90_days', ([], {'base_time': 'self.base_time'}), '(base_time=self.base_time)\n', (1441, 1467), False, 'from database import models\n')] |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ApplicationUsage(object):
"""
Application usage during a specified time period.
An application is a Java application that can be executed by a Java Runtime installation.
An application is independent of the Java Runtime or its installation.
"""
def __init__(self, **kwargs):
"""
Initializes a new ApplicationUsage object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param application_id:
The value to assign to the application_id property of this ApplicationUsage.
:type application_id: str
:param display_name:
The value to assign to the display_name property of this ApplicationUsage.
:type display_name: str
:param application_type:
The value to assign to the application_type property of this ApplicationUsage.
:type application_type: str
:param operating_systems:
The value to assign to the operating_systems property of this ApplicationUsage.
:type operating_systems: list[oci.jms.models.OperatingSystem]
:param approximate_installation_count:
The value to assign to the approximate_installation_count property of this ApplicationUsage.
:type approximate_installation_count: int
:param approximate_jre_count:
The value to assign to the approximate_jre_count property of this ApplicationUsage.
:type approximate_jre_count: int
:param approximate_managed_instance_count:
The value to assign to the approximate_managed_instance_count property of this ApplicationUsage.
:type approximate_managed_instance_count: int
:param time_start:
The value to assign to the time_start property of this ApplicationUsage.
:type time_start: datetime
:param time_end:
The value to assign to the time_end property of this ApplicationUsage.
:type time_end: datetime
:param time_first_seen:
The value to assign to the time_first_seen property of this ApplicationUsage.
:type time_first_seen: datetime
:param time_last_seen:
The value to assign to the time_last_seen property of this ApplicationUsage.
:type time_last_seen: datetime
"""
self.swagger_types = {
'application_id': 'str',
'display_name': 'str',
'application_type': 'str',
'operating_systems': 'list[OperatingSystem]',
'approximate_installation_count': 'int',
'approximate_jre_count': 'int',
'approximate_managed_instance_count': 'int',
'time_start': 'datetime',
'time_end': 'datetime',
'time_first_seen': 'datetime',
'time_last_seen': 'datetime'
}
self.attribute_map = {
'application_id': 'applicationId',
'display_name': 'displayName',
'application_type': 'applicationType',
'operating_systems': 'operatingSystems',
'approximate_installation_count': 'approximateInstallationCount',
'approximate_jre_count': 'approximateJreCount',
'approximate_managed_instance_count': 'approximateManagedInstanceCount',
'time_start': 'timeStart',
'time_end': 'timeEnd',
'time_first_seen': 'timeFirstSeen',
'time_last_seen': 'timeLastSeen'
}
self._application_id = None
self._display_name = None
self._application_type = None
self._operating_systems = None
self._approximate_installation_count = None
self._approximate_jre_count = None
self._approximate_managed_instance_count = None
self._time_start = None
self._time_end = None
self._time_first_seen = None
self._time_last_seen = None
@property
def application_id(self):
"""
**[Required]** Gets the application_id of this ApplicationUsage.
An internal identifier for the application that is unique to a Fleet.
:return: The application_id of this ApplicationUsage.
:rtype: str
"""
return self._application_id
@application_id.setter
def application_id(self, application_id):
"""
Sets the application_id of this ApplicationUsage.
An internal identifier for the application that is unique to a Fleet.
:param application_id: The application_id of this ApplicationUsage.
:type: str
"""
self._application_id = application_id
@property
def display_name(self):
"""
**[Required]** Gets the display_name of this ApplicationUsage.
The name of the application.
:return: The display_name of this ApplicationUsage.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this ApplicationUsage.
The name of the application.
:param display_name: The display_name of this ApplicationUsage.
:type: str
"""
self._display_name = display_name
@property
def application_type(self):
"""
**[Required]** Gets the application_type of this ApplicationUsage.
The type of the application, denoted by how the application was started.
:return: The application_type of this ApplicationUsage.
:rtype: str
"""
return self._application_type
@application_type.setter
def application_type(self, application_type):
"""
Sets the application_type of this ApplicationUsage.
The type of the application, denoted by how the application was started.
:param application_type: The application_type of this ApplicationUsage.
:type: str
"""
self._application_type = application_type
@property
def operating_systems(self):
"""
Gets the operating_systems of this ApplicationUsage.
The operating systems running this application.
:return: The operating_systems of this ApplicationUsage.
:rtype: list[oci.jms.models.OperatingSystem]
"""
return self._operating_systems
@operating_systems.setter
def operating_systems(self, operating_systems):
"""
Sets the operating_systems of this ApplicationUsage.
The operating systems running this application.
:param operating_systems: The operating_systems of this ApplicationUsage.
:type: list[oci.jms.models.OperatingSystem]
"""
self._operating_systems = operating_systems
@property
def approximate_installation_count(self):
"""
Gets the approximate_installation_count of this ApplicationUsage.
The approximate count of installations running this application.
:return: The approximate_installation_count of this ApplicationUsage.
:rtype: int
"""
return self._approximate_installation_count
@approximate_installation_count.setter
def approximate_installation_count(self, approximate_installation_count):
"""
Sets the approximate_installation_count of this ApplicationUsage.
The approximate count of installations running this application.
:param approximate_installation_count: The approximate_installation_count of this ApplicationUsage.
:type: int
"""
self._approximate_installation_count = approximate_installation_count
@property
def approximate_jre_count(self):
"""
Gets the approximate_jre_count of this ApplicationUsage.
The approximate count of Java Runtimes running this application.
:return: The approximate_jre_count of this ApplicationUsage.
:rtype: int
"""
return self._approximate_jre_count
@approximate_jre_count.setter
def approximate_jre_count(self, approximate_jre_count):
"""
Sets the approximate_jre_count of this ApplicationUsage.
The approximate count of Java Runtimes running this application.
:param approximate_jre_count: The approximate_jre_count of this ApplicationUsage.
:type: int
"""
self._approximate_jre_count = approximate_jre_count
@property
def approximate_managed_instance_count(self):
"""
Gets the approximate_managed_instance_count of this ApplicationUsage.
The approximate count of managed instances reporting this application.
:return: The approximate_managed_instance_count of this ApplicationUsage.
:rtype: int
"""
return self._approximate_managed_instance_count
@approximate_managed_instance_count.setter
def approximate_managed_instance_count(self, approximate_managed_instance_count):
"""
Sets the approximate_managed_instance_count of this ApplicationUsage.
The approximate count of managed instances reporting this application.
:param approximate_managed_instance_count: The approximate_managed_instance_count of this ApplicationUsage.
:type: int
"""
self._approximate_managed_instance_count = approximate_managed_instance_count
@property
def time_start(self):
"""
Gets the time_start of this ApplicationUsage.
Lower bound of the specified time period filter.
:return: The time_start of this ApplicationUsage.
:rtype: datetime
"""
return self._time_start
@time_start.setter
def time_start(self, time_start):
"""
Sets the time_start of this ApplicationUsage.
Lower bound of the specified time period filter.
:param time_start: The time_start of this ApplicationUsage.
:type: datetime
"""
self._time_start = time_start
@property
def time_end(self):
"""
Gets the time_end of this ApplicationUsage.
Upper bound of the specified time period filter.
:return: The time_end of this ApplicationUsage.
:rtype: datetime
"""
return self._time_end
@time_end.setter
def time_end(self, time_end):
"""
Sets the time_end of this ApplicationUsage.
Upper bound of the specified time period filter.
:param time_end: The time_end of this ApplicationUsage.
:type: datetime
"""
self._time_end = time_end
@property
def time_first_seen(self):
"""
Gets the time_first_seen of this ApplicationUsage.
The date and time the resource was _first_ reported to JMS.
This is potentially _before_ the specified time period provided by the filters.
For example, a resource can be first reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:return: The time_first_seen of this ApplicationUsage.
:rtype: datetime
"""
return self._time_first_seen
@time_first_seen.setter
def time_first_seen(self, time_first_seen):
"""
Sets the time_first_seen of this ApplicationUsage.
The date and time the resource was _first_ reported to JMS.
This is potentially _before_ the specified time period provided by the filters.
For example, a resource can be first reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:param time_first_seen: The time_first_seen of this ApplicationUsage.
:type: datetime
"""
self._time_first_seen = time_first_seen
@property
def time_last_seen(self):
"""
Gets the time_last_seen of this ApplicationUsage.
The date and time the resource was _last_ reported to JMS.
This is potentially _after_ the specified time period provided by the filters.
For example, a resource can be last reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:return: The time_last_seen of this ApplicationUsage.
:rtype: datetime
"""
return self._time_last_seen
@time_last_seen.setter
def time_last_seen(self, time_last_seen):
"""
Sets the time_last_seen of this ApplicationUsage.
The date and time the resource was _last_ reported to JMS.
This is potentially _after_ the specified time period provided by the filters.
For example, a resource can be last reported to JMS before the start of a specified time period,
if it is also reported during the time period.
:param time_last_seen: The time_last_seen of this ApplicationUsage.
:type: datetime
"""
self._time_last_seen = time_last_seen
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| [
"oci.util.formatted_flat_dict"
] | [((13600, 13625), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (13619, 13625), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')] |
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsRegressor
from sklearn.metrics import explained_variance_score
import sys
import os
# Helper function to estimate memory usage
def mem_usage(pandas_obj):
if isinstance(pandas_obj,pd.DataFrame):
usage_b = pandas_obj.memory_usage(deep=True).sum()
else: # we assume if not a df it's a series
usage_b = pandas_obj.memory_usage(deep=True)
usage_mb = usage_b / 1024 ** 2 # convert bytes to megabytes
return "{:03.2f} MB".format(usage_mb)
# Function to print a message and exit the program
def print_and_exit(msg, memory_usage):
print(msg, memory_usage)
sys.exit()
# Function to load and prepare the files
def load_and_prepare_files():
print("********************CARREGANDO ARQUIVOS********************")
colspecs = [(2,10),(12,24),(56,69),(108,121)]
names = ["DataPregao", "Ticker", "PrecoAbertura", "PrecoUltimoNegocio"]
frames = []
for root, dirs, files in os.walk("./data"):
for filename in files:
print("Carregando o arquivo: ", filename)
temp_dataset = pd.read_fwf('data/' + filename, header=None, colspecs=colspecs, names=names)
temp_dataset.drop([0], inplace=True)
temp_dataset = temp_dataset[:-1]
frames.append(temp_dataset)
dataset = pd.concat(frames)
print("Todos os arquivos carregados e tratados\n")
return dataset
def otimiza_memoria(data_frame):
print("********************OTIMIZANDO MEMORIA********************")
print("Total de memoria: ", mem_usage(data_frame))
data_frame['DataPregao'] = data_frame['DataPregao'].astype('category')
data_frame['Ticker'] = data_frame['Ticker'].astype('category')
data_frame_float = data_frame.select_dtypes(include=['float'])
converted_float = data_frame_float.apply(pd.to_numeric,downcast='float')
data_frame[converted_float.columns] = converted_float
print("Total de memoria: ", mem_usage(data_frame), "\n")
return data_frame
data_frame = load_and_prepare_files()
data_frame = otimiza_memoria(data_frame)
print("********************EXECUTANDO MODELO********************")
# Here we normalize date and ticker
data_frame['DataPregao'] = data_frame['DataPregao'].cat.codes
data_frame['Ticker'] = data_frame['Ticker'].cat.codes
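# The .cat.codes conversion above replaces each category (trading-date string, ticker symbol) with
# an integer code so KNN can use them as numeric features; note these codes are arbitrary ordinal
# labels, not true distances between dates or tickers.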
# Sort the dataset to avoid problems in the estimation
data_frame.sort_values(by=['DataPregao', 'Ticker'], inplace=True)
# Slicing the data_frame to define X and Y
X = data_frame.values[:, 0:3]
Y = data_frame.values[:,3]
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.3,
random_state = 100)
rgr_knn = KNeighborsRegressor(n_neighbors=5, weights='distance')
rgr_knn.fit(X_train, y_train)
y_pred_knn = rgr_knn.predict(X_test)
print("KRR accuracy is ", explained_variance_score(y_test,y_pred_knn)*100)
X_test_knn_2 = [[482, 46229, 2308], [481, 46229, 2329], [480, 46229, 2374]]
y_pred_knn_2 = rgr_knn.predict(X_test_knn_2)
print(y_pred_knn_2)
X_test_knn_3 = [[481, 46229, 2329]]
y_pred_knn_3 = rgr_knn.predict(X_test_knn_3)
print(y_pred_knn_3) | [
"sklearn.model_selection.train_test_split",
"sklearn.neighbors.KNeighborsRegressor",
"pandas.read_fwf",
"sys.exit",
"pandas.concat",
"sklearn.metrics.explained_variance_score",
"os.walk"
] | [((2651, 2706), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'Y'], {'test_size': '(0.3)', 'random_state': '(100)'}), '(X, Y, test_size=0.3, random_state=100)\n', (2667, 2706), False, 'from sklearn.model_selection import train_test_split\n'), ((2774, 2828), 'sklearn.neighbors.KNeighborsRegressor', 'KNeighborsRegressor', ([], {'n_neighbors': '(5)', 'weights': '"""distance"""'}), "(n_neighbors=5, weights='distance')\n", (2793, 2828), False, 'from sklearn.neighbors import KNeighborsRegressor\n'), ((723, 733), 'sys.exit', 'sys.exit', ([], {}), '()\n', (731, 733), False, 'import sys\n'), ((1055, 1072), 'os.walk', 'os.walk', (['"""./data"""'], {}), "('./data')\n", (1062, 1072), False, 'import os\n'), ((1412, 1429), 'pandas.concat', 'pd.concat', (['frames'], {}), '(frames)\n', (1421, 1429), True, 'import pandas as pd\n'), ((2922, 2966), 'sklearn.metrics.explained_variance_score', 'explained_variance_score', (['y_test', 'y_pred_knn'], {}), '(y_test, y_pred_knn)\n', (2946, 2966), False, 'from sklearn.metrics import explained_variance_score\n'), ((1186, 1262), 'pandas.read_fwf', 'pd.read_fwf', (["('data/' + filename)"], {'header': 'None', 'colspecs': 'colspecs', 'names': 'names'}), "('data/' + filename, header=None, colspecs=colspecs, names=names)\n", (1197, 1262), True, 'import pandas as pd\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import textwrap
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from ralph.cmdb import models_changes as chdb
from ralph.cmdb.models_signals import register_issue_signal
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""This tool synchronize database with Jira tickets in case of errors."""
help = textwrap.dedent(__doc__).strip()
requires_model_validation = True
option_list = BaseCommand.option_list + (
make_option(
'--run',
action='store_true',
dest='run',
default=False,
help='Deprecated. Does nothing.',
),
)
def handle(self, *args, **options):
logger.debug('Syncing tickets.')
for change in chdb.CIChange.objects.filter(
registration_type=chdb.CI_CHANGE_REGISTRATION_TYPES.WAITING):
register_issue_signal.send(sender=self, change_id=change.id)
logger.debug('Finished syncing tickets.')
| [
"logging.getLogger",
"textwrap.dedent",
"ralph.cmdb.models_signals.register_issue_signal.send",
"ralph.cmdb.models_changes.CIChange.objects.filter",
"optparse.make_option"
] | [((385, 412), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (402, 412), False, 'import logging\n'), ((943, 1037), 'ralph.cmdb.models_changes.CIChange.objects.filter', 'chdb.CIChange.objects.filter', ([], {'registration_type': 'chdb.CI_CHANGE_REGISTRATION_TYPES.WAITING'}), '(registration_type=chdb.\n CI_CHANGE_REGISTRATION_TYPES.WAITING)\n', (971, 1037), True, 'from ralph.cmdb import models_changes as chdb\n'), ((533, 557), 'textwrap.dedent', 'textwrap.dedent', (['__doc__'], {}), '(__doc__)\n', (548, 557), False, 'import textwrap\n'), ((658, 765), 'optparse.make_option', 'make_option', (['"""--run"""'], {'action': '"""store_true"""', 'dest': '"""run"""', 'default': '(False)', 'help': '"""Deprecated. Does nothing."""'}), "('--run', action='store_true', dest='run', default=False, help=\n 'Deprecated. Does nothing.')\n", (669, 765), False, 'from optparse import make_option\n'), ((1063, 1123), 'ralph.cmdb.models_signals.register_issue_signal.send', 'register_issue_signal.send', ([], {'sender': 'self', 'change_id': 'change.id'}), '(sender=self, change_id=change.id)\n', (1089, 1123), False, 'from ralph.cmdb.models_signals import register_issue_signal\n')] |
#!/usr/bin/env python3
# coding: utf-8
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""operator dsl function: mean"""
import akg.topi
import akg.tvm
from akg.ops.math import sum
from akg.utils import format_transform as ft_util
from akg.utils import validation_check as vc_util
from akg.utils import custom_tiling as ct_util
from akg.utils.dynamic_shape import shape_is_dynamic
INT16_MAX = 65536
def get_attrs(tensor):
"""generate default attrs."""
if shape_is_dynamic(tensor):
return {"enable_double_buffer": 0, "enable_divide_var": 1}
return {}
def mean_dynamic_tiling_strategy(tensor, axis):
"""custom tiling for mean with dynamic shape"""
strategy = list()
inner_most_to_full = True
resnet_inner_most_axis_pos = 4
reduce_axis_to_1 = True
reduce_axis_to_no_iso = False
multicore_axis_to_1 = True
resnet_outer_most_axis_pos = 0
if inner_most_to_full:
strategy += ct_util.create_constraint_on_tensor(tensor=tensor,
values="FULL",
constraints=ct_util.TileConstraint.MAX,
tensor_pos=resnet_inner_most_axis_pos)
if reduce_axis_to_1:
strategy += ct_util.create_constraint_on_tensor(tensor=tensor,
values=[1 for _ in axis],
constraints=ct_util.TileConstraint.FACTOR,
tensor_pos=axis)
elif reduce_axis_to_no_iso:
strategy += ct_util.create_constraint_on_tensor(tensor=tensor,
values=[1 for _ in axis],
constraints=ct_util.TileConstraint.FORBID_ISOLATE,
tensor_pos=axis)
if multicore_axis_to_1:
strategy += ct_util.create_constraint_on_tensor(tensor=tensor,
values=1,
constraints=ct_util.TileConstraint.FACTOR,
tensor_pos=resnet_outer_most_axis_pos)
return strategy
@vc_util.check_input_type(akg.tvm.tensor.Tensor, (list, tuple, int, type(None)), (bool, type(None)))
def mean(data, axis=None, keepdims=False):
"""
Computes the mean of the values of a Tensor over the whole dataset.
Note:
        If the tuple's elements are unsorted, this function first calls preprocess_axis to sort them.
        If the tuple is empty, this function will compute the sum over all elements.
        If the data type is float16 and the whole dim is not less than 65536, this function will compute the mean
        by dividing by 65535 first, to avoid the whole dim being too large.
Args:
data (tvm.tensor.Tensor): Tensor of type float16, float32.
        axis (Union[list, tuple, int, None]): If the tuple is empty, the axis is treated as None.
        keepdims (bool): If keepdims is True, the result has the same number of dimensions as the input.
Returns:
        tvm.tensor.Tensor, has the same type as data. If keepdims is True, all reduced dimensions are
        retained with length 1; otherwise these reduced axes are eliminated.
"""
# Check types
vc_util.ops_dtype_check(data.dtype, vc_util.DtypeForDavinci.ALL_FLOAT)
# Check shape
shape = ft_util.get_shape(data)
vc_util.reduce_axis_check(shape, axis)
axis = ft_util.refine_reduce_axis(data, axis)
count = 1
for i in axis:
count *= shape[i]
output, _ = sum.sum_value(data, axis, keepdims)
if shape_is_dynamic(data):
res = akg.tvm.compute(output.shape, lambda *i: akg.lang.cce.divide_var(output(*i), count), name="res")
else:
res = akg.topi.divide(output, count)
attrs = get_attrs(data)
if shape_is_dynamic(data):
attrs["custom_tiling"] = mean_dynamic_tiling_strategy(data, axis)
return res, attrs
@vc_util.check_input_type(akg.tvm.tensor.Tensor, (list, tuple, int, type(None)), (bool, type(None)))
def mean_v2(data, axis=None, keepdims=False):
"""Simple implementation of mean."""
# Check types
vc_util.ops_dtype_check(data.dtype, vc_util.DtypeForDavinci.ALL_FLOAT)
# Check shape
shape = [x.value for x in data.shape]
vc_util.reduce_axis_check(shape, axis)
axis = ft_util.refine_reduce_axis(data, axis)
dtype = data.dtype
count = 1
for i in axis:
count *= shape[i]
count_rec = 1 / count
output, _ = sum.sum_v2(data, axis, keepdims)
res = output * akg.tvm.const(count_rec, dtype)
attrs = get_attrs(data)
if shape_is_dynamic(data):
attrs["custom_tiling"] = mean_dynamic_tiling_strategy(data, axis)
return res, attrs
| [
"akg.utils.custom_tiling.create_constraint_on_tensor",
"akg.utils.dynamic_shape.shape_is_dynamic",
"akg.utils.format_transform.refine_reduce_axis",
"akg.ops.math.sum.sum_v2",
"akg.utils.validation_check.reduce_axis_check",
"akg.utils.validation_check.ops_dtype_check",
"akg.ops.math.sum.sum_value",
"akg.utils.format_transform.get_shape"
] | [((1008, 1032), 'akg.utils.dynamic_shape.shape_is_dynamic', 'shape_is_dynamic', (['tensor'], {}), '(tensor)\n', (1024, 1032), False, 'from akg.utils.dynamic_shape import shape_is_dynamic\n'), ((4010, 4080), 'akg.utils.validation_check.ops_dtype_check', 'vc_util.ops_dtype_check', (['data.dtype', 'vc_util.DtypeForDavinci.ALL_FLOAT'], {}), '(data.dtype, vc_util.DtypeForDavinci.ALL_FLOAT)\n', (4033, 4080), True, 'from akg.utils import validation_check as vc_util\n'), ((4112, 4135), 'akg.utils.format_transform.get_shape', 'ft_util.get_shape', (['data'], {}), '(data)\n', (4129, 4135), True, 'from akg.utils import format_transform as ft_util\n'), ((4140, 4178), 'akg.utils.validation_check.reduce_axis_check', 'vc_util.reduce_axis_check', (['shape', 'axis'], {}), '(shape, axis)\n', (4165, 4178), True, 'from akg.utils import validation_check as vc_util\n'), ((4190, 4228), 'akg.utils.format_transform.refine_reduce_axis', 'ft_util.refine_reduce_axis', (['data', 'axis'], {}), '(data, axis)\n', (4216, 4228), True, 'from akg.utils import format_transform as ft_util\n'), ((4305, 4340), 'akg.ops.math.sum.sum_value', 'sum.sum_value', (['data', 'axis', 'keepdims'], {}), '(data, axis, keepdims)\n', (4318, 4340), False, 'from akg.ops.math import sum\n'), ((4349, 4371), 'akg.utils.dynamic_shape.shape_is_dynamic', 'shape_is_dynamic', (['data'], {}), '(data)\n', (4365, 4371), False, 'from akg.utils.dynamic_shape import shape_is_dynamic\n'), ((4575, 4597), 'akg.utils.dynamic_shape.shape_is_dynamic', 'shape_is_dynamic', (['data'], {}), '(data)\n', (4591, 4597), False, 'from akg.utils.dynamic_shape import shape_is_dynamic\n'), ((4907, 4977), 'akg.utils.validation_check.ops_dtype_check', 'vc_util.ops_dtype_check', (['data.dtype', 'vc_util.DtypeForDavinci.ALL_FLOAT'], {}), '(data.dtype, vc_util.DtypeForDavinci.ALL_FLOAT)\n', (4930, 4977), True, 'from akg.utils import validation_check as vc_util\n'), ((5043, 5081), 'akg.utils.validation_check.reduce_axis_check', 'vc_util.reduce_axis_check', (['shape', 'axis'], {}), '(shape, axis)\n', (5068, 5081), True, 'from akg.utils import validation_check as vc_util\n'), ((5093, 5131), 'akg.utils.format_transform.refine_reduce_axis', 'ft_util.refine_reduce_axis', (['data', 'axis'], {}), '(data, axis)\n', (5119, 5131), True, 'from akg.utils import format_transform as ft_util\n'), ((5258, 5290), 'akg.ops.math.sum.sum_v2', 'sum.sum_v2', (['data', 'axis', 'keepdims'], {}), '(data, axis, keepdims)\n', (5268, 5290), False, 'from akg.ops.math import sum\n'), ((5377, 5399), 'akg.utils.dynamic_shape.shape_is_dynamic', 'shape_is_dynamic', (['data'], {}), '(data)\n', (5393, 5399), False, 'from akg.utils.dynamic_shape import shape_is_dynamic\n'), ((1484, 1637), 'akg.utils.custom_tiling.create_constraint_on_tensor', 'ct_util.create_constraint_on_tensor', ([], {'tensor': 'tensor', 'values': '"""FULL"""', 'constraints': 'ct_util.TileConstraint.MAX', 'tensor_pos': 'resnet_inner_most_axis_pos'}), "(tensor=tensor, values='FULL',\n constraints=ct_util.TileConstraint.MAX, tensor_pos=\n resnet_inner_most_axis_pos)\n", (1519, 1637), True, 'from akg.utils import custom_tiling as ct_util\n'), ((1842, 1984), 'akg.utils.custom_tiling.create_constraint_on_tensor', 'ct_util.create_constraint_on_tensor', ([], {'tensor': 'tensor', 'values': '[(1) for _ in axis]', 'constraints': 'ct_util.TileConstraint.FACTOR', 'tensor_pos': 'axis'}), '(tensor=tensor, values=[(1) for _ in\n axis], constraints=ct_util.TileConstraint.FACTOR, tensor_pos=axis)\n', (1877, 1984), True, 'from akg.utils import custom_tiling as 
ct_util\n'), ((2560, 2707), 'akg.utils.custom_tiling.create_constraint_on_tensor', 'ct_util.create_constraint_on_tensor', ([], {'tensor': 'tensor', 'values': '(1)', 'constraints': 'ct_util.TileConstraint.FACTOR', 'tensor_pos': 'resnet_outer_most_axis_pos'}), '(tensor=tensor, values=1, constraints=\n ct_util.TileConstraint.FACTOR, tensor_pos=resnet_outer_most_axis_pos)\n', (2595, 2707), True, 'from akg.utils import custom_tiling as ct_util\n'), ((2199, 2349), 'akg.utils.custom_tiling.create_constraint_on_tensor', 'ct_util.create_constraint_on_tensor', ([], {'tensor': 'tensor', 'values': '[(1) for _ in axis]', 'constraints': 'ct_util.TileConstraint.FORBID_ISOLATE', 'tensor_pos': 'axis'}), '(tensor=tensor, values=[(1) for _ in\n axis], constraints=ct_util.TileConstraint.FORBID_ISOLATE, tensor_pos=axis)\n', (2234, 2349), True, 'from akg.utils import custom_tiling as ct_util\n')] |
import pickle
import sys
import gensim
from gensim.test.utils import common_texts, get_tmpfile
from gensim.models import Word2Vec, KeyedVectors
embeddings_name = "input100_connection2_dim200_windowsize70.emb"
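# The file name appears to encode the training setup (e.g. embedding dimension 200, window size 70);
# that reading is only an assumption from the name -- the script just needs a word2vec-format file.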
id_to_check = "27236"
with open('id_to_name.pkl', 'rb') as f:
pkl = pickle.load(f)
vectors = KeyedVectors.load_word2vec_format(embeddings_name, binary=False)
node_to_check = pkl[id_to_check]
similar = vectors.most_similar(positive=id_to_check)
print("similar to: ", node_to_check["verb"], node_to_check["subject"], node_to_check["object"])
print("--------------")
for index, score in similar:
node = pkl[index]
print(node["verb"], node["subject"], node["object"], score) | [
"gensim.models.KeyedVectors.load_word2vec_format",
"pickle.load"
] | [((309, 373), 'gensim.models.KeyedVectors.load_word2vec_format', 'KeyedVectors.load_word2vec_format', (['embeddings_name'], {'binary': '(False)'}), '(embeddings_name, binary=False)\n', (342, 373), False, 'from gensim.models import Word2Vec, KeyedVectors\n'), ((283, 297), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (294, 297), False, 'import pickle\n')] |
from django.template import loader
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
@login_required(login_url='/accounts/login')
def home(request):
print()
return render(request, 'service/index.html', {})
| [
"django.shortcuts.render",
"django.contrib.auth.decorators.login_required"
] | [((143, 186), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login"""'}), "(login_url='/accounts/login')\n", (157, 186), False, 'from django.contrib.auth.decorators import login_required\n'), ((232, 273), 'django.shortcuts.render', 'render', (['request', '"""service/index.html"""', '{}'], {}), "(request, 'service/index.html', {})\n", (238, 273), False, 'from django.shortcuts import render\n')] |
import pandas as pd
import json
from datetime import date, timedelta, datetime
import numpy as np
from project.items import ProjectItem
import scrapy
class Project(scrapy.Spider):
name = 'Project'
start_urls = ['Your_URL&pagenums=1']
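    # 'Your_URL' is a placeholder left in the source; the endpoint is expected to return JSON with
    # a 'pages' field and to accept a 'pagenums' query parameter.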
def parse(self, response):
data = json.loads(response.text)
stop = data['pages'] # get total page numbers
for i in range(1, stop+1):
q_url = f'Your_URL&pagenums={i}'
yield scrapy.Request(url=q_url, callback=self.parse_data_process)
def parse_data_process(self,response):
item = ProjectItem()
item['date'] = date.today().strftime('%Y-%m-%d')
item['log'] = response.text
item['createdtime'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
return item | [
"json.loads",
"project.items.ProjectItem",
"datetime.datetime.now",
"scrapy.Request",
"datetime.date.today"
] | [((290, 315), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (300, 315), False, 'import json\n'), ((587, 600), 'project.items.ProjectItem', 'ProjectItem', ([], {}), '()\n', (598, 600), False, 'from project.items import ProjectItem\n'), ((468, 527), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'q_url', 'callback': 'self.parse_data_process'}), '(url=q_url, callback=self.parse_data_process)\n', (482, 527), False, 'import scrapy\n'), ((624, 636), 'datetime.date.today', 'date.today', ([], {}), '()\n', (634, 636), False, 'from datetime import date, timedelta, datetime\n'), ((724, 738), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (736, 738), False, 'from datetime import date, timedelta, datetime\n')] |
import pandas as pd
from matplotlib import pyplot as plt
from glob import glob
def vplo_dmp2df(filename):
# df = pd.read_csv(filename, skiprows=1, delimiter = "\s+", names= ['s', 'x', 'y', 'Ue/Vinf', 'Dstar', 'Theta', 'Cf', 'H'])
    df = pd.read_csv(filename, skiprows=1, delimiter=r"\s+", header=None)
    df = df[df.columns[:8]]
    df.columns = ['s', 'x', 'y', 'Ue/Vinf', 'Dstar', 'Theta', 'Cf', 'H']
    surface_df = df[df['x'] <= 1.0].copy()  # removes the wake
    surface_df['Ue/Vinf'] = abs(surface_df['Ue/Vinf'])
    # upper_surface_df = surface_df[surface_df['y'] >= 0.0]
    # lower_surface_df = surface_df[surface_df['y'] <= 0.0]
    # Previously upper and lower surfaces were differentiated by y value.
    # For cambered foils, where the bottom can go back to positive, this won't work,
    # so the LE is found from min(x).
    le_index = surface_df[['x']].idxmin()[0]
    upper_surface_df = surface_df.loc[:le_index]
    lower_surface_df = surface_df.loc[le_index:]
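    # The split above assumes the usual XFOIL ordering of surface points (upper-surface TE -> LE ->
    # lower-surface TE), so everything up to the minimum-x point is taken as the upper surface.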
# import pdb; pdb.set_trace()
upper_surface_df.label = filename[:-4]
lower_surface_df.label = filename[:-4]
return upper_surface_df, lower_surface_df
def gen_vplo_figure(surface):
fig = plt.figure(figsize=(6,24), tight_layout = True)
fig.canvas.set_window_title('XFoil_BL_Data : {}'.format(surface))
ax1 = fig.add_subplot(511)
ax2 = fig.add_subplot(512, sharex=ax1)
ax3 = fig.add_subplot(513, sharex=ax1)
ax4 = fig.add_subplot(514, sharex=ax1)
ax5 = fig.add_subplot(515, sharex=ax1)
for ax in fig.get_axes():
ax.set_xlim(0,1)
ax.grid()
ax1.set_ylabel("Ue/Vinf")
ax2.set_ylabel("Dstar")
ax3.set_ylabel("Theta")
ax4.set_ylabel("Cf")
ax5.set_ylabel("H")
return fig
def proc_plot_vplo(fig, df):
ax1, ax2, ax3, ax4, ax5 = fig.get_axes()
ax1.plot(df['x'], df["Ue/Vinf"] ,linewidth=1 , label=df.label)
ax2.plot(df['x'], df["Dstar"] ,linewidth=1)
ax3.plot(df['x'], df["Theta"] ,linewidth=1)
ax4.plot(df['x'], df["Cf"] ,linewidth=1)
ax5.plot(df['x'], df["H"] ,linewidth=1)
ax1.legend(loc = 'lower left', frameon=False, fontsize='small')
plt.show(block=False)
if __name__ == '__main__':
plt.ion()
upper_fig = gen_vplo_figure(surface = 'Upper_Surface')
lower_fig = gen_vplo_figure(surface = 'Lower_Surface')
file_list = glob('*.dmp')
    # One workbook for all .dmp files; each file gets its own pair of sheets.
    with pd.ExcelWriter('extracted_polar.xlsx') as writer:
        for idx, filename in enumerate(file_list):
            upper_surface_df, lower_surface_df = vplo_dmp2df(filename)
            upper_surface_df.to_excel(writer, sheet_name='{}_Upper_Surface'.format(idx), index=False)
            lower_surface_df.to_excel(writer, sheet_name='{}_Lower_Surface'.format(idx), index=False)
            proc_plot_vplo(upper_fig, upper_surface_df)
            proc_plot_vplo(lower_fig, lower_surface_df)
| [
"pandas.read_csv",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ion",
"pandas.ExcelWriter",
"glob.glob",
"matplotlib.pyplot.show"
] | [((246, 310), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'skiprows': '(1)', 'delimiter': '"""\\\\s+"""', 'header': 'None'}), "(filename, skiprows=1, delimiter='\\\\s+', header=None)\n", (257, 310), True, 'import pandas as pd\n'), ((1154, 1200), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 24)', 'tight_layout': '(True)'}), '(figsize=(6, 24), tight_layout=True)\n', (1164, 1200), True, 'from matplotlib import pyplot as plt\n'), ((2066, 2087), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (2074, 2087), True, 'from matplotlib import pyplot as plt\n'), ((2120, 2129), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (2127, 2129), True, 'from matplotlib import pyplot as plt\n'), ((2260, 2273), 'glob.glob', 'glob', (['"""*.dmp"""'], {}), "('*.dmp')\n", (2264, 2273), False, 'from glob import glob\n'), ((2391, 2429), 'pandas.ExcelWriter', 'pd.ExcelWriter', (['"""extracted_polar.xlsx"""'], {}), "('extracted_polar.xlsx')\n", (2405, 2429), True, 'import pandas as pd\n')] |
import numpy as np
import pandas as pd
import seaborn as sns
import networkx as nx
import matplotlib.pyplot as plt
import matplotlib
import time
import ot
from scipy import linalg
from scipy import sparse
import gromovWassersteinAveraging as gwa
import spectralGW as sgw
from geodesicVisualization import *
from GromovWassersteinFramework import *
import json
from sklearn import manifold
from sklearn.model_selection import train_test_split
# Load the S-GWL code
import DataIO as DataIO
import EvaluationMeasure as Eval
import GromovWassersteinGraphToolkit as GwGt
import pickle
import warnings
from graphProcessing import load_graph
# Load modules for network partitioning experiments
import community
from networkx.algorithms.community import greedy_modularity_communities
from networkx.algorithms.community.asyn_fluid import asyn_fluidc
from networkx.algorithms.community.quality import modularity
from sklearn import metrics
from infomap import Infomap
warnings.filterwarnings("ignore")
def get_sbm(ns,ps):
# convert ps from 1d to 2d array
n = len(ns)
if n*(n+1)/2 != len(ps):
print('Error: check size of ps')
return None
else:
R,C = np.triu_indices(n)
pm = np.zeros((n,n))
pm[R,C] = ps
pm[C,R] = ps
G = nx.stochastic_block_model(ns, pm)
gt = []
for i in range(len(ns)):
for j in range(ns[i]):
gt.append(i)
return G,gt,pm
def get_gw_ami(G,t,gt):
# G -- graph
# t -- heat kernel scale parameter
# gt -- ground truth
distribution_exponent_hk = 0.001
distribution_offset_hk = 0
C1 = sgw.undirected_normalized_heat_kernel(G,t)
p1 = sgw.node_distribution(G,distribution_offset_hk,distribution_exponent_hk)
p2 = np.ravel(GwGt.estimate_target_distribution({0: p1.reshape(-1,1)}, dim_t=len(np.unique(gt))))
# Note that we are inserting prior information about the number of clusters
C2 = np.diag(p2)
coup, log = ot.gromov.gromov_wasserstein(C1, C2, p1, p2, loss_fun = 'square_loss', log = True)
est_idx = np.argmax(coup, axis=1)
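    # np.argmax assigns each node to the target block receiving most of its coupling mass,
    # turning the soft GW coupling into a hard cluster labelling.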
ami = metrics.adjusted_mutual_info_score(est_idx,gt,average_method='max')
comms = [set() for v in np.unique(est_idx)]
for idx,val in enumerate(est_idx):
comms[val].add(idx)
mod = modularity(G,comms)
return ami,mod
## Construct sequence of SBMs that are increasingly indistinguishable
ns = [75,75]
ps = [0.5,0.35,0.5]
ts = np.linspace(0,20,20)
amis = []
mods = []
pvals = []
tvals = []
iterate = []
As = []
p_range = np.arange(0.15,0.35,0.02)
# Calculate modularity and AMI
for iteration in range(10):
for p in p_range:
p_copy = ps.copy()
p_copy[1] = p
G,gt,pm = get_sbm(ns,p_copy)
A = nx.adjacency_matrix(G).toarray()
if iteration==0:
As.append(A)
for t in ts:
ami, mod = get_gw_ami(G,t,gt)
amis.append(ami)
mods.append(mod)
tvals.append(t)
pvals.append(p)
iterate.append(iteration)
sbm_df = pd.DataFrame()
sbm_df['t'] = tvals
sbm_df['off-diag-p'] = pvals
sbm_df['AMI'] = amis
sbm_df['Modularity'] = mods
sbm_df['iteration'] = iterate
fig,axs = plt.subplots(2,5)
axs = axs.flatten()
for i in range(len(As)):
ax = axs[i]
ax.imshow(As[i])
fig.suptitle('SBMs with increasing cross-block edge densities')
fig.tight_layout()
fig.savefig('res_sbmAmiModularity_blocks.png',bbox_inches='tight',dpi=150)
melted = pd.melt(sbm_df,['off-diag-p','t','iteration'])
f = sns.FacetGrid(melted,col = 'off-diag-p',col_wrap=5,margin_titles=True)
fg = plt.gcf()
fg.dpi = 50
f.map_dataframe(sns.lineplot, x='t', y='value',hue='variable')
f.set_axis_labels("t", "value")
f.add_legend()
cn = [round(v,2) for v in f.col_names]
fg = plt.gcf()
fg.suptitle('AMI and Modularity peaks across scales')
axes = f.axes.flatten()
for i,val in enumerate(cn):
axes[i].set_title("cross-block edge density = %2.2f" % cn[i])
fg.savefig('res_sbmAmiModularity.png',bbox_inches='tight',dpi=300)
plt.show() | [
"numpy.arange",
"numpy.linspace",
"pandas.DataFrame",
"pandas.melt",
"networkx.algorithms.community.quality.modularity",
"numpy.triu_indices",
"networkx.adjacency_matrix",
"matplotlib.pyplot.gcf",
"numpy.argmax",
"ot.gromov.gromov_wasserstein",
"networkx.stochastic_block_model",
"warnings.filterwarnings",
"matplotlib.pyplot.show",
"seaborn.FacetGrid",
"spectralGW.undirected_normalized_heat_kernel",
"numpy.unique",
"sklearn.metrics.adjusted_mutual_info_score",
"spectralGW.node_distribution",
"numpy.diag",
"numpy.zeros",
"matplotlib.pyplot.subplots"
] | [((962, 995), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (985, 995), False, 'import warnings\n'), ((2485, 2507), 'numpy.linspace', 'np.linspace', (['(0)', '(20)', '(20)'], {}), '(0, 20, 20)\n', (2496, 2507), True, 'import numpy as np\n'), ((2582, 2609), 'numpy.arange', 'np.arange', (['(0.15)', '(0.35)', '(0.02)'], {}), '(0.15, 0.35, 0.02)\n', (2591, 2609), True, 'import numpy as np\n'), ((3109, 3123), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3121, 3123), True, 'import pandas as pd\n'), ((3263, 3281), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(5)'], {}), '(2, 5)\n', (3275, 3281), True, 'import matplotlib.pyplot as plt\n'), ((3538, 3587), 'pandas.melt', 'pd.melt', (['sbm_df', "['off-diag-p', 't', 'iteration']"], {}), "(sbm_df, ['off-diag-p', 't', 'iteration'])\n", (3545, 3587), True, 'import pandas as pd\n'), ((3590, 3661), 'seaborn.FacetGrid', 'sns.FacetGrid', (['melted'], {'col': '"""off-diag-p"""', 'col_wrap': '(5)', 'margin_titles': '(True)'}), "(melted, col='off-diag-p', col_wrap=5, margin_titles=True)\n", (3603, 3661), True, 'import seaborn as sns\n'), ((3666, 3675), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3673, 3675), True, 'import matplotlib.pyplot as plt\n'), ((3844, 3853), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3851, 3853), True, 'import matplotlib.pyplot as plt\n'), ((4105, 4115), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4113, 4115), True, 'import matplotlib.pyplot as plt\n'), ((1292, 1325), 'networkx.stochastic_block_model', 'nx.stochastic_block_model', (['ns', 'pm'], {}), '(ns, pm)\n', (1317, 1325), True, 'import networkx as nx\n'), ((1644, 1687), 'spectralGW.undirected_normalized_heat_kernel', 'sgw.undirected_normalized_heat_kernel', (['G', 't'], {}), '(G, t)\n', (1681, 1687), True, 'import spectralGW as sgw\n'), ((1696, 1770), 'spectralGW.node_distribution', 'sgw.node_distribution', (['G', 'distribution_offset_hk', 'distribution_exponent_hk'], {}), '(G, distribution_offset_hk, distribution_exponent_hk)\n', (1717, 1770), True, 'import spectralGW as sgw\n'), ((1965, 1976), 'numpy.diag', 'np.diag', (['p2'], {}), '(p2)\n', (1972, 1976), True, 'import numpy as np\n'), ((1993, 2071), 'ot.gromov.gromov_wasserstein', 'ot.gromov.gromov_wasserstein', (['C1', 'C2', 'p1', 'p2'], {'loss_fun': '"""square_loss"""', 'log': '(True)'}), "(C1, C2, p1, p2, loss_fun='square_loss', log=True)\n", (2021, 2071), False, 'import ot\n'), ((2090, 2113), 'numpy.argmax', 'np.argmax', (['coup'], {'axis': '(1)'}), '(coup, axis=1)\n', (2099, 2113), True, 'import numpy as np\n'), ((2129, 2198), 'sklearn.metrics.adjusted_mutual_info_score', 'metrics.adjusted_mutual_info_score', (['est_idx', 'gt'], {'average_method': '"""max"""'}), "(est_idx, gt, average_method='max')\n", (2163, 2198), False, 'from sklearn import metrics\n'), ((2331, 2351), 'networkx.algorithms.community.quality.modularity', 'modularity', (['G', 'comms'], {}), '(G, comms)\n', (2341, 2351), False, 'from networkx.algorithms.community.quality import modularity\n'), ((1184, 1202), 'numpy.triu_indices', 'np.triu_indices', (['n'], {}), '(n)\n', (1199, 1202), True, 'import numpy as np\n'), ((1217, 1233), 'numpy.zeros', 'np.zeros', (['(n, n)'], {}), '((n, n))\n', (1225, 1233), True, 'import numpy as np\n'), ((2225, 2243), 'numpy.unique', 'np.unique', (['est_idx'], {}), '(est_idx)\n', (2234, 2243), True, 'import numpy as np\n'), ((2795, 2817), 'networkx.adjacency_matrix', 'nx.adjacency_matrix', (['G'], {}), '(G)\n', (2814, 
2817), True, 'import networkx as nx\n'), ((1854, 1867), 'numpy.unique', 'np.unique', (['gt'], {}), '(gt)\n', (1863, 1867), True, 'import numpy as np\n')] |
import boto3
import json
def ExtractEntitiesFromArticle(event, context):
retVal= {}
retVal["data"] = []
# Data is sent to Lambda via a HTTPS POST call. We want to get to the payload send by Snowflake
event_body = event["body"]
payload = json.loads(event_body)
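    # Snowflake external functions POST a body of the form {"data": [[row_index, arg1, ...], ...]}
    # and expect the same row indices back in the response, which is why sflkRowRef is echoed below.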
for row in payload["data"]:
sflkRowRef = row[0] # This is how Snowflake keeps track of data as it gets returned
inputText = row[1]
client = boto3.client('comprehend')
comprehendResponse = client.detect_entities(
Text=inputText,
LanguageCode='en'
)
retVal["data"].append([sflkRowRef,comprehendResponse])
return retVal | [
"json.loads",
"boto3.client"
] | [((259, 281), 'json.loads', 'json.loads', (['event_body'], {}), '(event_body)\n', (269, 281), False, 'import json\n'), ((456, 482), 'boto3.client', 'boto3.client', (['"""comprehend"""'], {}), "('comprehend')\n", (468, 482), False, 'import boto3\n')] |
from django.contrib import admin
from .models import Domicilio, Localidad, Persona, Profesional, Titulo
admin.site.register(Domicilio)
admin.site.register(Localidad)
admin.site.register(Persona)
admin.site.register(Profesional)
admin.site.register(Titulo)
| [
"django.contrib.admin.site.register"
] | [((106, 136), 'django.contrib.admin.site.register', 'admin.site.register', (['Domicilio'], {}), '(Domicilio)\n', (125, 136), False, 'from django.contrib import admin\n'), ((137, 167), 'django.contrib.admin.site.register', 'admin.site.register', (['Localidad'], {}), '(Localidad)\n', (156, 167), False, 'from django.contrib import admin\n'), ((168, 196), 'django.contrib.admin.site.register', 'admin.site.register', (['Persona'], {}), '(Persona)\n', (187, 196), False, 'from django.contrib import admin\n'), ((197, 229), 'django.contrib.admin.site.register', 'admin.site.register', (['Profesional'], {}), '(Profesional)\n', (216, 229), False, 'from django.contrib import admin\n'), ((230, 257), 'django.contrib.admin.site.register', 'admin.site.register', (['Titulo'], {}), '(Titulo)\n', (249, 257), False, 'from django.contrib import admin\n')] |
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
Lifts electrical transmission network to a single 380 kV voltage layer,
removes dead-ends of the network,
and reduces multi-hop HVDC connections to a single link.
Relevant Settings
-----------------
.. code:: yaml
costs:
USD2013_to_EUR2013:
discountrate:
marginal_cost:
capital_cost:
electricity:
max_hours:
renewables: (keys)
{technology}:
potential:
lines:
length_factor:
links:
p_max_pu:
solving:
solver:
name:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`costs_cf`, :ref:`electricity_cf`, :ref:`renewable_cf`,
:ref:`lines_cf`, :ref:`links_cf`, :ref:`solving_cf`
Inputs
------
- ``data/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
- ``resources/regions_onshore.geojson``: confer :ref:`busregions`
- ``resources/regions_offshore.geojson``: confer :ref:`busregions`
- ``networks/elec.nc``: confer :ref:`electricity`
Outputs
-------
- ``resources/regions_onshore_elec_s{simpl}.geojson``:
.. image:: ../img/regions_onshore_elec_s.png
:scale: 33 %
- ``resources/regions_offshore_elec_s{simpl}.geojson``:
.. image:: ../img/regions_offshore_elec_s .png
:scale: 33 %
- ``resources/busmap_elec_s{simpl}.csv``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec_s{simpl}.nc``;
- ``networks/elec_s{simpl}.nc``:
.. image:: ../img/elec_s.png
:scale: 33 %
Description
-----------
The rule :mod:`simplify_network` does up to four things:
1. Create an equivalent transmission network in which all voltage levels are mapped to the 380 kV level by the function ``simplify_network(...)``.
2. DC-only sub-networks that are connected to the AC network at only two buses are reduced to a single representative link in the function ``simplify_links(...)``. The components attached to buses in between are moved to the nearest endpoint. The grid connection costs of offshore wind generators are added to the capital costs of the generators.
3. Stub lines and links, i.e. dead-ends of the network, are sequentially removed from the network in the function ``remove_stubs(...)``. Components are moved along.
4. Optionally, if an integer were provided for the wildcard ``{simpl}`` (e.g. ``networks/elec_s500.nc``), the network is clustered to this number of clusters with the routines from the ``cluster_network`` rule with the function ``cluster_network.cluster(...)``. This step is usually skipped!
"""
import logging
from _helpers import configure_logging, update_p_nom_max
from cluster_network import clustering_for_n_clusters, cluster_regions
from add_electricity import load_costs
import pandas as pd
import numpy as np
import scipy as sp
from scipy.sparse.csgraph import connected_components, dijkstra
from functools import reduce
import pypsa
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense
logger = logging.getLogger(__name__)
def simplify_network_to_380(n):
## All goes to v_nom == 380
logger.info("Mapping all network lines onto a single 380kV layer")
n.buses['v_nom'] = 380.
linetype_380, = n.lines.loc[n.lines.v_nom == 380., 'type'].unique()
lines_v_nom_b = n.lines.v_nom != 380.
n.lines.loc[lines_v_nom_b, 'num_parallel'] *= (n.lines.loc[lines_v_nom_b, 'v_nom'] / 380.)**2
n.lines.loc[lines_v_nom_b, 'v_nom'] = 380.
n.lines.loc[lines_v_nom_b, 'type'] = linetype_380
n.lines.loc[lines_v_nom_b, 's_nom'] = (
np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel
)
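    # The rating above is the three-phase apparent power S = sqrt(3) * V * I_nom per circuit,
    # scaled by the number of parallel circuits.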
# Replace transformers by lines
trafo_map = pd.Series(n.transformers.bus1.values, index=n.transformers.bus0.values)
trafo_map = trafo_map[~trafo_map.index.duplicated(keep='first')]
several_trafo_b = trafo_map.isin(trafo_map.index)
trafo_map.loc[several_trafo_b] = trafo_map.loc[several_trafo_b].map(trafo_map)
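    # A bus whose mapped target is itself the low-voltage side of another transformer is forwarded
    # one step further, so cascaded transformers collapse onto a single surviving bus.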
missing_buses_i = n.buses.index.difference(trafo_map.index)
trafo_map = trafo_map.append(pd.Series(missing_buses_i, missing_buses_i))
for c in n.one_port_components|n.branch_components:
df = n.df(c)
for col in df.columns:
if col.startswith('bus'):
df[col] = df[col].map(trafo_map)
n.mremove("Transformer", n.transformers.index)
n.mremove("Bus", n.buses.index.difference(trafo_map))
return n, trafo_map
def _prepare_connection_costs_per_link(n):
if n.links.empty: return {}
Nyears = n.snapshot_weightings.objective.sum() / 8760
costs = load_costs(Nyears, snakemake.input.tech_costs,
snakemake.config['costs'], snakemake.config['electricity'])
connection_costs_per_link = {}
for tech in snakemake.config['renewable']:
if tech.startswith('offwind'):
connection_costs_per_link[tech] = (
n.links.length * snakemake.config['lines']['length_factor'] *
(n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] +
(1. - n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost'])
)
return connection_costs_per_link
def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None):
if connection_costs_per_link is None:
connection_costs_per_link = _prepare_connection_costs_per_link(n)
if buses is None:
buses = busmap.index[busmap.index != busmap.values]
connection_costs_to_bus = pd.DataFrame(index=buses)
for tech in connection_costs_per_link:
adj = n.adjacency_matrix(weights=pd.concat(dict(Link=connection_costs_per_link[tech].reindex(n.links.index),
Line=pd.Series(0., n.lines.index))))
costs_between_buses = dijkstra(adj, directed=False, indices=n.buses.index.get_indexer(buses))
connection_costs_to_bus[tech] = costs_between_buses[np.arange(len(buses)),
n.buses.index.get_indexer(busmap.loc[buses])]
return connection_costs_to_bus
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
connection_costs = {}
for tech in connection_costs_to_bus:
tech_b = n.generators.carrier == tech
costs = n.generators.loc[tech_b, "bus"].map(connection_costs_to_bus[tech]).loc[lambda s: s>0]
if not costs.empty:
n.generators.loc[costs.index, "capital_cost"] += costs
logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} "
.format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems())))
connection_costs[tech] = costs
pd.DataFrame(connection_costs).to_csv(snakemake.output.connection_costs)
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}):
def replace_components(n, c, df, pnl):
n.mremove(c, n.df(c).index)
import_components_from_dataframe(n, df, c)
for attr, df in pnl.items():
if not df.empty:
import_series_from_dataframe(n, df, c, attr)
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus)
generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum})
replace_components(n, "Generator", generators, generators_pnl)
for one_port in aggregate_one_ports:
df, pnl = aggregateoneport(n, busmap, component=one_port)
replace_components(n, one_port, df, pnl)
buses_to_del = n.buses.index.difference(busmap)
n.mremove("Bus", buses_to_del)
for c in n.branch_components:
df = n.df(c)
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
if n.links.empty:
return n, n.buses.index.to_series()
# Determine connected link components, ignore all links but DC
adjacency_matrix = n.adjacency_matrix(branch_components=['Link'],
weights=dict(Link=(n.links.carrier == 'DC').astype(float)))
_, labels = connected_components(adjacency_matrix, directed=False)
labels = pd.Series(labels, n.buses.index)
G = n.graph()
def split_links(nodes):
nodes = frozenset(nodes)
seen = set()
supernodes = {m for m in nodes
if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)}
for u in supernodes:
for m, ls in G.adj[u].items():
if m not in nodes or m in seen: continue
buses = [u, m]
links = [list(ls)] #[name for name in ls]]
while m not in (supernodes | seen):
seen.add(m)
for m2, ls in G.adj[m].items():
if m2 in seen or m2 == u: continue
buses.append(m2)
links.append(list(ls)) # [name for name in ls])
break
else:
# stub
break
m = m2
if m != u:
yield pd.Index((u, m)), buses, links
seen.add(u)
busmap = n.buses.index.to_series()
connection_costs_per_link = _prepare_connection_costs_per_link(n)
connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link))
for lbl in labels.value_counts().loc[lambda s: s > 2].index:
for b, buses, links in split_links(labels.index[labels == lbl]):
if len(buses) <= 2: continue
logger.debug('nodes = {}'.format(labels.index[labels == lbl]))
logger.debug('b = {}\nbuses = {}\nlinks = {}'.format(b, buses, links))
m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']],
n.buses.loc[buses[1:-1], ['x', 'y']])
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses)
all_links = [i for _, i in sum(links, [])]
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
lengths = n.links.loc[all_links, 'length']
name = lengths.idxmax() + '+{}'.format(len(links) - 1)
params = dict(
carrier='DC',
bus0=b[0], bus1=b[1],
length=sum(n.links.loc[[i for _, i in l], 'length'].mean() for l in links),
p_nom=min(n.links.loc[[i for _, i in l], 'p_nom'].sum() for l in links),
underwater_fraction=sum(lengths/lengths.sum() * n.links.loc[all_links, 'underwater_fraction']),
p_max_pu=p_max_pu,
p_min_pu=-p_max_pu,
underground=False,
under_construction=False
)
logger.info("Joining the links {} connecting the buses {} to simple link {}".format(", ".join(all_links), ", ".join(buses), name))
n.mremove("Link", all_links)
static_attrs = n.components["Link"]["attrs"].loc[lambda df: df.static]
for attr, default in static_attrs.default.iteritems(): params.setdefault(attr, default)
n.links.loc[name] = pd.Series(params)
# n.add("Link", **params)
logger.debug("Collecting all components using the busmap")
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
return n, busmap
def remove_stubs(n):
logger.info("Removing stubs")
busmap = busmap_by_stubs(n) # ['country'])
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
return n, busmap
def aggregate_to_substations(n, buses_i=None):
# can be used to aggregate a selection of buses to electrically closest neighbors
    # if no buses are given, nodes that are not substations or lack an offshore connection are aggregated
    if buses_i is None:
        logger.info("Aggregating buses that are not substations or have no valid offshore connection")
buses_i = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus))
weight = pd.concat({'Line': n.lines.length/n.lines.s_nom.clip(1e-3),
'Link': n.links.length/n.links.p_nom.clip(1e-3)})
adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight)
bus_indexer = n.buses.index.get_indexer(buses_i)
dist = pd.DataFrame(dijkstra(adj, directed=False, indices=bus_indexer), buses_i, n.buses.index)
dist[buses_i] = np.inf # bus in buses_i should not be assigned to different bus in buses_i
for c in n.buses.country.unique():
incountry_b = n.buses.country == c
dist.loc[incountry_b, ~incountry_b] = np.inf
busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1)
clustering = get_clustering_from_busmap(n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
generator_strategies={'p_nom_max': 'sum'},
scale_link_capital_costs=False)
return clustering.network, busmap
def cluster(n, n_clusters):
logger.info(f"Clustering to {n_clusters} buses")
focus_weights = snakemake.config.get('focus_weights', None)
renewable_carriers = pd.Index([tech
for tech in n.generators.carrier.unique()
if tech.split('-', 2)[0] in snakemake.config['renewable']])
def consense(x):
v = x.iat[0]
assert ((x == v).all() or x.isnull().all()), (
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential']
for tech in renewable_carriers]))
if len(renewable_carriers) > 0 else 'conservative')
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
solver_name=snakemake.config['solving']['solver']['name'],
focus_weights=focus_weights)
return clustering.network, clustering.busmap
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('simplify_network', simpl='', network='elec')
configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network)
n, trafo_map = simplify_network_to_380(n)
n, simplify_links_map = simplify_links(n)
n, stub_map = remove_stubs(n)
busmaps = [trafo_map, simplify_links_map, stub_map]
if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
n, substation_map = aggregate_to_substations(n)
busmaps.append(substation_map)
if snakemake.wildcards.simpl:
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl))
busmaps.append(cluster_map)
    # Some entries in n.buses are not updated by the previous functions and can therefore be wrong. As they are not needed
    # and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:
buses_c = {'symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'}.intersection(n.buses.columns)
n.buses = n.buses.drop(buses_c, axis=1)
update_p_nom_max(n)
n.export_to_netcdf(snakemake.output.network)
busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
busmap_s.to_csv(snakemake.output.busmap)
cluster_regions(busmaps, snakemake.input, snakemake.output)
| [
"logging.getLogger",
"numpy.sqrt",
"scipy.sparse.csgraph.dijkstra",
"pandas.Index",
"add_electricity.load_costs",
"pypsa.networkclustering.aggregateoneport",
"pypsa.networkclustering._make_consense",
"pypsa.io.import_components_from_dataframe",
"cluster_network.cluster_regions",
"pandas.DataFrame",
"scipy.spatial.distance_matrix",
"pypsa.networkclustering.aggregategenerators",
"pandas.Series",
"cluster_network.clustering_for_n_clusters",
"scipy.sparse.csgraph.connected_components",
"pypsa.Network",
"_helpers.update_p_nom_max",
"pypsa.networkclustering.busmap_by_stubs",
"pypsa.io.import_series_from_dataframe",
"_helpers.mock_snakemake",
"_helpers.configure_logging"
] | [((3471, 3498), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (3488, 3498), False, 'import logging\n'), ((4208, 4279), 'pandas.Series', 'pd.Series', (['n.transformers.bus1.values'], {'index': 'n.transformers.bus0.values'}), '(n.transformers.bus1.values, index=n.transformers.bus0.values)\n', (4217, 4279), True, 'import pandas as pd\n'), ((5107, 5217), 'add_electricity.load_costs', 'load_costs', (['Nyears', 'snakemake.input.tech_costs', "snakemake.config['costs']", "snakemake.config['electricity']"], {}), "(Nyears, snakemake.input.tech_costs, snakemake.config['costs'],\n snakemake.config['electricity'])\n", (5117, 5217), False, 'from add_electricity import load_costs\n'), ((6083, 6108), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'buses'}), '(index=buses)\n', (6095, 6108), True, 'import pandas as pd\n'), ((7928, 7999), 'pypsa.networkclustering.aggregategenerators', 'aggregategenerators', (['n', 'busmap'], {'custom_strategies': "{'p_nom_min': np.sum}"}), "(n, busmap, custom_strategies={'p_nom_min': np.sum})\n", (7947, 7999), False, 'from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense\n'), ((8920, 8974), 'scipy.sparse.csgraph.connected_components', 'connected_components', (['adjacency_matrix'], {'directed': '(False)'}), '(adjacency_matrix, directed=False)\n', (8940, 8974), False, 'from scipy.sparse.csgraph import connected_components, dijkstra\n'), ((8988, 9020), 'pandas.Series', 'pd.Series', (['labels', 'n.buses.index'], {}), '(labels, n.buses.index)\n', (8997, 9020), True, 'import pandas as pd\n'), ((12405, 12423), 'pypsa.networkclustering.busmap_by_stubs', 'busmap_by_stubs', (['n'], {}), '(n)\n', (12420, 12423), False, 'from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense\n'), ((15300, 15489), 'cluster_network.clustering_for_n_clusters', 'clustering_for_n_clusters', (['n', 'n_clusters'], {'custom_busmap': '(False)', 'potential_mode': 'potential_mode', 'solver_name': "snakemake.config['solving']['solver']['name']", 'focus_weights': 'focus_weights'}), "(n, n_clusters, custom_busmap=False,\n potential_mode=potential_mode, solver_name=snakemake.config['solving'][\n 'solver']['name'], focus_weights=focus_weights)\n", (15325, 15489), False, 'from cluster_network import clustering_for_n_clusters, cluster_regions\n'), ((15812, 15840), '_helpers.configure_logging', 'configure_logging', (['snakemake'], {}), '(snakemake)\n', (15829, 15840), False, 'from _helpers import configure_logging, update_p_nom_max\n'), ((15850, 15888), 'pypsa.Network', 'pypsa.Network', (['snakemake.input.network'], {}), '(snakemake.input.network)\n', (15863, 15888), False, 'import pypsa\n'), ((16798, 16817), '_helpers.update_p_nom_max', 'update_p_nom_max', (['n'], {}), '(n)\n', (16814, 16817), False, 'from _helpers import configure_logging, update_p_nom_max\n'), ((16997, 17056), 'cluster_network.cluster_regions', 'cluster_regions', (['busmaps', 'snakemake.input', 'snakemake.output'], {}), '(busmaps, snakemake.input, snakemake.output)\n', (17012, 17056), False, 'from cluster_network import clustering_for_n_clusters, cluster_regions\n'), ((4583, 4626), 'pandas.Series', 'pd.Series', (['missing_buses_i', 'missing_buses_i'], {}), '(missing_buses_i, missing_buses_i)\n', (4592, 4626), True, 'import pandas as pd\n'), ((7646, 7688), 'pypsa.io.import_components_from_dataframe', 'import_components_from_dataframe', (['n', 'df', 'c'], 
{}), '(n, df, c)\n', (7678, 7688), False, 'from pypsa.io import import_components_from_dataframe, import_series_from_dataframe\n'), ((8127, 8174), 'pypsa.networkclustering.aggregateoneport', 'aggregateoneport', (['n', 'busmap'], {'component': 'one_port'}), '(n, busmap, component=one_port)\n', (8143, 8174), False, 'from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense\n'), ((13369, 13419), 'scipy.sparse.csgraph.dijkstra', 'dijkstra', (['adj'], {'directed': '(False)', 'indices': 'bus_indexer'}), '(adj, directed=False, indices=bus_indexer)\n', (13377, 13419), False, 'from scipy.sparse.csgraph import connected_components, dijkstra\n'), ((15747, 15807), '_helpers.mock_snakemake', 'mock_snakemake', (['"""simplify_network"""'], {'simpl': '""""""', 'network': '"""elec"""'}), "('simplify_network', simpl='', network='elec')\n", (15761, 15807), False, 'from _helpers import mock_snakemake\n'), ((7352, 7382), 'pandas.DataFrame', 'pd.DataFrame', (['connection_costs'], {}), '(connection_costs)\n', (7364, 7382), True, 'import pandas as pd\n'), ((10593, 10690), 'scipy.spatial.distance_matrix', 'sp.spatial.distance_matrix', (["n.buses.loc[b, ['x', 'y']]", "n.buses.loc[buses[1:-1], ['x', 'y']]"], {}), "(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:\n -1], ['x', 'y']])\n", (10619, 10690), True, 'import scipy as sp\n'), ((12121, 12138), 'pandas.Series', 'pd.Series', (['params'], {}), '(params)\n', (12130, 12138), True, 'import pandas as pd\n'), ((15069, 15165), 'pandas.Series', 'pd.Series', (["[snakemake.config['renewable'][tech]['potential'] for tech in\n renewable_carriers]"], {}), "([snakemake.config['renewable'][tech]['potential'] for tech in\n renewable_carriers])\n", (15078, 15165), True, 'import pandas as pd\n'), ((4031, 4041), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (4038, 4041), True, 'import numpy as np\n'), ((7771, 7815), 'pypsa.io.import_series_from_dataframe', 'import_series_from_dataframe', (['n', 'df', 'c', 'attr'], {}), '(n, df, c, attr)\n', (7799, 7815), False, 'from pypsa.io import import_components_from_dataframe, import_series_from_dataframe\n'), ((13886, 13918), 'pypsa.networkclustering._make_consense', '_make_consense', (['"""Bus"""', '"""country"""'], {}), "('Bus', 'country')\n", (13900, 13918), False, 'from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense\n'), ((6331, 6360), 'pandas.Series', 'pd.Series', (['(0.0)', 'n.lines.index'], {}), '(0.0, n.lines.index)\n', (6340, 6360), True, 'import pandas as pd\n'), ((9961, 9977), 'pandas.Index', 'pd.Index', (['(u, m)'], {}), '((u, m))\n', (9969, 9977), True, 'import pandas as pd\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"module_name": "CSF TZ",
"category": "Modules",
"label": _("Country Specifics"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
"description": "Country specific customizations for compliance, taxation and statutory reports.",
},
{
"module_name": "Clearing and Forwarding",
"category": "Modules",
"label": _("Clearing and Forwarding"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
"description": "Clearing and forwarding",
},
{
"module_name": "After Sales Services",
"category": "Modules",
"label": _("After Sales Services"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
},
{
"module_name": "Workshop",
"category": "Modules",
"label": _("Workshop"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
},
{
"module_name": "Fleet Management",
"category": "Modules",
"label": _("Fleet Management"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
},
{
"module_name": "Purchase and Stock Management",
"category": "Modules",
"label": _("Purchase and Stock Management"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
},
{
"module_name": "Sales and Marketing",
"category": "Modules",
"label": _("Sales and Marketing"),
"color": "green",
"icon": "octicon octicon-bookmark",
"type": "module",
},
]
| [
"frappe._"
] | [((182, 204), 'frappe._', '_', (['"""Country Specifics"""'], {}), "('Country Specifics')\n", (183, 204), False, 'from frappe import _\n'), ((480, 508), 'frappe._', '_', (['"""Clearing and Forwarding"""'], {}), "('Clearing and Forwarding')\n", (481, 508), False, 'from frappe import _\n'), ((725, 750), 'frappe._', '_', (['"""After Sales Services"""'], {}), "('After Sales Services')\n", (726, 750), False, 'from frappe import _\n'), ((910, 923), 'frappe._', '_', (['"""Workshop"""'], {}), "('Workshop')\n", (911, 923), False, 'from frappe import _\n'), ((1091, 1112), 'frappe._', '_', (['"""Fleet Management"""'], {}), "('Fleet Management')\n", (1092, 1112), False, 'from frappe import _\n'), ((1293, 1327), 'frappe._', '_', (['"""Purchase and Stock Management"""'], {}), "('Purchase and Stock Management')\n", (1294, 1327), False, 'from frappe import _\n'), ((1498, 1522), 'frappe._', '_', (['"""Sales and Marketing"""'], {}), "('Sales and Marketing')\n", (1499, 1522), False, 'from frappe import _\n')] |
from datetime import datetime
from multiprocessing import Process
from queue import Empty
import click
from fabric import Connection
from clustrun.result import Result
def log_date():
return datetime.now().strftime("[%Y-%m-%d %H:%M:%S]")
def worker(q, rq, hostname, config, port=22):
while True:
t = q.get(block=False)
if t is None:
# None is the sentinel to indicate all queued items have been
# processed
break
start_time = datetime.now()
print(log_date(), "Running", t, "on", hostname)
cmd = config.cmd_tplt.format(t)
r = None
try:
c = Connection(hostname, port=port, config=config.connection)
except Exception:
err_msg = "{0} Connection failed to {1}".format(log_date(), hostname)
click.secho(err_msg, fg="red")
break
try:
if config.sudo:
r = c.sudo(cmd, hide="both", warn=True)
else:
r = c.run(cmd, hide="both", warn=True)
except Exception as e:
click.secho("Exception running command: " + str(e), fg="red")
break
else:
duration = datetime.now() - start_time
if r.exited == 0:
finish_msg = "{0} Finished {1} on {2} after {3}".format(
log_date(), t, hostname, duration
)
click.secho(finish_msg, fg="green")
else:
err_msg = "{0} Error during {1} on {2} after {3}".format(
log_date(), t, hostname, duration
)
click.secho(err_msg, fg="red")
result = Result(
hostname=hostname,
task=t,
stdout=r.stdout,
stderr=r.stderr,
exit_code=r.exited,
duration=duration,
)
rq.put(result)
finally:
q.task_done()
c.close()
click.echo("Worker on {0} is done".format(hostname))
def setup_workers(config):
for h in config.hosts:
print("Configuring ", h.hostname, "... ", end="", sep="", flush=True)
c = Connection(h.hostname, config=config.connection)
for l in config.setup_cmd.split("\n"):
if config.sudo:
c.sudo(l, hide="both")
else:
c.run(l, hide="both")
print("done")
def launch_workers(config, q, rq):
workers = []
for h in config.hosts:
for _ in range(h.n_jobs):
p = Process(
target=worker,
args=(q, rq, h.hostname, config, h.port),
name="clustrun.worker",
)
p.start()
workers.append(p)
return workers
def wait_for_workers(processes, results_queue):
results = []
while processes:
while True:
try:
results.append(results_queue.get(block=False))
except Empty:
break
for p in processes:
p.join(0.1)
processes = [p for p in processes if p.is_alive()]
return results
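# Illustrative wiring of the helpers above (a sketch, not part of this module):
# the task queue must support task_done(), so a multiprocessing.JoinableQueue is
# assumed here, and one ``None`` sentinel is queued per worker process so that
# every worker eventually exits.
#
#     from multiprocessing import JoinableQueue, Queue
#
#     q, rq = JoinableQueue(), Queue()
#     for task in tasks:  # items substituted into config.cmd_tplt
#         q.put(task)
#     for _ in range(sum(h.n_jobs for h in config.hosts)):  # one sentinel per worker
#         q.put(None)
#     setup_workers(config)
#     procs = launch_workers(config, q, rq)
#     results = wait_for_workers(procs, rq)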
| [
"click.secho",
"multiprocessing.Process",
"clustrun.result.Result",
"datetime.datetime.now",
"fabric.Connection"
] | [((500, 514), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (512, 514), False, 'from datetime import datetime\n'), ((2214, 2262), 'fabric.Connection', 'Connection', (['h.hostname'], {'config': 'config.connection'}), '(h.hostname, config=config.connection)\n', (2224, 2262), False, 'from fabric import Connection\n'), ((199, 213), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (211, 213), False, 'from datetime import datetime\n'), ((657, 714), 'fabric.Connection', 'Connection', (['hostname'], {'port': 'port', 'config': 'config.connection'}), '(hostname, port=port, config=config.connection)\n', (667, 714), False, 'from fabric import Connection\n'), ((1701, 1811), 'clustrun.result.Result', 'Result', ([], {'hostname': 'hostname', 'task': 't', 'stdout': 'r.stdout', 'stderr': 'r.stderr', 'exit_code': 'r.exited', 'duration': 'duration'}), '(hostname=hostname, task=t, stdout=r.stdout, stderr=r.stderr,\n exit_code=r.exited, duration=duration)\n', (1707, 1811), False, 'from clustrun.result import Result\n'), ((2586, 2679), 'multiprocessing.Process', 'Process', ([], {'target': 'worker', 'args': '(q, rq, h.hostname, config, h.port)', 'name': '"""clustrun.worker"""'}), "(target=worker, args=(q, rq, h.hostname, config, h.port), name=\n 'clustrun.worker')\n", (2593, 2679), False, 'from multiprocessing import Process\n'), ((835, 865), 'click.secho', 'click.secho', (['err_msg'], {'fg': '"""red"""'}), "(err_msg, fg='red')\n", (846, 865), False, 'import click\n'), ((1214, 1228), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1226, 1228), False, 'from datetime import datetime\n'), ((1433, 1468), 'click.secho', 'click.secho', (['finish_msg'], {'fg': '"""green"""'}), "(finish_msg, fg='green')\n", (1444, 1468), False, 'import click\n'), ((1649, 1679), 'click.secho', 'click.secho', (['err_msg'], {'fg': '"""red"""'}), "(err_msg, fg='red')\n", (1660, 1679), False, 'import click\n')] |
import warnings
import napari
import pyclesperanto_prototype as cle
from napari_tools_menu import register_function
@register_function(menu="Measurement > Statistics of labeled pixels (clEsperanto)")
def statistics_of_labeled_pixels(image: napari.types.ImageData, labels: napari.types.LabelsData, measure_background=False, napari_viewer : napari.Viewer=None) -> "pandas.DataFrame":
"""
    Adds a table widget to a given napari viewer with quantitative analysis results derived from an image / label-image pair.
"""
if image is not None and labels is not None:
# quantitative analysis using clEsperanto's statistics_of_labelled_pixels
if measure_background:
table = cle.statistics_of_background_and_labelled_pixels(image, labels)
else:
table = cle.statistics_of_labelled_pixels(image, labels)
if napari_viewer is not None:
# Store results in the properties dictionary:
from napari_workflows._workflow import _get_layer_from_data
labels_layer = _get_layer_from_data(napari_viewer, labels)
labels_layer.properties = table
# turn table into a widget
from napari_skimage_regionprops import add_table
add_table(labels_layer, napari_viewer)
else:
import pandas
return pandas.DataFrame(table)
else:
warnings.warn("Image and labels must be set.")
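# Illustrative direct call without a viewer (a sketch; the arrays are made up):
# with ``napari_viewer=None`` the measurements are returned as a pandas
# DataFrame instead of being attached to a labels layer as a table widget.
#
#     import numpy as np
#     image = np.random.random((64, 64))
#     labels = np.zeros((64, 64), dtype=np.uint32)
#     labels[8:24, 8:24] = 1
#     df = statistics_of_labeled_pixels(image, labels)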
| [
"pyclesperanto_prototype.statistics_of_background_and_labelled_pixels",
"pyclesperanto_prototype.statistics_of_labelled_pixels",
"pandas.DataFrame",
"napari_skimage_regionprops.add_table",
"warnings.warn",
"napari_tools_menu.register_function",
"napari_workflows._workflow._get_layer_from_data"
] | [((120, 207), 'napari_tools_menu.register_function', 'register_function', ([], {'menu': '"""Measurement > Statistics of labeled pixels (clEsperanto)"""'}), "(menu=\n 'Measurement > Statistics of labeled pixels (clEsperanto)')\n", (137, 207), False, 'from napari_tools_menu import register_function\n'), ((1392, 1438), 'warnings.warn', 'warnings.warn', (['"""Image and labels must be set."""'], {}), "('Image and labels must be set.')\n", (1405, 1438), False, 'import warnings\n'), ((708, 771), 'pyclesperanto_prototype.statistics_of_background_and_labelled_pixels', 'cle.statistics_of_background_and_labelled_pixels', (['image', 'labels'], {}), '(image, labels)\n', (756, 771), True, 'import pyclesperanto_prototype as cle\n'), ((806, 854), 'pyclesperanto_prototype.statistics_of_labelled_pixels', 'cle.statistics_of_labelled_pixels', (['image', 'labels'], {}), '(image, labels)\n', (839, 854), True, 'import pyclesperanto_prototype as cle\n'), ((1051, 1094), 'napari_workflows._workflow._get_layer_from_data', '_get_layer_from_data', (['napari_viewer', 'labels'], {}), '(napari_viewer, labels)\n', (1071, 1094), False, 'from napari_workflows._workflow import _get_layer_from_data\n'), ((1252, 1290), 'napari_skimage_regionprops.add_table', 'add_table', (['labels_layer', 'napari_viewer'], {}), '(labels_layer, napari_viewer)\n', (1261, 1290), False, 'from napari_skimage_regionprops import add_table\n'), ((1350, 1373), 'pandas.DataFrame', 'pandas.DataFrame', (['table'], {}), '(table)\n', (1366, 1373), False, 'import pandas\n')] |
# Copyright 2015-2017 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""VSwitch controller for Physical to Tunnel Endpoint to Physical deployment
"""
import logging
from core.vswitch_controller import IVswitchController
from vswitches.utils import add_ports_to_flow
from conf import settings as S
from tools import tasks
_FLOW_TEMPLATE = {
'idle_timeout': '0'
}
class VswitchControllerOP2P(IVswitchController):
"""VSwitch controller for OP2P deployment scenario.
Attributes:
_vswitch_class: The vSwitch class to be used.
_vswitch: The vSwitch object controlled by this controller
_deployment_scenario: A string describing the scenario to set-up in the
constructor.
"""
def __init__(self, vswitch_class, traffic, tunnel_operation=None):
"""Initializes up the prerequisites for the OP2P deployment scenario.
:vswitch_class: the vSwitch class to be used.
"""
self._logger = logging.getLogger(__name__)
self._vswitch_class = vswitch_class
self._vswitch = vswitch_class()
self._deployment_scenario = "OP2P"
self._traffic = traffic.copy()
self._tunnel_operation = tunnel_operation
self._logger.debug('Creation using ' + str(self._vswitch_class))
def setup(self):
""" Sets up the switch for overlay P2P (tunnel encap or decap)
"""
self._logger.debug('Setting up ' + str(self._tunnel_operation))
if self._tunnel_operation == "encapsulation":
self._setup_encap()
else:
if str(S.getValue('VSWITCH')).endswith('Vanilla'):
self._setup_decap_vanilla()
else:
self._setup_decap()
def _setup_encap(self):
""" Sets up the switch for overlay P2P encapsulation test
Create 2 bridges br0 (integration bridge) and br-ext and a VXLAN port
for encapsulation.
"""
self._logger.debug('Setup using ' + str(self._vswitch_class))
try:
self._vswitch.start()
bridge = S.getValue('TUNNEL_INTEGRATION_BRIDGE')
bridge_ext = S.getValue('TUNNEL_EXTERNAL_BRIDGE')
bridge_ext_ip = S.getValue('TUNNEL_EXTERNAL_BRIDGE_IP')
tg_port2_mac = S.getValue('TRAFFICGEN_PORT2_MAC')
vtep_ip2 = S.getValue('VTEP_IP2')
self._vswitch.add_switch(bridge)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
S.getValue('VTEP_IP1'), 'dev', bridge],
self._logger, 'Assign ' +
S.getValue('VTEP_IP1') + ' to ' + bridge,
False)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'],
self._logger, 'Bring up ' + bridge, False)
tunnel_type = self._traffic['tunnel_type']
self._vswitch.add_switch(bridge_ext)
(_, phy1_number) = self._vswitch.add_phy_port(bridge)
(_, phy2_number) = self._vswitch.add_tunnel_port(bridge,
vtep_ip2,
tunnel_type)
self._vswitch.add_phy_port(bridge_ext)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
bridge_ext_ip,
'dev', bridge_ext], self._logger, 'Assign ' +
bridge_ext_ip + ' to ' + bridge_ext)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext,
'up'], self._logger,
'Set ' + bridge_ext + 'status to up')
self._vswitch.add_route(bridge,
S.getValue('VTEP_IP2_SUBNET'),
bridge_ext)
if str(S.getValue('VSWITCH')).endswith('Vanilla'):
tasks.run_task(['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac],
self._logger,
'Set ' + bridge_ext + ' status to up')
else:
self._vswitch.set_tunnel_arp(vtep_ip2,
tg_port2_mac,
bridge_ext)
# Test is unidirectional for now
self._vswitch.del_flow(bridge)
flow1 = add_ports_to_flow(_FLOW_TEMPLATE, phy1_number,
phy2_number)
self._vswitch.add_flow(bridge, flow1)
except:
self._vswitch.stop()
raise
def _setup_decap(self):
""" Sets up the switch for overlay P2P decapsulation test
"""
self._logger.debug('Setup using ' + str(self._vswitch_class))
try:
self._vswitch.start()
bridge = S.getValue('TUNNEL_INTEGRATION_BRIDGE')
bridge_ext = S.getValue('TUNNEL_EXTERNAL_BRIDGE')
bridge_ext_ip = S.getValue('TUNNEL_EXTERNAL_BRIDGE_IP')
tgen_ip1 = S.getValue('TRAFFICGEN_PORT1_IP')
self._vswitch.add_switch(bridge)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
S.getValue('VTEP_IP1'), 'dev', bridge],
self._logger, 'Assign ' +
S.getValue('VTEP_IP1') + ' to ' + bridge, False)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'],
self._logger, 'Bring up ' + bridge, False)
tunnel_type = self._traffic['tunnel_type']
self._vswitch.add_switch(bridge_ext)
self._vswitch.add_phy_port(bridge)
(_, phy2_number) = self._vswitch.add_phy_port(bridge_ext)
if tunnel_type == "vxlan":
vxlan_vni = 'options:key=' + S.getValue('VXLAN_VNI')
(_, phy3_number) = self._vswitch.add_tunnel_port(bridge_ext,
tgen_ip1,
tunnel_type,
params=[vxlan_vni])
else:
(_, phy3_number) = self._vswitch.add_tunnel_port(bridge_ext,
tgen_ip1,
tunnel_type)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
bridge_ext_ip,
'dev', bridge_ext],
self._logger, 'Assign ' +
bridge_ext_ip
+ ' to ' + bridge_ext)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext,
'up'],
self._logger,
'Set ' + bridge_ext + ' status to up')
self._vswitch.set_tunnel_arp(tgen_ip1,
S.getValue('TRAFFICGEN_PORT1_MAC'),
bridge)
# Test is unidirectional for now
self._vswitch.del_flow(bridge_ext)
flow1 = add_ports_to_flow(_FLOW_TEMPLATE, phy3_number,
phy2_number)
self._vswitch.add_flow(bridge_ext, flow1)
except:
self._vswitch.stop()
raise
def _setup_decap_vanilla(self):
""" Sets up the switch for overlay P2P decapsulation test
"""
self._logger.debug('Setup decap vanilla ' + str(self._vswitch_class))
try:
self._vswitch.start()
bridge = S.getValue('TUNNEL_INTEGRATION_BRIDGE')
bridge_ext = S.getValue('TUNNEL_EXTERNAL_BRIDGE')
bridge_ext_ip = S.getValue('TUNNEL_EXTERNAL_BRIDGE_IP')
tgen_ip1 = S.getValue('TRAFFICGEN_PORT1_IP')
self._vswitch.add_switch(bridge)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
S.getValue('TUNNEL_INT_BRIDGE_IP'), 'dev', bridge],
self._logger, 'Assign ' +
S.getValue('TUNNEL_INT_BRIDGE_IP') + ' to ' + bridge, False)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'],
self._logger, 'Bring up ' + bridge, False)
tunnel_type = self._traffic['tunnel_type']
self._vswitch.add_switch(bridge_ext)
self._vswitch.add_phy_port(bridge_ext)
(_, phy2_number) = self._vswitch.add_phy_port(bridge)
if tunnel_type == "vxlan":
vxlan_vni = 'options:key=' + S.getValue('VXLAN_VNI')
self._vswitch.add_tunnel_port(bridge, tgen_ip1, tunnel_type,
params=[vxlan_vni])
else:
self._vswitch.add_tunnel_port(bridge, tgen_ip1, tunnel_type)
tasks.run_task(['sudo', 'ip', 'addr', 'add',
bridge_ext_ip,
'dev', bridge_ext],
self._logger, 'Assign ' +
bridge_ext_ip
+ ' to ' + bridge_ext)
tasks.run_task(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext,
'up'],
self._logger,
'Set ' + bridge_ext + ' status to up')
tg_port2_mac = S.getValue('TRAFFICGEN_PORT2_MAC')
vtep_ip2 = S.getValue('TRAFFICGEN_PORT2_IP')
self._vswitch.set_tunnel_arp(vtep_ip2,
tg_port2_mac,
bridge_ext)
self._vswitch.add_route(bridge,
S.getValue('VTEP_IP2_SUBNET'),
bridge)
tasks.run_task(['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac],
self._logger,
'Set ' + bridge_ext + ' status to up')
# Test is unidirectional for now
self._vswitch.del_flow(bridge_ext)
flow1 = add_ports_to_flow(_FLOW_TEMPLATE, phy2_number, 'LOCAL')
self._vswitch.add_flow(bridge_ext, flow1)
except:
self._vswitch.stop()
raise
def stop(self):
"""Tears down the switch created in setup().
"""
self._logger.debug('Stop using ' + str(self._vswitch_class))
self._vswitch.stop()
def __enter__(self):
self.setup()
def __exit__(self, type_, value, traceback):
self.stop()
def get_vswitch(self):
"""See IVswitchController for description
"""
return self._vswitch
def get_ports_info(self):
"""See IVswitchController for description
"""
self._logger.debug('get_ports_info for bridges: %s, %s',
S.getValue('TUNNEL_INTEGRATION_BRIDGE'),
S.getValue('TUNNEL_EXTERNAL_BRIDGE'))
return self._vswitch.get_ports(
S.getValue('TUNNEL_INTEGRATION_BRIDGE')) +\
self._vswitch.get_ports(
S.getValue('TUNNEL_EXTERNAL_BRIDGE'))
def dump_vswitch_flows(self):
"""See IVswitchController for description
"""
self._vswitch.dump_flows(S.getValue('TUNNEL_INTEGRATION_BRIDGE'))
self._vswitch.dump_flows(S.getValue('TUNNEL_EXTERNAL_BRIDGE'))
| [
"logging.getLogger",
"tools.tasks.run_task",
"vswitches.utils.add_ports_to_flow",
"conf.settings.getValue"
] | [((1482, 1509), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1499, 1509), False, 'import logging\n'), ((2589, 2628), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (2599, 2628), True, 'from conf import settings as S\n'), ((2654, 2690), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (2664, 2690), True, 'from conf import settings as S\n'), ((2719, 2758), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE_IP"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE_IP')\n", (2729, 2758), True, 'from conf import settings as S\n'), ((2786, 2820), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT2_MAC"""'], {}), "('TRAFFICGEN_PORT2_MAC')\n", (2796, 2820), True, 'from conf import settings as S\n'), ((2844, 2866), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP2"""'], {}), "('VTEP_IP2')\n", (2854, 2866), True, 'from conf import settings as S\n'), ((3206, 3320), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up']", 'self._logger', "('Bring up ' + bridge)", '(False)'], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'], self.\n _logger, 'Bring up ' + bridge, False)\n", (3220, 3320), False, 'from tools import tasks\n'), ((3793, 3939), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev', bridge_ext]", 'self._logger', "('Assign ' + bridge_ext_ip + ' to ' + bridge_ext)"], {}), "(['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev',\n bridge_ext], self._logger, 'Assign ' + bridge_ext_ip + ' to ' + bridge_ext)\n", (3807, 3939), False, 'from tools import tasks\n'), ((4032, 4159), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up']", 'self._logger', "('Set ' + bridge_ext + 'status to up')"], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up'], self\n ._logger, 'Set ' + bridge_ext + 'status to up')\n", (4046, 4159), False, 'from tools import tasks\n'), ((4925, 4984), 'vswitches.utils.add_ports_to_flow', 'add_ports_to_flow', (['_FLOW_TEMPLATE', 'phy1_number', 'phy2_number'], {}), '(_FLOW_TEMPLATE, phy1_number, phy2_number)\n', (4942, 4984), False, 'from vswitches.utils import add_ports_to_flow\n'), ((5387, 5426), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (5397, 5426), True, 'from conf import settings as S\n'), ((5452, 5488), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (5462, 5488), True, 'from conf import settings as S\n'), ((5517, 5556), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE_IP"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE_IP')\n", (5527, 5556), True, 'from conf import settings as S\n'), ((5580, 5613), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT1_IP"""'], {}), "('TRAFFICGEN_PORT1_IP')\n", (5590, 5613), True, 'from conf import settings as S\n'), ((5926, 6040), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up']", 'self._logger', "('Bring up ' + bridge)", '(False)'], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'], self.\n _logger, 'Bring up ' + bridge, False)\n", (5940, 6040), False, 'from tools import tasks\n'), ((6969, 7115), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev', bridge_ext]", 
'self._logger', "('Assign ' + bridge_ext_ip + ' to ' + bridge_ext)"], {}), "(['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev',\n bridge_ext], self._logger, 'Assign ' + bridge_ext_ip + ' to ' + bridge_ext)\n", (6983, 7115), False, 'from tools import tasks\n'), ((7262, 7390), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up']", 'self._logger', "('Set ' + bridge_ext + ' status to up')"], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up'], self\n ._logger, 'Set ' + bridge_ext + ' status to up')\n", (7276, 7390), False, 'from tools import tasks\n'), ((7758, 7817), 'vswitches.utils.add_ports_to_flow', 'add_ports_to_flow', (['_FLOW_TEMPLATE', 'phy3_number', 'phy2_number'], {}), '(_FLOW_TEMPLATE, phy3_number, phy2_number)\n', (7775, 7817), False, 'from vswitches.utils import add_ports_to_flow\n'), ((8240, 8279), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (8250, 8279), True, 'from conf import settings as S\n'), ((8305, 8341), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (8315, 8341), True, 'from conf import settings as S\n'), ((8370, 8409), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE_IP"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE_IP')\n", (8380, 8409), True, 'from conf import settings as S\n'), ((8433, 8466), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT1_IP"""'], {}), "('TRAFFICGEN_PORT1_IP')\n", (8443, 8466), True, 'from conf import settings as S\n'), ((8803, 8917), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up']", 'self._logger', "('Bring up ' + bridge)", '(False)'], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge, 'up'], self.\n _logger, 'Bring up ' + bridge, False)\n", (8817, 8917), False, 'from tools import tasks\n'), ((9523, 9669), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev', bridge_ext]", 'self._logger', "('Assign ' + bridge_ext_ip + ' to ' + bridge_ext)"], {}), "(['sudo', 'ip', 'addr', 'add', bridge_ext_ip, 'dev',\n bridge_ext], self._logger, 'Assign ' + bridge_ext_ip + ' to ' + bridge_ext)\n", (9537, 9669), False, 'from tools import tasks\n'), ((9816, 9944), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up']", 'self._logger', "('Set ' + bridge_ext + ' status to up')"], {}), "(['sudo', 'ip', 'link', 'set', 'dev', bridge_ext, 'up'], self\n ._logger, 'Set ' + bridge_ext + ' status to up')\n", (9830, 9944), False, 'from tools import tasks\n'), ((10050, 10084), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT2_MAC"""'], {}), "('TRAFFICGEN_PORT2_MAC')\n", (10060, 10084), True, 'from conf import settings as S\n'), ((10108, 10141), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT2_IP"""'], {}), "('TRAFFICGEN_PORT2_IP')\n", (10118, 10141), True, 'from conf import settings as S\n'), ((10472, 10590), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac]", 'self._logger', "('Set ' + bridge_ext + ' status to up')"], {}), "(['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac], self._logger,\n 'Set ' + bridge_ext + ' status to up')\n", (10486, 10590), False, 'from tools import tasks\n'), ((10756, 10811), 'vswitches.utils.add_ports_to_flow', 'add_ports_to_flow', (['_FLOW_TEMPLATE', 'phy2_number', '"""LOCAL"""'], {}), "(_FLOW_TEMPLATE, phy2_number, 'LOCAL')\n", 
(10773, 10811), False, 'from vswitches.utils import add_ports_to_flow\n'), ((11539, 11578), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (11549, 11578), True, 'from conf import settings as S\n'), ((11607, 11643), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (11617, 11643), True, 'from conf import settings as S\n'), ((11970, 12009), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (11980, 12009), True, 'from conf import settings as S\n'), ((12044, 12080), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (12054, 12080), True, 'from conf import settings as S\n'), ((4291, 4320), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP2_SUBNET"""'], {}), "('VTEP_IP2_SUBNET')\n", (4301, 4320), True, 'from conf import settings as S\n'), ((4450, 4568), 'tools.tasks.run_task', 'tasks.run_task', (["['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac]", 'self._logger', "('Set ' + bridge_ext + ' status to up')"], {}), "(['sudo', 'arp', '-s', vtep_ip2, tg_port2_mac], self._logger,\n 'Set ' + bridge_ext + ' status to up')\n", (4464, 4568), False, 'from tools import tasks\n'), ((7561, 7595), 'conf.settings.getValue', 'S.getValue', (['"""TRAFFICGEN_PORT1_MAC"""'], {}), "('TRAFFICGEN_PORT1_MAC')\n", (7571, 7595), True, 'from conf import settings as S\n'), ((10383, 10412), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP2_SUBNET"""'], {}), "('VTEP_IP2_SUBNET')\n", (10393, 10412), True, 'from conf import settings as S\n'), ((11697, 11736), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INTEGRATION_BRIDGE"""'], {}), "('TUNNEL_INTEGRATION_BRIDGE')\n", (11707, 11736), True, 'from conf import settings as S\n'), ((11802, 11838), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_EXTERNAL_BRIDGE"""'], {}), "('TUNNEL_EXTERNAL_BRIDGE')\n", (11812, 11838), True, 'from conf import settings as S\n'), ((2998, 3020), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP1"""'], {}), "('VTEP_IP1')\n", (3008, 3020), True, 'from conf import settings as S\n'), ((5745, 5767), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP1"""'], {}), "('VTEP_IP1')\n", (5755, 5767), True, 'from conf import settings as S\n'), ((6370, 6393), 'conf.settings.getValue', 'S.getValue', (['"""VXLAN_VNI"""'], {}), "('VXLAN_VNI')\n", (6380, 6393), True, 'from conf import settings as S\n'), ((8598, 8632), 'conf.settings.getValue', 'S.getValue', (['"""TUNNEL_INT_BRIDGE_IP"""'], {}), "('TUNNEL_INT_BRIDGE_IP')\n", (8608, 8632), True, 'from conf import settings as S\n'), ((9248, 9271), 'conf.settings.getValue', 'S.getValue', (['"""VXLAN_VNI"""'], {}), "('VXLAN_VNI')\n", (9258, 9271), True, 'from conf import settings as S\n'), ((2095, 2116), 'conf.settings.getValue', 'S.getValue', (['"""VSWITCH"""'], {}), "('VSWITCH')\n", (2105, 2116), True, 'from conf import settings as S\n'), ((4390, 4411), 'conf.settings.getValue', 'S.getValue', (['"""VSWITCH"""'], {}), "('VSWITCH')\n", (4400, 4411), True, 'from conf import settings as S\n'), ((3118, 3140), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP1"""'], {}), "('VTEP_IP1')\n", (3128, 3140), True, 'from conf import settings as S\n'), ((5865, 5887), 'conf.settings.getValue', 'S.getValue', (['"""VTEP_IP1"""'], {}), "('VTEP_IP1')\n", (5875, 5887), True, 'from conf import settings as S\n'), ((8730, 8764), 'conf.settings.getValue', 
'S.getValue', (['"""TUNNEL_INT_BRIDGE_IP"""'], {}), "('TUNNEL_INT_BRIDGE_IP')\n", (8740, 8764), True, 'from conf import settings as S\n')] |
from rest_framework import generics, permissions
from django.core.management import call_command
from rest_framework.response import Response
from rest_framework.views import APIView
from ..serializers import JobCategorySerializer, JobPostSerializer, JobSeekerSerializer, ResumeListSerializer, \
ResumeUpdateSerializer, MatchedPostSerializer, ResumeSerializer, WorkHistorySerializer, EducationSerializer, \
ApplicationSerializer
from ..models import JobPost, JobCategory, Resume, JobSeeker, WorkHistory, Education, MatchedPosts, Application
from accounts.models import Recruiter
class JobSeekerView(generics.RetrieveAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = JobSeekerSerializer
def get_object(self):
return JobSeeker.objects.get(user=self.request.user.id)
class ResumeDetailView(APIView):
permission_classes = [
permissions.IsAuthenticated
]
def get_object(self):
try:
return Resume.objects.get(seeker=self.request.user.jobseeker)
except Resume.DoesNotExist as e:
return Response({"error": "Given question object not found."}, status=404)
def get(self, request):
instance = self.get_object()
serializer = ResumeListSerializer(instance)
return Response(serializer.data)
class ResumeUpdateView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = ResumeUpdateSerializer
queryset = Resume.objects.all()
lookup_field = 'id'
class EvaluateResumeListView(generics.ListAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = MatchedPostSerializer
def get_queryset(self):
call_command('matchedPosts', self.request.user.jobseeker.id)
queryset = MatchedPosts.objects.filter(seeker=self.request.user.jobseeker)
return queryset
class MatchedPostsListView(generics.ListAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = MatchedPostSerializer
def get_queryset(self):
queryset = MatchedPosts.objects.filter(seeker=self.request.user.jobseeker)
return queryset
class MatchedPostsDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = MatchedPostSerializer
queryset = MatchedPosts.objects.all()
lookup_field = 'id'
class ResumeCreateView(APIView):
permission_classes = [
permissions.IsAuthenticated
]
def post(self, request):
seeker = JobSeeker.objects.get(user=self.request.user.id)
data = request.data
data["seeker"] = seeker.id
serializer = ResumeSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=201)
return Response(serializer.errors, status=400)
class WorkHistoryCreateView(generics.CreateAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = WorkHistorySerializer
def perform_create(self, serializer):
serializer.save(resume=self.request.user.jobseeker.resume)
class WorkHistoryDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = WorkHistorySerializer
queryset = WorkHistory.objects.all()
lookup_field = 'id'
class WorkHistoryListView(generics.ListAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = WorkHistorySerializer
lookup_field = 'id'
def get_queryset(self):
queryset = WorkHistory.objects.filter(resume=self.request.user.jobseeker.resume)
return queryset
class EducationBackgroundCreateView(generics.CreateAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = EducationSerializer
def perform_create(self, serializer):
serializer.save(resume=self.request.user.jobseeker.resume)
class EducationBackgroundDetailView(generics.RetrieveUpdateDestroyAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = EducationSerializer
queryset = Education.objects.all()
lookup_field = 'id'
class EducationBackgroundListView(generics.ListAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = EducationSerializer
lookup_field = 'id'
def get_queryset(self):
queryset = Education.objects.filter(resume=self.request.user.jobseeker.resume)
return queryset
class ApplicationCreateView(generics.CreateAPIView):
permission_classes = [
permissions.IsAuthenticated
]
serializer_class = ApplicationSerializer
def perform_create(self, serializer):
serializer.save(seeker=self.request.user.jobseeker)
# class JobPostListView(generics.ListAPIView):
# queryset = JobPost.objects.all()
# # permission_classes = [permissions.IsAuthenticated, ]
# serializer_class = JobPostSerializer
# lookup_field = 'id'
#
# filter_backends = [SearchFilter, OrderingFilter, ]
# search_fields = [
# 'job_title',
# ]
| [
"rest_framework.response.Response",
"django.core.management.call_command"
] | [((1318, 1343), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1326, 1343), False, 'from rest_framework.response import Response\n'), ((1790, 1850), 'django.core.management.call_command', 'call_command', (['"""matchedPosts"""', 'self.request.user.jobseeker.id'], {}), "('matchedPosts', self.request.user.jobseeker.id)\n", (1802, 1850), False, 'from django.core.management import call_command\n'), ((2959, 2998), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (2967, 2998), False, 'from rest_framework.response import Response\n'), ((2906, 2943), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (2914, 2943), False, 'from rest_framework.response import Response\n'), ((1117, 1184), 'rest_framework.response.Response', 'Response', (["{'error': 'Given question object not found.'}"], {'status': '(404)'}), "({'error': 'Given question object not found.'}, status=404)\n", (1125, 1184), False, 'from rest_framework.response import Response\n')] |
# -*- coding: utf-8 -*-
"""Operators API router."""
from typing import Optional
from fastapi import APIRouter, Depends, Header
from sqlalchemy.orm import Session
import projects.schemas.operator
from projects import database
from projects.controllers import (
ExperimentController,
OperatorController,
OperatorParameterController,
ProjectController,
)
router = APIRouter(
prefix="/projects/{project_id}/experiments/{experiment_id}/operators/{operator_id}/parameters",
)
@router.patch("/{name}")
async def handle_patch_parameter(
project_id: str,
experiment_id: str,
operator_id: str,
name: str,
parameter: projects.schemas.operator.ParameterUpdate,
session: Session = Depends(database.session_scope),
kubeflow_userid: Optional[str] = Header(database.DB_TENANT),
):
"""
Handles PATCH requests to /{name}.
Parameters
----------
project_id : str
experiment_id : str
operator_id : str
name : str
    parameter : projects.schemas.operator.ParameterUpdate
session : sqlalchemy.orm.session.Session
kubeflow_userid : fastapi.Header
Returns
-------
    The operator with the updated parameter value.
"""
project_controller = ProjectController(session, kubeflow_userid=kubeflow_userid)
project_controller.raise_if_project_does_not_exist(project_id)
experiment_controller = ExperimentController(session)
experiment_controller.raise_if_experiment_does_not_exist(experiment_id)
operator_controller = OperatorController(session)
operator_controller.raise_if_operator_does_not_exist(operator_id)
parameter_controller = OperatorParameterController(session)
operator = parameter_controller.update_parameter(
name=name, operator_id=operator_id, parameter=parameter
)
return operator
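# Illustrative request against this route (the body fields are an assumption;
# the exact shape is defined by projects.schemas.operator.ParameterUpdate):
#
#     PATCH /projects/{project_id}/experiments/{experiment_id}/operators/{operator_id}/parameters/{name}
#     {"value": 0.5}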
| [
"fastapi.Header",
"projects.controllers.OperatorParameterController",
"projects.controllers.ProjectController",
"projects.controllers.OperatorController",
"fastapi.APIRouter",
"projects.controllers.ExperimentController",
"fastapi.Depends"
] | [((380, 495), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/projects/{project_id}/experiments/{experiment_id}/operators/{operator_id}/parameters"""'}), "(prefix=\n '/projects/{project_id}/experiments/{experiment_id}/operators/{operator_id}/parameters'\n )\n", (389, 495), False, 'from fastapi import APIRouter, Depends, Header\n'), ((717, 748), 'fastapi.Depends', 'Depends', (['database.session_scope'], {}), '(database.session_scope)\n', (724, 748), False, 'from fastapi import APIRouter, Depends, Header\n'), ((787, 813), 'fastapi.Header', 'Header', (['database.DB_TENANT'], {}), '(database.DB_TENANT)\n', (793, 813), False, 'from fastapi import APIRouter, Depends, Header\n'), ((1207, 1266), 'projects.controllers.ProjectController', 'ProjectController', (['session'], {'kubeflow_userid': 'kubeflow_userid'}), '(session, kubeflow_userid=kubeflow_userid)\n', (1224, 1266), False, 'from projects.controllers import ExperimentController, OperatorController, OperatorParameterController, ProjectController\n'), ((1363, 1392), 'projects.controllers.ExperimentController', 'ExperimentController', (['session'], {}), '(session)\n', (1383, 1392), False, 'from projects.controllers import ExperimentController, OperatorController, OperatorParameterController, ProjectController\n'), ((1496, 1523), 'projects.controllers.OperatorController', 'OperatorController', (['session'], {}), '(session)\n', (1514, 1523), False, 'from projects.controllers import ExperimentController, OperatorController, OperatorParameterController, ProjectController\n'), ((1622, 1658), 'projects.controllers.OperatorParameterController', 'OperatorParameterController', (['session'], {}), '(session)\n', (1649, 1658), False, 'from projects.controllers import ExperimentController, OperatorController, OperatorParameterController, ProjectController\n')] |
# I2C LCD library for LoPy board
# Ported from Adafruit_Python_SSD1306 library by Dmitrii (<EMAIL>)
# v0.4 beta
# Display types
kDisplayI2C128x32 = 1
kDisplayI2C128x64 = 2
kDisplaySPI128x32 = 3 # not tested
kDisplaySPI128x64 = 4
from machine import I2C
from machine import SPI
from machine import Pin
import time
import pins # for pin names, NOTE added to original file for this project
# I2C OLED Wiring: standard
i2c = None
# NOTE this pin section modified from original for this project to use pin names
# SPI OLED Wiring:
# D0 - pins.DISPLAY_D0_PIN (CLK)
# D1 - pins.DISPLAY_D1_PIN (MOSI)
# DC - pins.DISPLAY_DC_PIN
# RST - pins.DISPLAY_RES_PIN
# CS - not used (for some displays needs to be connected with GND)
spi = None
DC_PIN = Pin(pins.DISPLAY_DC_PIN, mode=Pin.OUT)
RST_PIN = Pin(pins.DISPLAY_RES_PIN, mode=Pin.OUT)
# LCD Control constants
SSD1306_I2C_ADDRESS = 0x3C
SSD1306_SETCONTRAST = 0x81
SSD1306_DISPLAYALLON_RESUME = 0xA4
SSD1306_DISPLAYALLON = 0xA5
SSD1306_NORMALDISPLAY = 0xA6
SSD1306_INVERTDISPLAY = 0xA7
SSD1306_DISPLAYOFF = 0xAE
SSD1306_DISPLAYON = 0xAF
SSD1306_SETDISPLAYOFFSET = 0xD3
SSD1306_SETCOMPINS = 0xDA
SSD1306_SETVCOMDETECT = 0xDB
SSD1306_SETDISPLAYCLOCKDIV = 0xD5
SSD1306_SETPRECHARGE = 0xD9
SSD1306_SETMULTIPLEX = 0xA8
SSD1306_SETLOWCOLUMN = 0x00
SSD1306_SETHIGHCOLUMN = 0x10
SSD1306_SETSTARTLINE = 0x40
SSD1306_MEMORYMODE = 0x20
SSD1306_COLUMNADDR = 0x21
SSD1306_PAGEADDR = 0x22
SSD1306_COMSCANINC = 0xC0
SSD1306_COMSCANDEC = 0xC8
SSD1306_SEGREMAP = 0xA0
SSD1306_CHARGEPUMP = 0x8D
SSD1306_EXTERNALVCC = 0x1
SSD1306_SWITCHCAPVCC = 0x2
# Scrolling constants
SSD1306_ACTIVATE_SCROLL = 0x2F
SSD1306_DEACTIVATE_SCROLL = 0x2E
SSD1306_SET_VERTICAL_SCROLL_AREA = 0xA3
SSD1306_RIGHT_HORIZONTAL_SCROLL = 0x26
SSD1306_LEFT_HORIZONTAL_SCROLL = 0x27
SSD1306_VERTICAL_AND_RIGHT_HORIZONTAL_SCROLL = 0x29
SSD1306_VERTICAL_AND_LEFT_HORIZONTAL_SCROLL = 0x2A
# Font data. Taken from https://github.com/hsmptg/lcd/blob/master/font.py
font = [
0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x5F, 0x00, 0x00,
0x00, 0x07, 0x00, 0x07, 0x00,
0x14, 0x7F, 0x14, 0x7F, 0x14,
0x24, 0x2A, 0x7F, 0x2A, 0x12,
0x23, 0x13, 0x08, 0x64, 0x62,
0x36, 0x49, 0x56, 0x20, 0x50,
0x00, 0x08, 0x07, 0x03, 0x00,
0x00, 0x1C, 0x22, 0x41, 0x00,
0x00, 0x41, 0x22, 0x1C, 0x00,
0x2A, 0x1C, 0x7F, 0x1C, 0x2A,
0x08, 0x08, 0x3E, 0x08, 0x08,
0x00, 0x80, 0x70, 0x30, 0x00,
0x08, 0x08, 0x08, 0x08, 0x08,
0x00, 0x00, 0x60, 0x60, 0x00,
0x20, 0x10, 0x08, 0x04, 0x02,
0x3E, 0x51, 0x49, 0x45, 0x3E,
0x00, 0x42, 0x7F, 0x40, 0x00,
0x72, 0x49, 0x49, 0x49, 0x46,
0x21, 0x41, 0x49, 0x4D, 0x33,
0x18, 0x14, 0x12, 0x7F, 0x10,
0x27, 0x45, 0x45, 0x45, 0x39,
0x3C, 0x4A, 0x49, 0x49, 0x31,
0x41, 0x21, 0x11, 0x09, 0x07,
0x36, 0x49, 0x49, 0x49, 0x36,
0x46, 0x49, 0x49, 0x29, 0x1E,
0x00, 0x00, 0x14, 0x00, 0x00,
0x00, 0x40, 0x34, 0x00, 0x00,
0x00, 0x08, 0x14, 0x22, 0x41,
0x14, 0x14, 0x14, 0x14, 0x14,
0x00, 0x41, 0x22, 0x14, 0x08,
0x02, 0x01, 0x59, 0x09, 0x06,
0x3E, 0x41, 0x5D, 0x59, 0x4E,
0x7C, 0x12, 0x11, 0x12, 0x7C,
0x7F, 0x49, 0x49, 0x49, 0x36,
0x3E, 0x41, 0x41, 0x41, 0x22,
0x7F, 0x41, 0x41, 0x41, 0x3E,
0x7F, 0x49, 0x49, 0x49, 0x41,
0x7F, 0x09, 0x09, 0x09, 0x01,
0x3E, 0x41, 0x41, 0x51, 0x73,
0x7F, 0x08, 0x08, 0x08, 0x7F,
0x00, 0x41, 0x7F, 0x41, 0x00,
0x20, 0x40, 0x41, 0x3F, 0x01,
0x7F, 0x08, 0x14, 0x22, 0x41,
0x7F, 0x40, 0x40, 0x40, 0x40,
0x7F, 0x02, 0x1C, 0x02, 0x7F,
0x7F, 0x04, 0x08, 0x10, 0x7F,
0x3E, 0x41, 0x41, 0x41, 0x3E,
0x7F, 0x09, 0x09, 0x09, 0x06,
0x3E, 0x41, 0x51, 0x21, 0x5E,
0x7F, 0x09, 0x19, 0x29, 0x46,
0x26, 0x49, 0x49, 0x49, 0x32,
0x03, 0x01, 0x7F, 0x01, 0x03,
0x3F, 0x40, 0x40, 0x40, 0x3F,
0x1F, 0x20, 0x40, 0x20, 0x1F,
0x3F, 0x40, 0x38, 0x40, 0x3F,
0x63, 0x14, 0x08, 0x14, 0x63,
0x03, 0x04, 0x78, 0x04, 0x03,
0x61, 0x59, 0x49, 0x4D, 0x43,
0x00, 0x7F, 0x41, 0x41, 0x41,
0x02, 0x04, 0x08, 0x10, 0x20,
0x00, 0x41, 0x41, 0x41, 0x7F,
0x04, 0x02, 0x01, 0x02, 0x04,
0x40, 0x40, 0x40, 0x40, 0x40,
0x00, 0x03, 0x07, 0x08, 0x00,
0x20, 0x54, 0x54, 0x78, 0x40,
0x7F, 0x28, 0x44, 0x44, 0x38,
0x38, 0x44, 0x44, 0x44, 0x28,
0x38, 0x44, 0x44, 0x28, 0x7F,
0x38, 0x54, 0x54, 0x54, 0x18,
0x00, 0x08, 0x7E, 0x09, 0x02,
0x18, 0xA4, 0xA4, 0x9C, 0x78,
0x7F, 0x08, 0x04, 0x04, 0x78,
0x00, 0x44, 0x7D, 0x40, 0x00,
0x20, 0x40, 0x40, 0x3D, 0x00,
0x7F, 0x10, 0x28, 0x44, 0x00,
0x00, 0x41, 0x7F, 0x40, 0x00,
0x7C, 0x04, 0x78, 0x04, 0x78,
0x7C, 0x08, 0x04, 0x04, 0x78,
0x38, 0x44, 0x44, 0x44, 0x38,
0xFC, 0x18, 0x24, 0x24, 0x18,
0x18, 0x24, 0x24, 0x18, 0xFC,
0x7C, 0x08, 0x04, 0x04, 0x08,
0x48, 0x54, 0x54, 0x54, 0x24,
0x04, 0x04, 0x3F, 0x44, 0x24,
0x3C, 0x40, 0x40, 0x20, 0x7C,
0x1C, 0x20, 0x40, 0x20, 0x1C,
0x3C, 0x40, 0x30, 0x40, 0x3C,
0x44, 0x28, 0x10, 0x28, 0x44,
0x4C, 0x90, 0x90, 0x90, 0x7C,
0x44, 0x64, 0x54, 0x4C, 0x44,
0x00, 0x08, 0x36, 0x41, 0x00,
0x00, 0x00, 0x77, 0x00, 0x00,
0x00, 0x41, 0x36, 0x08, 0x00,
0x02, 0x01, 0x02, 0x04, 0x02,
0x3C, 0x26, 0x23, 0x26, 0x3C]
# Display data
width = None
height = None
pages = None
buffer = None
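# isConnected() scans the I2C bus for the SSD1306 address; SPI displays cannot be probed and are assumed present.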
def isConnected():
if i2c != None:
# Check I2C devices
devices = i2c.scan() # returns list of slave addresses
for d in devices:
if d == SSD1306_I2C_ADDRESS: return True
return False
else:
# No check for SPI
return True
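# command1/command2/command3 send one to three command bytes: over I2C prefixed with a 0x00 control byte, over SPI with the D/C pin pulled low.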
def command1(c):
if i2c != None:
i2c.writeto(SSD1306_I2C_ADDRESS, bytearray([0, c]))
else:
DC_PIN.value(0)
spi.write(bytes([c]))
def command2(c1, c2):
if i2c != None:
i2c.writeto(SSD1306_I2C_ADDRESS, bytearray([0, c1, c2]))
else:
DC_PIN.value(0)
spi.write(bytes([c1, c2]))
def command3(c1, c2, c3):
if i2c != None:
i2c.writeto(SSD1306_I2C_ADDRESS, bytearray([0, c1, c2, c3]))
else:
DC_PIN.value(0)
spi.write(bytes([c1, c2, c3]))
def writeSPIData(data):
DC_PIN.value(1)
spi.write(bytes(data))
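# initialize() selects the display variant, allocates the matching frame buffer and opens the I2C or SPI bus before running the init sequence.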
def initialize(type):
global width, height, pages, buffer, i2c, spi
if type == kDisplayI2C128x32:
#128x32 I2C OLED Display
width = 128
height = 32
pages = 4 # height/8
buffer = [0]*512 # 128*32/8
i2c = I2C(0, I2C.MASTER, baudrate=100000)
initialize_128x32()
if type == kDisplayI2C128x64:
#128x64 I2C OLED Display
width = 128
height = 64
pages = 8 # height/8
buffer = [0]*1024 # 128*64/8
i2c = I2C(0, I2C.MASTER, baudrate=100000)
initialize_128x64()
if type == kDisplaySPI128x32:
#128x32 SPI OLED Display
width = 128
height = 32
pages = 4 # height/8
buffer = [0]*512 # 128*32/8
spi = SPI(0, mode=SPI.MASTER, baudrate=1000000, polarity=0, phase=0, firstbit=SPI.MSB)
RST_PIN.value(0)
time.sleep(0.01)
RST_PIN.value(1)
initialize_128x32()
if type == kDisplaySPI128x64:
#128x64 SPI OLED Display
width = 128
height = 64
pages = 8 # height/8
buffer = [0]*1024 # 128*64/8
spi = SPI(0, mode=SPI.MASTER, baudrate=1000000, polarity=0, phase=0, firstbit=SPI.MSB)
RST_PIN.value(0)
time.sleep(0.01)
RST_PIN.value(1)
initialize_128x64()
def initialize_128x32():
command1(SSD1306_DISPLAYOFF) # 0xAE
command2(SSD1306_SETDISPLAYCLOCKDIV, 0x80) # 0xD5
command2(SSD1306_SETMULTIPLEX, 0x1F) # 0xA8
command2(SSD1306_SETDISPLAYOFFSET, 0x0) # 0xD3
command1(SSD1306_SETSTARTLINE | 0x0) # line #0
command2(SSD1306_CHARGEPUMP, 0x14) # 0x8D
command2(SSD1306_MEMORYMODE, 0x00) # 0x20
command3(SSD1306_COLUMNADDR, 0, width-1)
command3(SSD1306_PAGEADDR, 0, pages-1)
command1(SSD1306_SEGREMAP | 0x1)
command1(SSD1306_COMSCANDEC)
command2(SSD1306_SETCOMPINS, 0x02) # 0xDA
command2(SSD1306_SETCONTRAST, 0x8F) # 0x81
command2(SSD1306_SETPRECHARGE, 0xF1) # 0xd9
command2(SSD1306_SETVCOMDETECT, 0x40) # 0xDB
command1(SSD1306_DISPLAYALLON_RESUME) # 0xA4
command1(SSD1306_NORMALDISPLAY) # 0xA6
def initialize_128x64():
command1(SSD1306_DISPLAYOFF) # 0xAE
command1(SSD1306_DISPLAYALLON_RESUME) # 0xA4
command2(SSD1306_SETDISPLAYCLOCKDIV, 0x80) # 0xD5
command2(SSD1306_SETMULTIPLEX, 0x3F) # 0xA8
command2(SSD1306_SETDISPLAYOFFSET, 0x0) # 0xD3
command1(SSD1306_SETSTARTLINE | 0x0) # line #0
command2(SSD1306_CHARGEPUMP, 0x14) # 0x8D
command2(SSD1306_MEMORYMODE, 0x00) # 0x20
command3(SSD1306_COLUMNADDR, 0, width-1)
command3(SSD1306_PAGEADDR, 0, pages-1)
command1(SSD1306_SEGREMAP | 0x1)
command1(SSD1306_COMSCANDEC)
command2(SSD1306_SETCOMPINS, 0x12) # 0xDA
command2(SSD1306_SETCONTRAST, 0xCF) # 0x81
command2(SSD1306_SETPRECHARGE, 0xF1) # 0xd9
command2(SSD1306_SETVCOMDETECT, 0x40) # 0xDB
command1(SSD1306_NORMALDISPLAY) # 0xA6
command1(SSD1306_DISPLAYON)
def set_contrast(contrast):
# Sets the contrast of the display. Contrast should be a value between 0 and 255.
    if contrast < 0 or contrast > 255:
        print('Contrast must be a value from 0 to 255 (inclusive).')
        return
    command2(SSD1306_SETCONTRAST, contrast)
def displayOff():
command1(SSD1306_DISPLAYOFF)
def displayOn():
command1(SSD1306_DISPLAYON)
def clearBuffer():
for i in range(0, len(buffer)):
buffer[i] = 0
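# addString() renders text with the 5x7 font at character cell (x, y); each glyph advances 6 buffer columns (5 font bytes plus one spacing column).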
def addString(x, y, str):
symPos = width*y + 6*x
for i in range(0, len(str)):
c = 5*(ord(str[i]) - 32)
buffer[symPos] = font[c]
buffer[symPos + 1] = font[c+1]
buffer[symPos + 2] = font[c+2]
buffer[symPos + 3] = font[c+3]
buffer[symPos + 4] = font[c+4]
symPos += 6
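# drawBuffer() pushes the whole frame buffer to the display: one burst over SPI, or 16-byte chunks prefixed with the 0x40 data byte over I2C.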
def drawBuffer():
command1(SSD1306_SETLOWCOLUMN)
command1(SSD1306_SETHIGHCOLUMN)
command1(SSD1306_SETSTARTLINE)
#Write display buffer to physical display.
if spi != None:
writeSPIData(buffer)
else:
line = [0]*17
line[0] = 0x40
for i in range(0, len(buffer), 16):
for p in range(0, 16):
line[p+1] = buffer[i + p]
i2c.writeto(SSD1306_I2C_ADDRESS, bytearray(line))
if __name__ == "__main__":
import sys, machine, os
print("Started")
displayType = kDisplayI2C128x64
initialize(displayType)
if isConnected():
set_contrast(128) # 1-255
displayOn()
clearBuffer()
addString(0, 0, sys.platform + " " + sys.version)
addString(0, 1, "---")
addString(0, 2, "CPU: {} MHz".format(machine.freq()[0]/1000000))
addString(0, 4, "Version: {}".format(os.uname().release))
addString(0, 5, "LoPy font test")
addString(0, 6, "AaBbCcDdEeFfGgHhIi")
addString(0, 7, "0123456789012345")
drawBuffer()
else:
print("Error: LCD not found")
print("Done")
| [
"machine.SPI",
"machine.freq",
"time.sleep",
"machine.Pin",
"machine.I2C",
"os.uname"
] | [((740, 778), 'machine.Pin', 'Pin', (['pins.DISPLAY_DC_PIN'], {'mode': 'Pin.OUT'}), '(pins.DISPLAY_DC_PIN, mode=Pin.OUT)\n', (743, 778), False, 'from machine import Pin\n'), ((789, 828), 'machine.Pin', 'Pin', (['pins.DISPLAY_RES_PIN'], {'mode': 'Pin.OUT'}), '(pins.DISPLAY_RES_PIN, mode=Pin.OUT)\n', (792, 828), False, 'from machine import Pin\n'), ((6463, 6498), 'machine.I2C', 'I2C', (['(0)', 'I2C.MASTER'], {'baudrate': '(100000)'}), '(0, I2C.MASTER, baudrate=100000)\n', (6466, 6498), False, 'from machine import I2C\n'), ((6715, 6750), 'machine.I2C', 'I2C', (['(0)', 'I2C.MASTER'], {'baudrate': '(100000)'}), '(0, I2C.MASTER, baudrate=100000)\n', (6718, 6750), False, 'from machine import I2C\n'), ((6966, 7051), 'machine.SPI', 'SPI', (['(0)'], {'mode': 'SPI.MASTER', 'baudrate': '(1000000)', 'polarity': '(0)', 'phase': '(0)', 'firstbit': 'SPI.MSB'}), '(0, mode=SPI.MASTER, baudrate=1000000, polarity=0, phase=0, firstbit=SPI.MSB\n )\n', (6969, 7051), False, 'from machine import SPI\n'), ((7080, 7096), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (7090, 7096), False, 'import time\n'), ((7338, 7423), 'machine.SPI', 'SPI', (['(0)'], {'mode': 'SPI.MASTER', 'baudrate': '(1000000)', 'polarity': '(0)', 'phase': '(0)', 'firstbit': 'SPI.MSB'}), '(0, mode=SPI.MASTER, baudrate=1000000, polarity=0, phase=0, firstbit=SPI.MSB\n )\n', (7341, 7423), False, 'from machine import SPI\n'), ((7452, 7468), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (7462, 7468), False, 'import time\n'), ((11289, 11299), 'os.uname', 'os.uname', ([], {}), '()\n', (11297, 11299), False, 'import sys, machine, os\n'), ((11215, 11229), 'machine.freq', 'machine.freq', ([], {}), '()\n', (11227, 11229), False, 'import sys, machine, os\n')] |
import matplotlib.pyplot as plt
from math import gcd
y = [1, 1]
for n in range(2, 10 ** 3):
if gcd(n, y[n - 1]) == 1:
y.append(y[n - 1] + n + 1)
else:
        y.append(y[n - 1] // gcd(n, y[n - 1]))
x = range(len(y))
plt.figure("A133058 - Fly straight, dammit")
plt.scatter(x, y, s=8)
plt.xlabel('n')
plt.ylabel('A133058(n)')
plt.title("A133058 - Fly straight, dammit")
plt.show()
| [
"matplotlib.pyplot.ylabel",
"math.gcd",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] | [((237, 281), 'matplotlib.pyplot.figure', 'plt.figure', (['"""A133058 - Fly straight, dammit"""'], {}), "('A133058 - Fly straight, dammit')\n", (247, 281), True, 'import matplotlib.pyplot as plt\n'), ((282, 304), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y'], {'s': '(8)'}), '(x, y, s=8)\n', (293, 304), True, 'import matplotlib.pyplot as plt\n'), ((305, 320), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""n"""'], {}), "('n')\n", (315, 320), True, 'import matplotlib.pyplot as plt\n'), ((321, 345), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""A133058(n)"""'], {}), "('A133058(n)')\n", (331, 345), True, 'import matplotlib.pyplot as plt\n'), ((346, 389), 'matplotlib.pyplot.title', 'plt.title', (['"""A133058 - Fly straight, dammit"""'], {}), "('A133058 - Fly straight, dammit')\n", (355, 389), True, 'import matplotlib.pyplot as plt\n'), ((390, 400), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (398, 400), True, 'import matplotlib.pyplot as plt\n'), ((101, 117), 'math.gcd', 'gcd', (['n', 'y[n - 1]'], {}), '(n, y[n - 1])\n', (104, 117), False, 'from math import gcd\n'), ((201, 217), 'math.gcd', 'gcd', (['n', 'y[n - 1]'], {}), '(n, y[n - 1])\n', (204, 217), False, 'from math import gcd\n')] |
import functools
from eth_abi import (
encode_abi as eth_abi_encode_abi
)
from eth_utils import (
function_abi_to_4byte_selector,
encode_hex,
add_0x_prefix,
)
from web3.utils.abi import (
filter_by_type,
filter_by_name,
filter_by_argument_count,
filter_by_encodability,
get_abi_input_types,
map_abi_data,
merge_args_and_kwargs,
check_if_arguments_can_be_encoded,
)
from web3.utils.datastructures import (
HexBytes,
)
from web3.utils.normalizers import (
abi_address_to_hex,
abi_bytes_to_hex,
abi_ens_resolver,
abi_string_to_hex,
hexstrs_to_bytes,
)
from web3.utils.encoding import (
to_hex,
)
from eth_abi.exceptions import (
EncodingError,
)
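# Find the single function ABI entry matching the requested name and argument signature; no match or ambiguity raises ValueError.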
def find_matching_fn_abi(abi, fn_name=None, args=None, kwargs=None):
filters = []
if fn_name:
filters.append(functools.partial(filter_by_name, fn_name))
if args is not None or kwargs is not None:
if args is None:
args = tuple()
if kwargs is None:
kwargs = {}
num_arguments = len(args) + len(kwargs)
filters.extend([
functools.partial(filter_by_argument_count, num_arguments),
functools.partial(filter_by_encodability, args, kwargs),
])
function_candidates = filter_by_type('function', abi)
for filter_fn in filters:
function_candidates = filter_fn(function_candidates)
if len(function_candidates) == 1:
return function_candidates[0]
elif not function_candidates:
break
if not function_candidates:
raise ValueError("No matching functions found")
else:
raise ValueError("Multiple functions found")
def encode_abi(web3, abi, arguments, data=None):
argument_types = get_abi_input_types(abi)
if not check_if_arguments_can_be_encoded(abi, arguments, {}):
raise TypeError(
"One or more arguments could not be encoded to the necessary "
"ABI type. Expected types are: {0}".format(
', '.join(argument_types),
)
)
try:
normalizers = [
abi_ens_resolver(web3),
abi_address_to_hex,
abi_bytes_to_hex,
abi_string_to_hex,
hexstrs_to_bytes,
]
normalized_arguments = map_abi_data(
normalizers,
argument_types,
arguments,
)
encoded_arguments = eth_abi_encode_abi(
argument_types,
normalized_arguments,
)
except EncodingError as e:
raise TypeError(
"One or more arguments could not be encoded to the necessary "
"ABI type: {0}".format(str(e))
)
if data:
return to_hex(HexBytes(data) + encoded_arguments)
else:
return encode_hex(encoded_arguments)
def prepare_transaction(abi,
address,
web3,
fn_name,
fn_args=None,
fn_kwargs=None,
transaction=None):
"""
    Returns a dictionary of the transaction that could be used to call this contract function.
TODO: make this a public API
TODO: add new prepare_deploy_transaction API
"""
if transaction is None:
prepared_transaction = {}
else:
prepared_transaction = dict(**transaction)
if 'data' in prepared_transaction:
raise ValueError("Transaction parameter may not contain a 'data' key")
if address:
prepared_transaction.setdefault('to', address)
prepared_transaction['data'] = encode_transaction_data(
abi,
web3,
fn_name,
fn_args,
fn_kwargs,
)
return prepared_transaction
def encode_transaction_data(abi, web3, fn_name, args=None, kwargs=None):
fn_abi, fn_selector, fn_arguments = get_function_info(
abi, fn_name, args, kwargs,
)
return add_0x_prefix(encode_abi(web3, fn_abi, fn_arguments, fn_selector))
def get_function_info(abi, fn_name, args=None, kwargs=None):
if args is None:
args = tuple()
if kwargs is None:
kwargs = {}
fn_abi = find_matching_fn_abi(abi, fn_name, args, kwargs)
fn_selector = encode_hex(function_abi_to_4byte_selector(fn_abi))
fn_arguments = merge_args_and_kwargs(fn_abi, args, kwargs)
return fn_abi, fn_selector, fn_arguments
| [
"web3.utils.abi.map_abi_data",
"web3.utils.abi.filter_by_type",
"eth_abi.encode_abi",
"functools.partial",
"eth_utils.encode_hex",
"web3.utils.abi.check_if_arguments_can_be_encoded",
"eth_utils.function_abi_to_4byte_selector",
"web3.utils.datastructures.HexBytes",
"web3.utils.normalizers.abi_ens_resolver",
"web3.utils.abi.get_abi_input_types",
"web3.utils.abi.merge_args_and_kwargs"
] | [((1299, 1330), 'web3.utils.abi.filter_by_type', 'filter_by_type', (['"""function"""', 'abi'], {}), "('function', abi)\n", (1313, 1330), False, 'from web3.utils.abi import filter_by_type, filter_by_name, filter_by_argument_count, filter_by_encodability, get_abi_input_types, map_abi_data, merge_args_and_kwargs, check_if_arguments_can_be_encoded\n'), ((1788, 1812), 'web3.utils.abi.get_abi_input_types', 'get_abi_input_types', (['abi'], {}), '(abi)\n', (1807, 1812), False, 'from web3.utils.abi import filter_by_type, filter_by_name, filter_by_argument_count, filter_by_encodability, get_abi_input_types, map_abi_data, merge_args_and_kwargs, check_if_arguments_can_be_encoded\n'), ((4340, 4383), 'web3.utils.abi.merge_args_and_kwargs', 'merge_args_and_kwargs', (['fn_abi', 'args', 'kwargs'], {}), '(fn_abi, args, kwargs)\n', (4361, 4383), False, 'from web3.utils.abi import filter_by_type, filter_by_name, filter_by_argument_count, filter_by_encodability, get_abi_input_types, map_abi_data, merge_args_and_kwargs, check_if_arguments_can_be_encoded\n'), ((1825, 1878), 'web3.utils.abi.check_if_arguments_can_be_encoded', 'check_if_arguments_can_be_encoded', (['abi', 'arguments', '{}'], {}), '(abi, arguments, {})\n', (1858, 1878), False, 'from web3.utils.abi import filter_by_type, filter_by_name, filter_by_argument_count, filter_by_encodability, get_abi_input_types, map_abi_data, merge_args_and_kwargs, check_if_arguments_can_be_encoded\n'), ((2338, 2390), 'web3.utils.abi.map_abi_data', 'map_abi_data', (['normalizers', 'argument_types', 'arguments'], {}), '(normalizers, argument_types, arguments)\n', (2350, 2390), False, 'from web3.utils.abi import filter_by_type, filter_by_name, filter_by_argument_count, filter_by_encodability, get_abi_input_types, map_abi_data, merge_args_and_kwargs, check_if_arguments_can_be_encoded\n'), ((2466, 2522), 'eth_abi.encode_abi', 'eth_abi_encode_abi', (['argument_types', 'normalized_arguments'], {}), '(argument_types, normalized_arguments)\n', (2484, 2522), True, 'from eth_abi import encode_abi as eth_abi_encode_abi\n'), ((2839, 2868), 'eth_utils.encode_hex', 'encode_hex', (['encoded_arguments'], {}), '(encoded_arguments)\n', (2849, 2868), False, 'from eth_utils import function_abi_to_4byte_selector, encode_hex, add_0x_prefix\n'), ((4280, 4318), 'eth_utils.function_abi_to_4byte_selector', 'function_abi_to_4byte_selector', (['fn_abi'], {}), '(fn_abi)\n', (4310, 4318), False, 'from eth_utils import function_abi_to_4byte_selector, encode_hex, add_0x_prefix\n'), ((851, 893), 'functools.partial', 'functools.partial', (['filter_by_name', 'fn_name'], {}), '(filter_by_name, fn_name)\n', (868, 893), False, 'import functools\n'), ((2150, 2172), 'web3.utils.normalizers.abi_ens_resolver', 'abi_ens_resolver', (['web3'], {}), '(web3)\n', (2166, 2172), False, 'from web3.utils.normalizers import abi_address_to_hex, abi_bytes_to_hex, abi_ens_resolver, abi_string_to_hex, hexstrs_to_bytes\n'), ((1132, 1190), 'functools.partial', 'functools.partial', (['filter_by_argument_count', 'num_arguments'], {}), '(filter_by_argument_count, num_arguments)\n', (1149, 1190), False, 'import functools\n'), ((1204, 1259), 'functools.partial', 'functools.partial', (['filter_by_encodability', 'args', 'kwargs'], {}), '(filter_by_encodability, args, kwargs)\n', (1221, 1259), False, 'import functools\n'), ((2778, 2792), 'web3.utils.datastructures.HexBytes', 'HexBytes', (['data'], {}), '(data)\n', (2786, 2792), False, 'from web3.utils.datastructures import HexBytes\n')] |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------
# Copyright (C) 2009 StatPro Italia s.r.l.
#
# StatPro Italia
# Via <NAME> 4
# I-20123 Milano
# ITALY
#
# phone: +39 02 96875 1
# fax: +39 02 96875 605
#
# email: <EMAIL>
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the license for more details.
# -----------------------------------------------------------
#
# Author: <NAME> <<EMAIL>>
"""DRMAA C library function wrappers"""
from __future__ import absolute_import
from ctypes import *
from ctypes.util import find_library
from somadrmaa.errors import error_check, error_buffer
from soma_workflow.utils import DetectFindLib
import os
_drmaa_lib_env_name = 'DRMAA_LIBRARY_PATH'
(DRMMA_LIB_FOUND, _lib) = DetectFindLib(_drmaa_lib_env_name, 'drmaa')
STRING = c_char_p
size_t = c_ulong
ptrdiff_t = c_int
drmaa_init = _lib.drmaa_init
drmaa_init.restype = error_check
drmaa_init.argtypes = [STRING, STRING, size_t]
drmaa_exit = _lib.drmaa_exit
drmaa_exit.restype = error_check
drmaa_exit.argtypes = [STRING, size_t]
def init(contact=None):
return _lib.drmaa_init(contact, error_buffer, sizeof(error_buffer))
_lib.drmaa_exit.argtypes = [c_char_p, c_size_t]
_lib.drmaa_init.restype = error_check
def exit():
return _lib.drmaa_exit(error_buffer, sizeof(error_buffer))
# structures
class drmaa_job_template_s(Structure):
pass
drmaa_job_template_t = drmaa_job_template_s
drmaa_job_template_s._fields_ = [
]
class drmaa_attr_names_s(Structure):
pass
drmaa_attr_names_t = drmaa_attr_names_s
drmaa_attr_names_s._fields_ = [
]
class drmaa_attr_values_s(Structure):
pass
drmaa_attr_values_t = drmaa_attr_values_s
drmaa_attr_values_s._fields_ = [
]
class drmaa_job_ids_s(Structure):
pass
drmaa_job_ids_t = drmaa_job_ids_s
drmaa_job_ids_s._fields_ = [
]
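# ctypes prototypes for the DRMAA C calls: each restype is routed through error_check and argtypes mirror the C signatures.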
drmaa_get_contact = _lib.drmaa_get_contact
drmaa_get_contact.restype = error_check
drmaa_get_contact.argtypes = [STRING, size_t, STRING, size_t]
drmaa_version = _lib.drmaa_version
drmaa_version.restype = error_check
drmaa_version.argtypes = [POINTER(c_uint), POINTER(c_uint), STRING, size_t]
drmaa_get_DRM_system = _lib.drmaa_get_DRM_system
drmaa_get_DRM_system.restype = error_check
drmaa_get_DRM_system.argtypes = [STRING, size_t, STRING, size_t]
drmaa_get_DRMAA_implementation = _lib.drmaa_get_DRMAA_implementation
drmaa_get_DRMAA_implementation.restype = error_check
drmaa_get_DRMAA_implementation.argtypes = [STRING, size_t, STRING, size_t]
drmaa_allocate_job_template = _lib.drmaa_allocate_job_template
drmaa_allocate_job_template.restype = error_check
drmaa_allocate_job_template.argtypes = [
POINTER(POINTER(drmaa_job_template_t)), STRING, size_t]
drmaa_delete_job_template = _lib.drmaa_delete_job_template
drmaa_delete_job_template.restype = error_check
drmaa_delete_job_template.argtypes = [
POINTER(drmaa_job_template_t), STRING, size_t]
drmaa_set_attribute = _lib.drmaa_set_attribute
drmaa_set_attribute.restype = error_check
drmaa_set_attribute.argtypes = [POINTER(drmaa_job_template_t), STRING,
STRING, STRING, size_t]
drmaa_get_attribute = _lib.drmaa_get_attribute
drmaa_get_attribute.restype = error_check
drmaa_get_attribute.argtypes = [POINTER(drmaa_job_template_t), STRING,
STRING, size_t, STRING, size_t]
drmaa_get_next_attr_name = _lib.drmaa_get_next_attr_name
drmaa_get_next_attr_name.restype = c_int
drmaa_get_next_attr_name.argtypes = [
POINTER(drmaa_attr_names_t), STRING, size_t]
drmaa_get_next_attr_value = _lib.drmaa_get_next_attr_value
drmaa_get_next_attr_value.restype = c_int
drmaa_get_next_attr_value.argtypes = [
POINTER(drmaa_attr_values_t), STRING, size_t]
drmaa_get_next_job_id = _lib.drmaa_get_next_job_id
drmaa_get_next_job_id.restype = error_check
drmaa_get_next_job_id.argtypes = [POINTER(drmaa_job_ids_t), STRING, size_t]
drmaa_release_attr_names = _lib.drmaa_release_attr_names
drmaa_release_attr_names.restype = None
drmaa_release_attr_names.argtypes = [POINTER(drmaa_attr_names_t)]
drmaa_release_attr_values = _lib.drmaa_release_attr_values
drmaa_release_attr_values.restype = None
drmaa_release_attr_values.argtypes = [POINTER(drmaa_attr_values_t)]
drmaa_release_job_ids = _lib.drmaa_release_job_ids
drmaa_release_job_ids.restype = None
drmaa_release_job_ids.argtypes = [POINTER(drmaa_job_ids_t)]
drmaa_set_vector_attribute = _lib.drmaa_set_vector_attribute
drmaa_set_vector_attribute.restype = error_check
drmaa_set_vector_attribute.argtypes = [POINTER(drmaa_job_template_t), STRING,
POINTER(STRING), STRING, size_t]
drmaa_get_vector_attribute = _lib.drmaa_get_vector_attribute
drmaa_get_vector_attribute.restype = error_check
drmaa_get_vector_attribute.argtypes = [POINTER(drmaa_job_template_t), STRING,
POINTER(POINTER(drmaa_attr_values_t)), STRING, size_t]
drmaa_get_attribute_names = _lib.drmaa_get_attribute_names
drmaa_get_attribute_names.restype = error_check
drmaa_get_attribute_names.argtypes = [
POINTER(POINTER(drmaa_attr_names_t)), STRING, size_t]
drmaa_get_vector_attribute_names = _lib.drmaa_get_vector_attribute_names
drmaa_get_vector_attribute_names.restype = error_check
drmaa_get_vector_attribute_names.argtypes = [
POINTER(POINTER(drmaa_attr_names_t)), STRING, size_t]
try:
drmaa_get_num_attr_names = _lib.drmaa_get_num_attr_names
drmaa_get_num_attr_names.restype = c_int
drmaa_get_num_attr_names.argtypes = [
POINTER(drmaa_attr_names_t), POINTER(c_int)]
drmaa_get_num_attr_values = _lib.drmaa_get_num_attr_values
drmaa_get_num_attr_values.restype = c_int
drmaa_get_num_attr_values.argtypes = [
POINTER(drmaa_attr_values_t), POINTER(c_int)]
except AttributeError: # the above are present from 1.0 onward only
pass
drmaa_run_job = _lib.drmaa_run_job
drmaa_run_job.restype = error_check
drmaa_run_job.argtypes = [
STRING, size_t, POINTER(drmaa_job_template_t), STRING, size_t]
drmaa_run_bulk_jobs = _lib.drmaa_run_bulk_jobs
drmaa_run_bulk_jobs.restype = error_check
drmaa_run_bulk_jobs.argtypes = [POINTER(POINTER(drmaa_job_ids_t)),
POINTER(drmaa_job_template_t),
c_int, c_int, c_int, STRING, size_t]
drmaa_control = _lib.drmaa_control
drmaa_control.restype = error_check
drmaa_control.argtypes = [STRING, c_int, STRING, size_t]
drmaa_synchronize = _lib.drmaa_synchronize
drmaa_synchronize.restype = error_check
drmaa_synchronize.argtypes = [POINTER(STRING), c_long, c_int, STRING, size_t]
drmaa_wait = _lib.drmaa_wait
drmaa_wait.restype = error_check
drmaa_wait.argtypes = [STRING, STRING, size_t, POINTER(c_int), c_long,
POINTER(POINTER(drmaa_attr_values_t)), STRING, size_t]
drmaa_wifexited = _lib.drmaa_wifexited
drmaa_wifexited.restype = error_check
drmaa_wifexited.argtypes = [POINTER(c_int), c_int, STRING, size_t]
drmaa_wexitstatus = _lib.drmaa_wexitstatus
drmaa_wexitstatus.restype = error_check
drmaa_wexitstatus.argtypes = [POINTER(c_int), c_int, STRING, size_t]
drmaa_wifsignaled = _lib.drmaa_wifsignaled
drmaa_wifsignaled.restype = error_check
drmaa_wifsignaled.argtypes = [POINTER(c_int), c_int, STRING, size_t]
drmaa_wtermsig = _lib.drmaa_wtermsig
drmaa_wtermsig.restype = error_check
drmaa_wtermsig.argtypes = [STRING, size_t, c_int, STRING, size_t]
drmaa_wcoredump = _lib.drmaa_wcoredump
drmaa_wcoredump.restype = error_check
drmaa_wcoredump.argtypes = [POINTER(c_int), c_int, STRING, size_t]
drmaa_wifaborted = _lib.drmaa_wifaborted
drmaa_wifaborted.restype = error_check
drmaa_wifaborted.argtypes = [POINTER(c_int), c_int, STRING, size_t]
drmaa_job_ps = _lib.drmaa_job_ps
drmaa_job_ps.restype = error_check
drmaa_job_ps.argtypes = [STRING, POINTER(c_int), STRING, size_t]
drmaa_strerror = _lib.drmaa_strerror
drmaa_strerror.restype = STRING
drmaa_strerror.argtypes = [c_int]
'''
Missing APIs (not yet wrapped):
drmaa_get_num_job_ids
drmaa_read_configuration_file
drmaa_read_configuration
'''
| [
"soma_workflow.utils.DetectFindLib"
] | [((901, 944), 'soma_workflow.utils.DetectFindLib', 'DetectFindLib', (['_drmaa_lib_env_name', '"""drmaa"""'], {}), "(_drmaa_lib_env_name, 'drmaa')\n", (914, 944), False, 'from soma_workflow.utils import DetectFindLib\n')] |
import myRecorder as rcd
import matplotlib.pyplot as plt
import numpy as np
DURATION = 30
fig = plt.figure()
ax = fig.add_subplot(211)
ax.set_ylim(-5e4,5e4)
ax2 = fig.add_subplot(212)
fig.show()
data_array = []
try:
rec = rcd.Recorder(channels = 1,
rate = 44100,
chunk_size = 1024,
num_chunk = 8,
device_name = 'Line (U24XL with SPDIF I/O)')
rec.stream_init(playback = False)
except Exception as e:
print(e)
quit()
#rec.current_device_info()
line = ax.plot(range(len(rec.get_buffer())),rec.get_buffer())[0]
data_array = np.append(data_array, rec.signal_data)
line2 = ax2.plot(range(5),range(5))[0]
samples = 1000
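# Update loop: redraw the top axes from the recorder's rolling buffer while data_array accumulates the raw chunks.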
try:
#while True:
for _ in range(samples):
        data_array = np.append(data_array, rec.signal_data)
#print(data_array)
#ax2.cla()
#ax2.plot(range(len(data_array)),data_array)
line.set_ydata(rec.get_buffer())
fig.canvas.draw()
fig.canvas.flush_events()
except Exception as e:
print('Some error happened!')
print(e)
finally:
rec.stream_stop()
rec.close()
#plt.close(fig)
| [
"numpy.append",
"matplotlib.pyplot.figure",
"myRecorder.Recorder"
] | [((98, 110), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (108, 110), True, 'import matplotlib.pyplot as plt\n'), ((633, 671), 'numpy.append', 'np.append', (['data_array', 'rec.signal_data'], {}), '(data_array, rec.signal_data)\n', (642, 671), True, 'import numpy as np\n'), ((230, 343), 'myRecorder.Recorder', 'rcd.Recorder', ([], {'channels': '(1)', 'rate': '(44100)', 'chunk_size': '(1024)', 'num_chunk': '(8)', 'device_name': '"""Line (U24XL with SPDIF I/O)"""'}), "(channels=1, rate=44100, chunk_size=1024, num_chunk=8,\n device_name='Line (U24XL with SPDIF I/O)')\n", (242, 343), True, 'import myRecorder as rcd\n'), ((822, 860), 'numpy.append', 'np.append', (['data_array', 'rec.signal_data'], {}), '(data_array, rec.signal_data)\n', (831, 860), True, 'import numpy as np\n')] |
# Copyright (c) 2019. Partners HealthCare and other members of
# Forome Association
#
# Developed by <NAME> based on contributions by <NAME>,
# <NAME>, <NAME> and other members of Division of
# Genetics, Brigham and Women's Hospital
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numbers
from io import StringIO
from xml.sax.saxutils import escape
#===============================================
def htmlEscape(val):
if val is None:
return "null"
if val == "":
return ""
return escape(str(val))
#===============================================
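# Render a JSON-like value as HTML: top-level dict keys are bolded and separated by <br/>, deeper levels fall back to a compact inline form.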
def jsonHtmlRepr(obj, level = 0):
if obj is None:
return "null"
if isinstance(obj, str):
return htmlEscape(obj)
if isinstance(obj, numbers.Number):
return str(obj)
if isinstance(obj, dict):
if level < 2:
ret = []
for key in sorted(obj.keys()):
if level == 0:
ret.append("<b>%s</b>: " % htmlEscape(key))
ret.append("<br/>")
else:
ret.append("%s: " % htmlEscape(key))
rep_val = jsonHtmlRepr(obj[key], level + 1)
if level == 0:
rep_val = htmlEscape(rep_val)
ret.append(rep_val)
ret.append(", ")
if level == 0:
ret.append("<br/>")
while len(ret) > 0 and ret[-1] in ("<br/>", ", "):
del ret[-1]
return ''.join(ret)
return '{' + ', '.join(['%s:"%s"' %
(key, jsonHtmlRepr(obj[key], level + 1))
for key in sorted(obj.keys())]) + '}'
elif isinstance(obj, list):
ret = '[' + ', '.join([jsonHtmlRepr(sub_obj, level + 1)
for sub_obj in obj]) + ']'
if level == 0:
return htmlEscape(ret)
return ret
return '???'
#===============================================
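# Pretty-print a tab-separated VCF record, grouping short fields onto shared lines and expanding the CSQ annotation into numbered chunks.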
def vcfRepr(vcf_content):
output = StringIO()
collect_str = ""
for fld in vcf_content.split('\t'):
if len(fld) < 40:
if len(collect_str) < 60:
collect_str += "\t" + fld
else:
print(collect_str[1:], file = output)
collect_str = "\t" + fld
continue
if collect_str:
print(collect_str[1:], file = output)
collect_str = ""
for vv in fld.split(';'):
var, q, val = vv.partition('=')
if var == "CSQ":
print("==v====SCQ======v========", file = output)
for idx, dt in enumerate(val.split(',')):
ddd = dt.split('|')
print("%d:\t%s" % (idx, '|'.join(ddd[:12])), file = output)
print("\t|%s" % ('|'.join(ddd[12:29])), file = output)
print("\t|%s" % ('|'.join(ddd[28:33])), file = output)
print("\t|%s" % ('|'.join(ddd[33:40])), file = output)
print("\t|%s" % ('|'.join(ddd[40:50])), file = output)
print("\t|%s" % ('|'.join(ddd[50:])), file = output)
print("==^====SCQ======^========", file = output)
else:
print(vv, file = output)
if collect_str:
print(collect_str[1:], file = output)
return output.getvalue()
| [
"io.StringIO"
] | [((2499, 2509), 'io.StringIO', 'StringIO', ([], {}), '()\n', (2507, 2509), False, 'from io import StringIO\n')] |
import glob
from ftplib import FTP
import numpy as np
import pandas
import requests
def main():
OutDir = r'W:\Projets\EC_stations_download\\'
if not glob.os.path.exists(OutDir):
glob.os.makedirs(OutDir)
# Connect and login
ftp = FTP('ftp.tor.ec.gc.ca')
ftp.login('client_climate')
# Change working directory
ftp.cwd('/Pub/Get_More_Data_Plus_de_donnees')
# Download station inventory info
filename = 'Station Inventory EN.csv'
ftp.retrbinary("RETR " + filename, open(OutDir + filename, 'wb').write)
# import station inventory as pandas dataframe
stats = pandas.read_csv(OutDir + filename, header=3)
download_daily(stats, OutDir)
download_hourly(stats, OutDir)
def download_hourly(stats, OutDir):
# get daily station data
time_step = 'HLY'
timeframe = 'timeframe=1' # Corresponding code for hourly from readme info
stats = stats[
stats[time_step + ' First Year'].notnull()] # stats[:][~np.isnan(stats[time_step +' First Year' ])] #get data
stats = stats[stats[time_step + ' Last Year'].notnull()] # stats[:][~np.isnan(stats[time_step +' Last Year' ])]
for index, row in stats.iterrows():
# Only get eastern provinces for now
if 'QUEBEC' in row['Province']: # \
# or 'ONTARIO' in row['Province']\
# or 'NOVA SCOTIA' in row['Province']\
# or 'PRINCE EDWARD ISLAND' in row['Province']\
# or 'ONTARIO' in row['Province']\
# or 'NEWFOUNDLAND' in row['Province']\
# or 'NEW BRUNSWICK' in row['Province']\
# or 'MANITOBA' in row['Province']:
# check that station has a first and last year value for HLY time step
if not np.isnan(row[time_step + ' First Year']) and not np.isnan(row[time_step + ' Last Year']):
# output directory name
print(row['Name'])
outrep1 = OutDir + time_step + '\\' + row['Province'].replace(' ', '_') + '\\' + row[
'Climate ID'] + '_' + row['Name'].replace('\\', '_').replace('/', '_').replace(' ', '_')
# create if necessary
if not glob.os.path.exists(outrep1):
glob.os.makedirs(outrep1)
# loop through years and download
for y in range(int(row[time_step + ' First Year']), int(row[time_step + ' Last Year'] + 1)):
for m in range(1, 13):
URL = 'http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=' + str(
row['Station ID']) + '&Year=' + str(y) + '&Month=' + str(
m) + '&' + timeframe + '&submit= Download+Data'
r = requests.get(URL)
outfile = outrep1 + '\\' + row['Climate ID'] + '_' + row['Name'].replace('\\', '_').replace('/',
'_').replace(
' ', '_') + '_' + str(y) + '_' + str(m).zfill(2) + '_' + time_step + '.csv'
with open(outfile, 'wb') as f:
f.write(r.content)
def download_daily(stats, OutDir):
# get daily station data
time_step = 'DLY'
timeframe = 'timeframe=2' # Corresponding code for daily from readme info
for d in range(0, len(stats['Name'])):
# check that station has a first and last year value for DLY time step
if not np.isnan(stats[time_step + ' First Year'][d]) and not np.isnan(stats[time_step + ' Last Year'][d]):
# output directory name
outrep1 = OutDir + time_step + '\\' + stats['Province'][d].replace(' ', '_') + '\\' + stats['Climate ID'][
d] + '_' + stats['Name'][d].replace('\\', '_').replace('/', '_').replace(' ', '_')
# create if necessary
if not glob.os.path.exists(outrep1):
glob.os.makedirs(outrep1)
# loop through years and download
for y in range(int(stats[time_step + ' First Year'][d]), int(stats[time_step + ' Last Year'][d] + 1)):
URL = 'http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=' + str(
stats['Station ID'][d]) + '&Year=' + str(y) + '&' + timeframe + '&submit= Download+Data'
r = requests.get(URL)
outfile = outrep1 + '\\' + stats['Climate ID'][d] + '_' + stats['Name'][d].replace('\\', '_').replace(
'/', '_').replace(' ', '_') + '_' + str(y) + '_' + time_step + '.csv'
with open(outfile, 'wb') as f:
f.write(r.content)
if __name__ == '__main__':
main()
| [
"ftplib.FTP",
"pandas.read_csv",
"requests.get",
"numpy.isnan",
"glob.os.makedirs",
"glob.os.path.exists"
] | [((257, 280), 'ftplib.FTP', 'FTP', (['"""ftp.tor.ec.gc.ca"""'], {}), "('ftp.tor.ec.gc.ca')\n", (260, 280), False, 'from ftplib import FTP\n'), ((615, 659), 'pandas.read_csv', 'pandas.read_csv', (['(OutDir + filename)'], {'header': '(3)'}), '(OutDir + filename, header=3)\n', (630, 659), False, 'import pandas\n'), ((160, 187), 'glob.os.path.exists', 'glob.os.path.exists', (['OutDir'], {}), '(OutDir)\n', (179, 187), False, 'import glob\n'), ((197, 221), 'glob.os.makedirs', 'glob.os.makedirs', (['OutDir'], {}), '(OutDir)\n', (213, 221), False, 'import glob\n'), ((3551, 3596), 'numpy.isnan', 'np.isnan', (["stats[time_step + ' First Year'][d]"], {}), "(stats[time_step + ' First Year'][d])\n", (3559, 3596), True, 'import numpy as np\n'), ((3605, 3649), 'numpy.isnan', 'np.isnan', (["stats[time_step + ' Last Year'][d]"], {}), "(stats[time_step + ' Last Year'][d])\n", (3613, 3649), True, 'import numpy as np\n'), ((3958, 3986), 'glob.os.path.exists', 'glob.os.path.exists', (['outrep1'], {}), '(outrep1)\n', (3977, 3986), False, 'import glob\n'), ((4004, 4029), 'glob.os.makedirs', 'glob.os.makedirs', (['outrep1'], {}), '(outrep1)\n', (4020, 4029), False, 'import glob\n'), ((4432, 4449), 'requests.get', 'requests.get', (['URL'], {}), '(URL)\n', (4444, 4449), False, 'import requests\n'), ((1745, 1785), 'numpy.isnan', 'np.isnan', (["row[time_step + ' First Year']"], {}), "(row[time_step + ' First Year'])\n", (1753, 1785), True, 'import numpy as np\n'), ((1794, 1833), 'numpy.isnan', 'np.isnan', (["row[time_step + ' Last Year']"], {}), "(row[time_step + ' Last Year'])\n", (1802, 1833), True, 'import numpy as np\n'), ((2182, 2210), 'glob.os.path.exists', 'glob.os.path.exists', (['outrep1'], {}), '(outrep1)\n', (2201, 2210), False, 'import glob\n'), ((2232, 2257), 'glob.os.makedirs', 'glob.os.makedirs', (['outrep1'], {}), '(outrep1)\n', (2248, 2257), False, 'import glob\n'), ((2770, 2787), 'requests.get', 'requests.get', (['URL'], {}), '(URL)\n', (2782, 2787), False, 'import requests\n')] |
from datetime import datetime
from django.utils import timezone
from django import forms
# from django.contrib.admin import widgets
# from django.core.exceptions import ValidationError
from django.shortcuts import get_object_or_404
from leaflet.forms.widgets import LeafletWidget
from tags.widgets import TagAutoSuggestSelect2
from .models import Project, ProjectAsset, Interview, InterviewAsset, LayerGroup, HeritageGISLayer, HeritageAsset,\
Place, AlternatePlaceName, CommonPlaceName
from assets.forms import SecureAssetForm
from geoinfo.forms import GISLayerForm
class ProjectForm(forms.ModelForm):
# Not needed for Heritage app.
# shapefile = forms.FileField(help_text="Upload a spatial data file for this project location.")
class Meta:
model = Project
# fields = '__all__'
fields = ['name', 'picture', 'phase_code', 'start_date', 'end_date', 'location', 'background']
# TODO Come up with a prettier format for materializecss help text.
# Using this here to override ugly help text.
help_texts = {
'phase_code': ""
}
class InterviewForm(forms.ModelForm):
'''
Splits the datetime field into two fields managed by separate materialized widgets:
time_picker, date_picker.
'''
time_picker = forms.TimeField(
label='Time',
initial=timezone.localtime(timezone.now()).time().strftime('%I:%M%p'),
input_formats=[
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30',
'%I:%M%p', # '5:30 AM',
]
)
date_picker = forms.DateField(
label='Date',
initial=timezone.now().date()
)
def __init__(self, *args, **kwargs):
project_pk = kwargs.pop('project_pk', None)
super(InterviewForm, self).__init__(*args, **kwargs)
# set date field to not required, we will set it's value in the clean via the time/date picker values.
self.fields['date'].required = False
if project_pk:
self.fields['phase'].queryset = Project.objects.filter(pk=project_pk)
# Raise 404 if the project doesn't exist.
self.fields['phase'].initial = get_object_or_404(Project, pk=project_pk)
self.project_instance = self.fields['phase'].initial
def clean_phase(self):
data = self.cleaned_data['phase']
if data != self.project_instance:
raise forms.ValidationError("Project must be set to:", str(self.project_instance))
return data
def clean_primary_interviewer(self):
data = self.cleaned_data['primary_interviewer']
if data is None:
raise forms.ValidationError("This field is required")
return data
def clean_date(self):
date = self.cleaned_data.get("date_picker")
time = self.cleaned_data.get("time_picker")
try:
date_and_time = datetime.combine(date, time)
return date_and_time
except TypeError as err:
raise forms.ValidationError("Failed to validate datetime:", str(err))
class Meta:
model = Interview
# fields = '__all__'
fields = ('phase', 'date_picker', 'time_picker', 'date', 'primary_interviewer', 'other_interviewers', 'participant_number',
'community', 'type', 'participants', 'attendees', )
class LayerGroupForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
interview_pk = kwargs.pop('interview_pk', None)
super(LayerGroupForm, self).__init__(*args, **kwargs)
# self.fields['primary_interviewer']
if interview_pk:
self.fields['interview'].queryset = Interview.objects.filter(pk=interview_pk)
# Raise 404 if the project doesn't exist.
self.fields['interview'].initial = get_object_or_404(Interview, pk=interview_pk)
self.interview_instance = self.fields['interview'].initial
def clean_interview(self):
data = self.cleaned_data['interview']
if data != self.interview_instance:
raise forms.ValidationError("Interview must be set to:", str(self.interview_instance))
return data
class Meta:
model = LayerGroup
fields = '__all__'
# Small note: this form requires that a devt project is given when the form
# is instantiated, hides that input on the form, and validates against it.
class HeritageGISLayerForm(GISLayerForm):
group_instance = None # We don't care what the user inputs, we only are interested in the preset development_project_instance.
def __init__(self, *args, **kwargs):
# Restrict project field to the actual project this layer will belong to. This must
# be popped off before the super is called:
group_pk = kwargs.pop('group_pk', None)
super(HeritageGISLayerForm, self).__init__(*args, **kwargs)
if group_pk is not None:
self.fields['group'].queryset = LayerGroup.objects.filter(pk=group_pk)
self.fields['group'].initial = LayerGroup.objects.get(pk=group_pk)
self.group_instance = self.fields['group'].initial # save for later clean_project method
# Double check that the project is what it is supposed to be.
def clean_group(self):
data = self.cleaned_data['group']
if data != self.group_instance:
raise forms.ValidationError("Dataset must be set to:",
str(self.group_instance))
return data
# This little ditty here overrides the form superclass fields
# and adds in the project.
class Meta(GISLayerForm.Meta):
model = HeritageGISLayer
fields = GISLayerForm.Meta.fields + ('group',)
# Asset forms
class HeritageAssetForm(SecureAssetForm):
class Meta:
model = HeritageAsset
fields = SecureAssetForm.Meta.fields
class ProjectAssetForm(SecureAssetForm):
heritage_project_instance = None # We don't care what the user inputs, we only are interested in the preset heritage_project_instance.
def __init__(self, *args, **kwgargs):
# Restrict project field to the actual project this layer will belong to. This must
# be popped off before the super is called:
h_project_id = kwgargs.pop('h_project_id', None)
if h_project_id is None:
raise TypeError('h_project_id is a required kwarg of ProjectAssetForm')
super(ProjectAssetForm, self).__init__(*args, **kwgargs)
self.fields['project'].queryset = Project.objects.filter(id=h_project_id)
self.fields['project'].initial = Project.objects.get(id=h_project_id)
self.heritage_project_instance = self.fields['project'].initial # save for later clean_project method
# Double check that the project is what it is supposed to be.
def clean_project(self):
data = self.cleaned_data['project']
if data != self.heritage_project_instance:
raise forms.ValidationError("Heritage Project must be set to:", str(self.heritage_project_instance))
return data
class Meta(SecureAssetForm.Meta):
model = ProjectAsset
fields = SecureAssetForm.Meta.fields + ('project',)
class InterviewAssetForm(SecureAssetForm):
heritage_interview_instance = None # We don't care what the user inputs, we only are interested in the preset heritage_interview_instance.
def __init__(self, *args, **kwgargs):
# Restrict interview field to the actual interview this layer will belong to. This must
# be popped off before the super is called:
h_interview_id = kwgargs.pop('h_interview_id', None)
if h_interview_id is None:
raise TypeError('h_interview_id is a required kwarg of InterviewAssetForm')
super(InterviewAssetForm, self).__init__(*args, **kwgargs)
self.fields['interview'].queryset = Interview.objects.filter(id=h_interview_id)
self.fields['interview'].initial = Interview.objects.get(id=h_interview_id)
self.heritage_interview_instance = self.fields['interview'].initial # save for later clean_interview method
# Double check that the interview is what it is supposed to be.
def clean_interview(self):
data = self.cleaned_data['interview']
if data != self.heritage_interview_instance:
raise forms.ValidationError("Heritage Interview must be set to:", str(self.heritage_interview_instance))
return data
class Meta(SecureAssetForm.Meta):
model = InterviewAsset
fields = SecureAssetForm.Meta.fields + ('interview',)
class PlaceForm(forms.ModelForm):
class Meta:
model = Place
fields = ('name', 'add_to_community_map', 'notes', 'geometry', 'gazetteer_names', 'place_types')
widgets = {
'geometry': LeafletWidget(),
'gazetteer_names': TagAutoSuggestSelect2(tagmodel='heritage.GazetteerNameTag', attrs={'startText': " "})
}
class PlaceNameForm(forms.ModelForm):
class Meta:
model = None
fields = '__all__'
class AlternatePlaceNameForm(PlaceNameForm):
class Meta(PlaceNameForm.Meta):
model = AlternatePlaceName
class CommonPlaceNameForm(PlaceNameForm):
class Meta(PlaceNameForm.Meta):
model = CommonPlaceName
| [
"tags.widgets.TagAutoSuggestSelect2",
"django.shortcuts.get_object_or_404",
"django.forms.ValidationError",
"django.utils.timezone.now",
"leaflet.forms.widgets.LeafletWidget",
"datetime.datetime.combine"
] | [((2194, 2235), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'project_pk'}), '(Project, pk=project_pk)\n', (2211, 2235), False, 'from django.shortcuts import get_object_or_404\n'), ((2670, 2717), 'django.forms.ValidationError', 'forms.ValidationError', (['"""This field is required"""'], {}), "('This field is required')\n", (2691, 2717), False, 'from django import forms\n'), ((2910, 2938), 'datetime.datetime.combine', 'datetime.combine', (['date', 'time'], {}), '(date, time)\n', (2926, 2938), False, 'from datetime import datetime\n'), ((3825, 3870), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Interview'], {'pk': 'interview_pk'}), '(Interview, pk=interview_pk)\n', (3842, 3870), False, 'from django.shortcuts import get_object_or_404\n'), ((8828, 8843), 'leaflet.forms.widgets.LeafletWidget', 'LeafletWidget', ([], {}), '()\n', (8841, 8843), False, 'from leaflet.forms.widgets import LeafletWidget\n'), ((8876, 8966), 'tags.widgets.TagAutoSuggestSelect2', 'TagAutoSuggestSelect2', ([], {'tagmodel': '"""heritage.GazetteerNameTag"""', 'attrs': "{'startText': ' '}"}), "(tagmodel='heritage.GazetteerNameTag', attrs={\n 'startText': ' '})\n", (8897, 8966), False, 'from tags.widgets import TagAutoSuggestSelect2\n'), ((1650, 1664), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1662, 1664), False, 'from django.utils import timezone\n'), ((1385, 1399), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1397, 1399), False, 'from django.utils import timezone\n')] |
from hooks.aws.aws_base_hook import AWSBaseHook
from botocore.exceptions import ClientError
from logger.log import Logger
import json
class LambdaHook(AWSBaseHook):
def __init__(self, aws_property):
self.client = self.get("lambda", aws_property, AWSBaseHook.Client)
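    # create_function deploys a package already staged in S3 (code_bucket/code_key) and publishes it as the initial version.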
def create_function(self, function_name, run_time, role, handler, code_bucket, code_key):
try:
return self.client.create_function(
FunctionName=function_name,
Runtime=run_time,
Role=role,
Handler=handler,
Code={
"S3Bucket": code_bucket,
"S3Key": code_key
},
Publish=True
)
except ClientError as e:
Logger.logger.exception("Couldn't create Lambda function. Error: " + str(e))
def invoke(self, function_name, payload: str, data):
try:
payload_json = json.loads(payload)
payload_json["data"] = data
return self.client.invoke(
FunctionName=function_name,
Payload=json.dumps(payload_json).encode('utf-8'),
Qualifier="$LATEST"
)
except ClientError as e:
Logger.logger.exception("Couldn't invoke Lambda function. Error: " + str(e))
| [
"json.loads",
"json.dumps"
] | [((962, 981), 'json.loads', 'json.loads', (['payload'], {}), '(payload)\n', (972, 981), False, 'import json\n'), ((1129, 1153), 'json.dumps', 'json.dumps', (['payload_json'], {}), '(payload_json)\n', (1139, 1153), False, 'import json\n')] |
from sqlalchemy.orm import Session
from sqlalchemy.orm.exc import NoResultFound
from uxi_celery_scheduler.data_models import ScheduledTask
from uxi_celery_scheduler.db.models import CrontabSchedule, PeriodicTask
from uxi_celery_scheduler.exceptions import PeriodicTaskNotFound
def schedule_task(
session: Session,
scheduled_task: ScheduledTask,
) -> PeriodicTask:
"""
Schedule a task by adding a periodic task entry.
"""
schedule = CrontabSchedule(**scheduled_task.schedule.dict())
task = PeriodicTask(
crontab=schedule,
name=scheduled_task.name,
task=scheduled_task.task,
)
session.add(task)
return task
def update_task_enabled_status(
session: Session,
enabled_status: bool,
periodic_task_id: int,
) -> PeriodicTask:
"""
Update task enabled status (if task is enabled or disabled).
"""
try:
task = session.query(PeriodicTask).get(periodic_task_id)
task.enabled = enabled_status
session.add(task)
except NoResultFound as e:
raise PeriodicTaskNotFound() from e
return task
def update_task(
session: Session,
scheduled_task: ScheduledTask,
periodic_task_id: int,
) -> PeriodicTask:
"""
Update the details of a task including the crontab schedule
"""
try:
task = session.query(PeriodicTask).get(periodic_task_id)
schedule = CrontabSchedule(**scheduled_task.schedule.dict())
task.crontab = schedule
task.name = scheduled_task.name
task.task = scheduled_task.task
session.add(task)
except NoResultFound as e:
raise PeriodicTaskNotFound() from e
return task
def delete_task(session: Session, periodic_task_id: int) -> PeriodicTask:
try:
task = session.query(PeriodicTask).get(periodic_task_id)
session.delete(task)
return task
except NoResultFound as e:
raise PeriodicTaskNotFound() from e
| [
"uxi_celery_scheduler.db.models.PeriodicTask",
"uxi_celery_scheduler.exceptions.PeriodicTaskNotFound"
] | [((520, 607), 'uxi_celery_scheduler.db.models.PeriodicTask', 'PeriodicTask', ([], {'crontab': 'schedule', 'name': 'scheduled_task.name', 'task': 'scheduled_task.task'}), '(crontab=schedule, name=scheduled_task.name, task=\n scheduled_task.task)\n', (532, 607), False, 'from uxi_celery_scheduler.db.models import CrontabSchedule, PeriodicTask\n'), ((1066, 1088), 'uxi_celery_scheduler.exceptions.PeriodicTaskNotFound', 'PeriodicTaskNotFound', ([], {}), '()\n', (1086, 1088), False, 'from uxi_celery_scheduler.exceptions import PeriodicTaskNotFound\n'), ((1643, 1665), 'uxi_celery_scheduler.exceptions.PeriodicTaskNotFound', 'PeriodicTaskNotFound', ([], {}), '()\n', (1663, 1665), False, 'from uxi_celery_scheduler.exceptions import PeriodicTaskNotFound\n'), ((1934, 1956), 'uxi_celery_scheduler.exceptions.PeriodicTaskNotFound', 'PeriodicTaskNotFound', ([], {}), '()\n', (1954, 1956), False, 'from uxi_celery_scheduler.exceptions import PeriodicTaskNotFound\n')] |