id | text | dataset_id
---|---|---
stringlengths 1-265 | stringlengths 6-5.19M | stringclasses 7 values
74956 | # -*- coding: utf-8 -*-
u'''
This script evaluates the performance of the following outlier detection method:
- Consensus Regularized Multi-View Outlier Detection (CMOD)
- DMOD
- HOAD
Arguments:
-c, --config: JSON file with the information required to insert data
-N, --datasetName: name of the imported dataset
-D, --dbms: Database management system used to import data (Oracle or MongoDB).
-f, --featuresImage: extracted features from image dataset. e.g -f "{'cnn':'AlexNet', 'layer':'Visual'}"
-m, --method: comma-separated list with the outlier detection methods to test (either CMOD, DMOD or HOAD)
-p, --params: string in JSON format with the method parameters and their values. e.g. -p "{'k':2, 'sigma':.1, 'm':1}"
Created on 26/2/2018
@author: <NAME> (<EMAIL>)
@Institution: Computer Vision Center - Universitat Autonoma de Barcelona
'''
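# A hypothetical invocation (script name, paths and values are illustrative only):
#   python evaluate_outliers.py -c config.json -N synthetic -D MongoDB \
#       -m "CMOD,DMOD" -p "{'k':2, 'sigma':.1, 'm':1}"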
__author__ = '<NAME>'
__email__ = '<EMAIL>'
import ast
import getopt
import glob
import json
import os.path
import sys
from pprint import pprint
from scipy.spatial import distance as dist
from sklearn import metrics
from OutlierDetector.CMOD import CMOD
from OutlierDetector.DMOD import DMOD
from OutlierDetector.HOAD import HOAD
from processData import config as cfg, mongoConnection as mg, oracleConnection as orcl, noConnection as nc
from processData.datasetInfo import datasetInfo as dsi
def getConf( confDict ):
confString = confDict.split(" ")
conf = []
for c in confString:
conf.append(tuple([int(x) for x in c.replace("(", "").replace(")", "").split(",")]))
return conf
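# Example (illustrative): getConf("(2,0) (3,1)") returns [(2, 0), (3, 1)]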
def loadData(dbms, nameDataset, params={}):
'''
Description: load data from the DBMS given the parameters passed by argument
:param dbms: object with the connection to the DB
:param nameDataset:
:param params:
:return: list of rows with the id of the image and the feature vector as numpy array
'''
data = dsi(nameDataset)
if "classList" in params:
data.classList = params["classList"]
else:
data.classList = []
if "layers" in params:
data.layers = params["layers"]
else:
data.layers = ["Visual"]
    if "cnn" in params:
        data.cnn = params["cnn"]
    else:
        data.cnn = ""
data.type = dbms.getDatasetType(nameDataset)
data.features, data.classIds = dbms.loadData(nameDataset, data)
return data
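# Illustrative call (dataset name and params are hypothetical):
#   info = loadData(db, "synthetic", {"layers": ["Visual"], "cnn": "AlexNet"})
#   info.features and info.classIds are then populated from the DBMS.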
if __name__ == '__main__':
# read commandline arguments, first
fullCmdArguments = sys.argv
#classPath = "jars"
dir_path = os.path.dirname(os.path.realpath(__file__))
#jarFiles = glob.glob(dir_path + '/' + classPath + '/' + '*.jar')
"""
Database default parameters
"""
DBMS = ""
    methods = ["DMOD", "CMOD", "HOAD"]
    method = methods[0]      # default when -m is not supplied
    paramsMethod = {}        # default when -p is not supplied
    numViews = 2
    params = {}
    datasetName = "Synthetic Data"
    params["numSamples"] = 200
isConfigFile = False
unixOptions = "hvc:f:N:m:D:p:"
    gnuOptions = ["help", "verbose", "config_file=", "datasetName=", "featuresImage=", "method=", "dbms=", "params="]
try:
arguments, values = getopt.getopt(fullCmdArguments[1:], unixOptions, gnuOptions)
except getopt.error as err:
# output error, and return with an error code
print (str(err))
sys.exit(2)
# evaluate given options
for currentArgument, currentValue in arguments:
if currentArgument in ("-v", "--verbose"):
print ("enabling verbose mode")
elif currentArgument in ("-h", "--help"):
print (__doc__)
sys.exit(0)
# print ("displaying help")
elif currentArgument in ("-c", "--config_file"):
configFile = currentValue
isConfigFile = True
elif currentArgument in ("-D", "--dbms"):
DBMS = currentValue
elif currentArgument in ("-m", "--method"):
method = currentValue
elif currentArgument in ("-N", "--datasetName"):
datasetName = currentValue.lower()
elif currentArgument in ("-p", "--params"):
paramsMethod = ast.literal_eval(currentValue)
elif currentArgument in ("-f", "--featuresImage"):
featuresImage = ast.literal_eval(currentValue)
if isConfigFile:
with open(configFile) as f:
data = json.load(f)
    if DBMS.lower() == "oracle":
        db = orcl.oracleConnexion(cfg.config(data["dbms"][DBMS.lower()]))
    elif DBMS.lower() == "mongodb":
        db = mg.mongoConnexion(cfg.config(data["dbms"][DBMS.lower()]))
    else:
        db = nc.noConnexion()
    """Start the session"""
    db.startSession()
    """Load the datasets stored in the DB"""
dataInfo = loadData(db, datasetName, params)
"""---"""
paramNames = []
if "data" in locals():
if "numIterations" in data:
numRepeticions = int(data['numIterations'])
else:
numRepeticions = 2
if "conf" in data:
confList = getConf(data['conf'])
else:
confList = [(2,0)]
else:
numRepeticions = 2
confList = [(2,0)]
    for conf in confList:
        """Initialize"""
if method.upper() == "DMOD":
od = DMOD(numViews)
if method.upper() == "CMOD":
od = CMOD(numViews)
if method.upper() == "HOAD":
od = HOAD(numViews)
        for i in range(numRepeticions):
            """For each repetition we must generate the dataset's outliers"""
            print("""
            ==================================================================
            Starting repetition {}
            """.format(i))
newFeatures, y, outliersGTIdx = od.prepareExperimentData(db, conf, datasetName, dataInfo, i, settings={'numViews':numViews})
idExperiment = db.insertExperiment(conf, i, method, paramsMethod)
            outliersIdx = od.detector(newFeatures, paramsMethod)
            """Compute the evaluation metrics"""
# Evaluate Outliers
            fpr, tpr, thresholds = metrics.roc_curve(y, outliersIdx, pos_label=1)
            auc = metrics.auc(fpr, tpr)
            """Insert the results into the DB"""
            db.insertResults(datasetName, idExperiment, fpr, tpr, auc, dataInfo)
            """Print the results to the console"""
valorsStr = "{}: {}".format(dataInfo, method)
for key in paramsMethod:
valorsStr = valorsStr + ", {}={}".format(key, paramsMethod[key])
            valorsStr = valorsStr + ", {}-{} (repetition {}): %.3f".format(conf[0], conf[1], i) % (auc)
print(valorsStr)
db.close()
    print("Experiments done")
sys.exit(0) | StarcoderdataPython |
1705065 | from unittest import TestCase
from regulations.generator.layers import tree_builder
from regulations.generator.node_types import REGTEXT
import itertools
class TreeBuilderTest(TestCase):
def build_tree(self):
child = {
'text': 'child text',
'children': [],
'label_id': '204-3',
'label': ['204', '3'],
'node_type': REGTEXT
}
tree = {
'text': 'parent text',
'children': [child],
'label_id': '204',
'label': ['204'],
'node_type': REGTEXT
}
return tree
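    # The fixture models a two-level regulation tree: part 204 with a single
    # child paragraph 204-3; deeper labels such as 204-3-a follow the same scheme.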
def test_build_tree_hash(self):
tree = self.build_tree()
tree['children'][0]['children'] = [{
'text': 'child child test',
'children': [],
'label_id': '204-3-a',
'label': ['204', '3', 'a'],
'node_type': REGTEXT
}]
tree_hash = tree_builder.build_tree_hash(tree)
self.assertEqual(set(tree_hash.keys()),
set(['204-3-a', '204-3', '204']))
def test_parent_in_tree(self):
tree = self.build_tree()
tree_hash = tree_builder.build_tree_hash(tree)
        self.assertEqual(set(tree_hash.keys()), set(['204-3', '204']))
self.assertTrue(tree_builder.parent_in_tree('204-3', tree_hash))
def test_add_node(self):
new_node = {
'text': 'new node text',
'children': [],
'label_id': '204-4',
'label': ['204', '4'],
'node_type': REGTEXT
}
tree = self.build_tree()
tree_hash = tree_builder.build_tree_hash(tree)
        self.assertEqual(set(tree_hash.keys()), set(['204-3', '204']))
self.assertEqual(len(tree_hash['204']['children']), 1)
tree_builder.add_node_to_tree(new_node, '204', tree_hash)
self.assertEqual(len(tree_hash['204']['children']), 2)
child_labels = [c['label_id'] for c in tree_hash['204']['children']]
self.assertEqual(child_labels, ['204-3', '204-4'])
def test_make_label_sortable_roman(self):
label = "iv"
sortable = tree_builder.make_label_sortable(label, roman=True)
self.assertEquals(sortable, (4,))
def test_make_label_sortable_not_roman(self):
label = "iv"
sortable = tree_builder.make_label_sortable(label)
self.assertEquals(sortable, ('iv',))
def test_parent_label(self):
node = {'node_type': 'REGTEXT', 'label': ['204', 'a', '1', 'ii']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204', 'a', '1'], parent_label)
node = {'node_type': 'INTERP', 'label': ['204', 'Interp']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204'], parent_label)
node = {'node_type': 'INTERP', 'label': ['204', '2', 'Interp']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204', 'Interp'], parent_label)
node = {'node_type': 'INTERP', 'label': ['204', '2', 'a', 'Interp']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204', '2', 'Interp'], parent_label)
node = {'node_type': 'INTERP',
'label': ['204', '2', 'Interp', '1']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204', '2', 'Interp'], parent_label)
node = {'node_type': 'INTERP',
'label': ['204', '2', 'Interp', '1', 'i']}
parent_label = tree_builder.parent_label(node)
self.assertEquals(['204', '2', 'Interp', '1'], parent_label)
def test_roman_nums(self):
first_five = list(itertools.islice(tree_builder.roman_nums(), 0, 5))
self.assertEquals(['i', 'ii', 'iii', 'iv', 'v'], first_five)
def test_add_child(self):
tree = self.build_tree()
child = {
'children': [],
'label': ['204', '2'],
'label_id': '204-2',
'node_type': REGTEXT,
'sortable': (2,),
'text': 'child text',
}
static_child = {
'children': [],
'label': ['204', '3'],
'label_id': '204-3',
'node_type': REGTEXT,
'sortable': (3,),
'text': 'child text',
}
static_tree = {
'children': [child, static_child],
'text': 'parent text',
'label': ['204'],
'label_id': '204',
'node_type': REGTEXT
}
tree_builder.add_child(tree, child)
self.assertEquals(static_tree, tree)
def test_add_child_appendix(self):
parent = {'children': [
{'node_type': 'APPENDIX', 'label': ['204', 'A', '1']},
{'node_type': 'APPENDIX', 'label': ['204', 'A', '3']},
]}
child_to_add = {'node_type': 'APPENDIX', 'label': ['204', 'A', '2(a)']}
tree_builder.add_child(parent, child_to_add)
self.assertEquals(
['204-A-1', '204-A-2(a)', '204-A-3'],
['-'.join(c['label']) for c in parent['children']]
)
def test_add_child_interp(self):
parent = {'children': [
{'node_type': 'INTERP', 'label': ['204', '4', 'Interp']},
{'node_type': 'INTERP', 'label': ['204', '2', 'Interp']}
], 'label': ['204', 'Interp']}
tree_builder.add_child(parent, {'node_type': 'INTERP',
'label': ['204', '3', 'Interp']})
self.assertEqual([(2, 2), (2, 3), (2, 4)],
[c['sortable'] for c in parent['children']])
prefix = ['204', '4', 'a', '2']
parent = {'children': [
{'node_type': 'INTERP', 'label': prefix + ['v', 'Interp']},
{'node_type': 'INTERP', 'label': prefix + ['iv', 'Interp']}
], 'label': ['204', '4', 'a', '2', 'Interp']}
tree_builder.add_child(parent, {'node_type': 'INTERP',
'label': prefix + ['ix', 'Interp']})
self.assertEqual([(4,), (5,), (9,)],
[c['sortable'] for c in parent['children']])
prefix = ['204', '4', 'Interp']
parent = {'children': [
{'node_type': 'INTERP', 'label': prefix + ['1']},
{'node_type': 'INTERP', 'label': prefix + ['3']}
], 'label': ['204', 'Interp']}
tree_builder.add_child(parent, {'node_type': 'INTERP',
'label': prefix + ['2']})
self.assertEqual([(2, 1), (2, 2), (2, 3)],
[c['sortable'] for c in parent['children']])
prefix = ['204', 'Interp', '2']
parent = {'children': [
{'node_type': 'INTERP', 'label': prefix + ['v']},
{'node_type': 'INTERP', 'label': prefix + ['iv']}
], 'label': prefix}
tree_builder.add_child(parent, {'node_type': 'INTERP',
'label': prefix + ['ix']})
self.assertEqual([(4,), (5,), (9,)],
[c['sortable'] for c in parent['children']])
def test_add_child_root_interp(self):
""" Let's add an introductory paragraph child to a root interpretation
node and ensure that the children are sorted correctly. """
parent = {'children': [
{'node_type': 'INTERP', 'label': ['204', '4', 'Interp']},
{'node_type': 'INTERP', 'label': ['204', '2', 'Interp']}
], 'label': ['204', 'Interp']}
tree_builder.add_child(parent, {'node_type': 'INTERP',
'label': ['204', 'Interp', 'h1']})
self.assertEqual([(1, 'h', 1), (2, 2), (2, 4)],
[c['sortable'] for c in parent['children']])
def test_add_child_odd_sort(self):
"""Appendices may have some strange orderings. Make sure they keep
order."""
parent = {'children': [], 'label': ['204', 'A'],
'node_type': 'appendix',
'child_labels': ['204-A-p1', '204-A-X', '204-A-L',
'204-A-h1']}
def mknode(label):
return {'node_type': 'appendix', 'label': label.split('-')}
tree_builder.add_child(parent, mknode('204-A-X'))
tree_builder.add_child(parent, mknode('204-A-L'))
tree_builder.add_child(parent, mknode('204-A-h1'))
tree_builder.add_child(parent, mknode('204-A-p1'))
self.assertEqual([['204', 'A', 'p1'], ['204', 'A', 'X'],
['204', 'A', 'L'], ['204', 'A', 'h1']],
[c['label'] for c in parent['children']])
def test_all_children_are_roman(self):
parent_node = {'children': [
{'label': ['201', '4', 'i']},
{'label': ['201', '4', 'ii']},
{'label': ['201', '4', 'iii']},
{'label': ['201', '4', 'iv']},
{'label': ['201', '4', 'v']},
]}
self.assertTrue(tree_builder.all_children_are_roman(parent_node))
parent_node = {'children': [
{'label': ['201', '4', 'a']},
{'label': ['201', '4', 'i']},
{'label': ['201', '4', 'v']},
]}
self.assertFalse(tree_builder.all_children_are_roman(parent_node))
def test_add_child_root_appendix(self):
""" Let's add an introductory paragraph child to a root interpretation
node and ensure that the children are sorted correctly. """
parent = {'children': [
{'node_type': 'appendix', 'label': ['204', 'A', '4', 'b', 'i']},
{'node_type': 'appendix', 'label': ['204', 'A', '4', 'b', 'v']}
], 'label': ['204', 'appendix']}
tree_builder.add_child(parent, {'node_type': 'appendix',
'label': ['204', 'A', '4', 'b', 'ii']})
self.assertEqual([(1,), (2,), (5,)],
[c['sortable'] for c in parent['children']])
parent = {'children': [
{'node_type': 'appendix', 'label': ['204', 'A', '4', 'b']},
{'node_type': 'appendix', 'label': ['204', 'A', '4', 'i']}
], 'label': ['204', 'appendix']}
tree_builder.add_child(parent, {'node_type': 'appendix',
'label': ['204', 'A', '4', 'g']})
self.assertEqual([('b',), ('g',), ('i',)],
[c['sortable'] for c in parent['children']])
| StarcoderdataPython |
1680823 | """An example of solving a reinforcement learning problem by using evolution to
tune the weights of a neural network."""
import os
import sys
import gym
from gym import spaces
from matplotlib import pyplot as plt
import numpy as np
from leap_ec import Individual, Representation, test_env_var
from leap_ec import probe, ops
from leap_ec.algorithm import generational_ea
from leap_ec.executable_rep import problems, executable, neural_network
from leap_ec.int_rep.ops import individual_mutate_randint
from leap_ec.real_rep.initializers import create_real_vector
from leap_ec.real_rep.ops import mutate_gaussian
##############################
# Function build_probes()
##############################
def build_probes(genomes_file):
"""Set up probes for writings results to file and terminal and
displaying live metric plots."""
assert(genomes_file is not None)
probes = []
# Print fitness stats to stdout
probes.append(probe.FitnessStatsCSVProbe(stream=sys.stdout))
# Save genome of the best individual to a file
probes.append(probe.AttributesCSVProbe(
stream=genomes_file,
best_only =True,
do_fitness=True,
do_genome=True))
# Open a figure to plot a fitness curve to
plt.figure()
plt.ylabel("Fitness")
plt.xlabel("Generations")
plt.title("Best-of-Generation Fitness")
probes.append(probe.FitnessPlotProbe(
ylim=(0, 1), xlim=(0, 1),
modulo=1, ax=plt.gca()))
# Open a figure to plot the best-of-gen network graph to
plt.figure()
probes.append(neural_network.GraphPhenotypeProbe(
modulo=1, ax=plt.gca(),
weights=True, weight_multiplier=3.0))
return probes
##############################
# Entry point
##############################
if __name__ == '__main__':
# Parameters
runs_per_fitness_eval = 5
simulation_steps = 500
pop_size = 5
num_hidden_nodes = 4
mutate_std = 0.05
gui = False # Change to true to watch the cart-pole visualization
# When running the test harness, just run for two generations
# (we use this to quickly ensure our examples don't get bitrot)
if os.environ.get(test_env_var, False) == 'True':
generations = 2
else:
generations = 1000
# Load the OpenAI Gym simulation
environment = gym.make('CartPole-v0')
# Representation
num_inputs = 4
num_actions = environment.action_space.n
# Decode genomes into a feed-forward neural network,
# but also wrap an argmax around the networks so their
# output is a single integer
decoder = executable.WrapperDecoder(
wrapped_decoder=neural_network.SimpleNeuralNetworkDecoder(
shape=(num_inputs, num_hidden_nodes, num_actions)
),
decorator=executable.ArgmaxExecutable)
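    # Assumption for clarity (not stated in this file): decoder.wrapped_decoder.length,
    # used below to size the genomes, is the total weight count implied by the
    # network shape (num_inputs, num_hidden_nodes, num_actions).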
with open('./genomes.csv', 'w') as genomes_file:
ea = generational_ea(max_generations=generations, pop_size=pop_size,
# Solve a problem that executes agents in the
# environment and obtains fitness from it
problem=problems.EnvironmentProblem(
runs_per_fitness_eval, simulation_steps, environment, 'reward', gui=gui),
representation=Representation(
initialize=create_real_vector(bounds=([[-1, 1]]*decoder.wrapped_decoder.length)),
decoder=decoder),
# The operator pipeline.
pipeline=[
ops.tournament_selection,
ops.clone,
mutate_gaussian(std=mutate_std, hard_bounds=(-1, 1), expected_num_mutations=1),
ops.evaluate,
ops.pool(size=pop_size),
*build_probes(genomes_file) # Inserting all the probes at the end
])
list(ea)
| StarcoderdataPython |
63029 | import socket
import sys
import time
import pigpio
from threading import Thread
import os
host = '192.168.1.64'
port = 80
red = 27#17#22
green = 17#27
blue = 22#17
BOWL = 'Empty'
ROOM = b'Mild'
MODE = 'TEMP'
DEPTH = b'Empty'
p = pigpio.pi()
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def reset():
rl = p.set_PWM_dutycycle(red, 0)
gl = p.set_PWM_dutycycle(green, 0)
bl = p.set_PWM_dutycycle(blue, 0)
p.stop()
def rd(br):
rl = p.set_PWM_dutycycle(red, br)
def gr(br):
gl = p.set_PWM_dutycycle(green, br)
def bu(br):
bl = p.set_PWM_dutycycle(blue, br)
CALI = False
try:
s.bind((host, port))
except socket.error as msg:
print("Failed to bind. Error:", str(msg))
try:
port += 1
s.bind((host, port))
except socket.error as msg:
print("Failed to bind. Error:", str(msg))
sys.exit()
def temp_change():
    global ROOM
    while MODE == 'TEMP':
with os.popen('vcgencmd measure_temp') as measurement:
cel = measurement.read().strip()
degrees = int(cel[5:len(cel)-4])-15
percent = (degrees/26)
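        # e.g. cel == "temp=47.8'C" gives cel[5:7] == "47", so (47 - 15) / 26 ~ 1.23;
        # the implied 15-41 C operating range is an assumption read off these offsets.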
if CALI == False:
if percent >= 0.9:
rd(229)
gr(0)
bu(5)
ROOM = b'Hot'
elif percent >= 0.8:
rd(231)
gr(47)
bu(4)
ROOM = b'Warm'
elif percent >= 0.7:
rd(234)
gr(98)
bu(9)
ROOM = b'Moderate'
elif percent >= 0.6:
rd(236)
gr(148)
bu(14)
ROOM = b'Mild'
elif percent >= 0.5:
rd(239)
gr(198)
bu(50)
ROOM = b'Cool'
elif percent >= 0.4:
rd(237)
gr(242)
bu(100)
ROOM = b'Cold'
elif percent >= 0.3:
rd(229)
gr(252)
bu(150)
ROOM = b'Cold'
else:
rd(150)
gr(150)
bu(200)
ROOM = b'Cold'
print(ROOM)
time.sleep(5)
print("Host: ", host, " Port: ", port)
Thread(target=temp_change).start()
s.listen(100)
rd(0)
gr(0)
bu(0)
while 1:
conn, addr = s.accept()
print("Connected with ", addr[0], ":", addr[1])
data = conn.recv(1024)
#Connect to the ultrasonic sensor (Bowl sensor)
if data == b'I am an ultrasonic sensor':
time.sleep(1) #Pause to wait for incoming instruction
while 1: #Now listen for data
try: #Try and receive an instruction
data = conn.recv(1024)
except ConnectionResetError: #If no instruction was received
break #Ignore operation and carry on as normal
if not data: #Again, if no instruction was received
break
if data == b'I am calibrating': #The bowl is calibrating its depth
#Full PURPLE
CALI = True
gr(0)
bu(255)
rd(255)
print("Calibrating")
elif data == b'Fill bowl': #User must then fill the bowl with water
#Full YELLOW
CALI = True
bu(10)
rd(255)
gr(100)
print("Fill bowl")
elif data == b'CHECK': #When the client completes a check
CALI = False
print("Calibration done")
break #Nothing to do. We just wait until it connects again.
if MODE == 'BOWL':
if data == b'Full': #Interactive lightning shows bowl depth on hub
#Full GREEN
rd(0)
bu(0)
gr(255)
DEPTH = b'Full'
elif data == b'Low': #****
#ORANGE
rd(255)
gr(120)
DEPTH = b'Low'
elif data == b'Empty': #****
#Full RED
rd(255)
gr(0)
DEPTH = b'Empty'
elif data == b'Done': #When the client completes calibration
#Full Green
rd(0)
bu(0)
gr(55)
break
print("Depth ", DEPTH)
elif data == b'mode bowl':
MODE = 'BOWL'
print("Mode changed to bowl")
elif data == b'mode temp':
MODE = 'TEMP'
print("Mode changed to temp")
CALI = False
Thread(target=temp_change).start()
elif data == b'give temp':
time.sleep(1)
conn.send(b'temp '+ROOM)
print(ROOM)
elif data == b'give bowl':
time.sleep(1)
conn.send(b'bowl '+DEPTH)
print(DEPTH)
elif not data or data == b'close':
conn.close()
s.close()
reset()
sys.exit()
try:
conn.close()
s.close()
reset()
except:
pass
| StarcoderdataPython |
3220834 | import os
import sys
import pygame
import pygame_gui
# setting path
sys.path.append(os.path.join("."))
from pysimgame.utils.gui_utils import UI_TOGGLEBUTTON_TOGGLED, UIToggleButton
pygame.init()
pygame.display.set_caption("Quick Start")
window_surface = pygame.display.set_mode((1400, 1000))
manager = pygame_gui.UIManager(
(1400, 1000),
theme_path=os.path.join(os.path.dirname(__file__), "theme.json"),
)
background = pygame.Surface((1400, 1000))
background.fill(manager.ui_theme.get_colour("dark_bg"))
toggle_button = UIToggleButton(
pygame.Rect(200, 100, 150, 50), "toggle", manager
)
hello_button = pygame_gui.elements.UIButton(
relative_rect=pygame.Rect((0, 0), (150, 40)),
text="Hello",
manager=manager,
)
clock = pygame.time.Clock()
is_running = True
while is_running:
time_delta = clock.tick(60) / 1000.0
for event in pygame.event.get():
if event.type == pygame.QUIT:
is_running = False
if (
event.type == pygame.USEREVENT
and event.user_type == pygame_gui.UI_BUTTON_PRESSED
and event.ui_element == hello_button
):
print("Hello World!")
if (
event.type == pygame.USEREVENT
and event.ui_element == toggle_button
):
print(event)
if (
event.type == pygame.USEREVENT
and event.user_type == UI_TOGGLEBUTTON_TOGGLED
):
print(event)
print("Toggled ", event.value)
##if event.type == pygame.USEREVENT:
# print(event)
manager.process_events(event)
manager.update(time_delta)
window_surface.blit(background, (0, 0))
manager.draw_ui(window_surface)
# print(hello_button.hovered, menu.hovered)
# print(hello_button.layer, menu.layer)
# print(hello_button.alive(), menu.layer)
#
pygame.display.update()
| StarcoderdataPython |
3204287 | # geecomposite/widgets/export.py (repo: fitoprincipe/gee-composite)
from geetools import batch
from ipywidgets import *
class toAsset(VBox):
def __init__(self, **kwargs):
super(toAsset, self).__init__(**kwargs)
layout = Layout(width='500px')
self.bapwidget = kwargs.get('bapwidget')
self.root = kwargs.get('root', '')
self.scale = Text(description='scale', value='10')
self.destination = Select(description='Destination',
options=['ImageCollection', 'Folder'],
value='ImageCollection',
layout=layout)
self.folder = Text(description='Path to the ImageCollection',
value=self.root,
layout=layout,
style = {'description_width': 'initial'})
self.nameSub = Text(description='Name of/for the ImageCollection',
layout=layout,
style = {'description_width': 'initial'})
self.name = Text(description='Name for the Image',
layout=layout,
style = {'description_width': 'initial'})
self.exportB = Button(description='Export')
self.bands = SelectMultiple(description='Bands', layout=layout)
self.bands.observe(self.observeBands)
self.exportB.on_click(self.export)
self.destination.observe(self.observeDestination)
        self.children = [self.destination, self.folder, self.nameSub,
                         self.name, self.bands, self.scale, self.exportB]
def observeDestination(self, v):
if v['name'] == 'value':
value = v['new']
self.folder.description = "Path to the {}".format(value)
self.nameSub.description = "Name of/for the {}".format(value)
def observeBands(self, v):
extra = ['col_id', 'date', 'score']
if v['name'] == 'options':
bands = list(v['new'])
condition = all([b in bands for b in extra])
if not condition:
self.bands.options = bands+extra
def getAssetPath(self):
return "{}/{}".format(self.folder.value, self.nameSub.value)
def getAssetId(self):
return "{}/{}".format(self.getAssetPath(), self.name.value)
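    # Illustrative: folder "users/me/composites", nameSub "summer2019" and name
    # "jan" yield the asset id "users/me/composites/summer2019/jan".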
def export(self, v=None):
bands = self.bands.value
composite = self.bapwidget.composite().select(bands)
batch.Export.image.toAsset(composite, self.getAssetPath(),
self.name.value, self.destination.value,
float(self.scale.value),
self.bapwidget.site_widget.getRegion())
| StarcoderdataPython |
3280840 | from django.contrib import admin
from user.models import User
# Register your models here.
admin.site.register(User) | StarcoderdataPython |
1631937 | # files/models.py
from django.db import models
from django.utils import timezone
# Create your models here.
class Photo(models.Model):
"""
Photos Table
"""
client = models.OneToOneField("home.Client", on_delete=models.CASCADE)
passport = models.ImageField("Passport Size", blank=True, upload_to="client/%Y/%m/%d/")
full = models.ImageField("Full Size", blank=True, upload_to="client/%Y/%m/%d/")
others = models.ImageField(upload_to="client/%Y/%m/%d/", blank=True)
created = models.DateTimeField(auto_now=True)
def __str__(self):
return self.client.first_name
class ClearanceFile(models.Model):
"""
Clearance Files Table
"""
title = models.CharField(max_length=100, unique=True)
clearance_no = models.CharField("Clearance Number", max_length=100, unique=True)
submission_date = models.DateTimeField(default=timezone.now)
clearance_date = models.DateTimeField(default=timezone.now)
client_count = models.IntegerField(default=0)
description = models.TextField(null=True, blank=True)
document = models.FileField(upload_to='clients/%Y/%m/%d/', blank=True)
created = models.DateField(default=timezone.now)
def __str__(self):
return self.title
class ClientFile(models.Model):
"""
Client Files Table
"""
client = models.ForeignKey("home.Client", on_delete=models.CASCADE)
title = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
document = models.FileField(upload_to='clients/%Y/%m/%d/', blank=True)
created = models.DateField(default=timezone.now)
def __str__(self):
return self.title | StarcoderdataPython |
178695 | # run_placesCNN_basic.py
# PlacesCNN for scene classification
#
# by <NAME>
# last modified by <NAME>, Dec.27, 2017 with latest pytorch and torchvision (upgrade your torchvision please if there is trn.Resize error)
import torch
from torch.autograd import Variable as V
import torchvision.models as models
from torchvision import transforms as trn
from torch.nn import functional as F
import os
from PIL import Image
import csv
import pandas as pd
# th architecture to use
arch = 'resnet50'
# load the pre-trained weights
model_file = '%s_places365.pth.tar' % arch
if not os.access(model_file, os.W_OK):
weight_url = 'http://places2.csail.mit.edu/models_places365/' + model_file
os.system('wget ' + weight_url)
model = models.__dict__[arch](num_classes=365)
checkpoint = torch.load(model_file, map_location=lambda storage, loc: storage)
state_dict = {str.replace(k,'module.',''): v for k,v in checkpoint['state_dict'].items()}
model.load_state_dict(state_dict)
model.eval()
# load the image transformer
centre_crop = trn.Compose([
trn.Resize((256,256)),
trn.CenterCrop(224),
trn.ToTensor(),
trn.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# load the class label
file_name = 'categories_places365.txt'
if not os.access(file_name, os.W_OK):
synset_url = 'https://raw.githubusercontent.com/csailvision/places365/master/categories_places365.txt'
os.system('wget ' + synset_url)
classes = list()
with open(file_name) as class_file:
for line in class_file:
classes.append(line.strip().split(' ')[0][3:])
classes = tuple(classes)
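# Note: the softmax output computed below yields one probability per entry of
# this tuple, in the same order as the categories file.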
"""
Folder structure:
content
|
+-- downloaded_videos
|
+-- frames
| |
| +-- folder with frames inside
|
+-- places365-nb_colab
| |
| +-- run_placesCNN_basic.py
"""
frame_folder = os.path.join(os.getcwd(), 'frames') #if only the file is uploaded
final_predictions = list()
video_df_colnames = pd.concat([pd.Series(['media_id', 'frame_nr']), pd.Series(classes)])
for video_folder in os.listdir('frames'):
media_id = video_folder
for frame in os.listdir(os.path.join(frame_folder, video_folder)):
frame_nr = frame[frame.rfind('_')+1:-4]
img_name = os.path.join(frame_folder, video_folder, frame)
img = Image.open(img_name)
input_img = V(centre_crop(img).unsqueeze(0))
# forward pass
logit = model.forward(input_img)
h_x = F.softmax(logit, 1).data.squeeze()
            # keep the unsorted probabilities so each value lines up with its class column
            probs_as_list = h_x.tolist()
            final_predictions.append([media_id, frame_nr] + probs_as_list)
df = pd.DataFrame(final_predictions, columns=video_df_colnames)
df.to_csv('scene_prediction_values.csv', index=False)
| StarcoderdataPython |
42256 | # self_finance/front_end/routes/reference.py
from flask import render_template
from self_finance.front_end import app
def _standard_render():
return render_template("reference.html")
@app.route('/reference')
def reference():
return _standard_render()
| StarcoderdataPython |
1747017 | import sys
import pygame
from time import sleep
from settings import Settings
from ship import Ship
from bullet import Bullet
from alien import Alien
from game_stats import GameStats
from button import Button
from scoreboard import Scoreboard
from difficulty import Difficulty
class AlienInvasion:
    '''Manage game assets and overall behavior for Alien Invasion.'''
def __init__(self):
'''init the game screen'''
pygame.init()
        # Game settings
self.settings = Settings()
        # Screen
self.screen = pygame.display.set_mode((self.settings.screen_width, self.settings.screen_height))
self.ship = Ship(self)
self.bullets = pygame.sprite.Group()
self.aliens = pygame.sprite.Group()
        # Difficulty option buttons
self.difficulty = Difficulty(self)
self.gaming = False
self._create_fleet()
        # Window title
pygame.display.set_caption("Alien Invasion")
self.stats = GameStats(self)
self.play_button = Button(self, "Play")
self.sb = Scoreboard(self)
self.fire_sound = pygame.mixer.Sound("music/Explosion_Cannon_Fire_02.wav")
self.ship_hit_sound = pygame.mixer.Sound("music/Explosion_With_Debris_01.wav")
self.game_over_sound = pygame.mixer.Sound("music/Gameover.wav")
def run_game(self):
'''start game'''
while True:
            # Helper methods; note the leading underscore in the names
self._check_events()
if self.stats.game_active and self.stats.mode_selection_active:
self.ship.update()
self._update_bullet()
self._update_aliens()
self._update_screen()
def _check_events(self):
        # Watch for keyboard and mouse events
for event in pygame.event.get():
if event.type == pygame.QUIT:
self._store_highscore()
sys.exit()
elif event.type == pygame.KEYDOWN:
self._check_keydown_events(event)
elif event.type == pygame.KEYUP:
self._check_keyup_events(event)
elif event.type == pygame.MOUSEBUTTONDOWN:
mouse_pos = pygame.mouse.get_pos()
if not self.stats.game_active:
self._check_play_button(mouse_pos)
elif not self.stats.mode_selection_active:
self._check_mode_button(mouse_pos)
def _check_keydown_events(self, event):
if event.key == pygame.K_RIGHT:
            # Move right: increase center_x
self.ship.moving_right = True
elif event.key == pygame.K_LEFT:
            # Move left: update the movement flag
self.ship.moving_left = True
elif event.key == pygame.K_q:
self._store_highscore()
sys.exit()
elif event.key == pygame.K_SPACE:
self._fire_bullet()
elif event.key == pygame.K_p and not self.stats.game_active:
self._start_game()
self.settings.initialize_dynamic_settings()
def _check_keyup_events(self, event):
        '''Respond to key releases'''
if event.key == pygame.K_RIGHT:
            # Right arrow released: clear the flag
self.ship.moving_right = False
elif event.key == pygame.K_LEFT:
self.ship.moving_left = False
def _check_play_button(self, mouse_pos):
        '''Start a new game when the player clicks Play'''
if self.gaming or self.stats.mode_selection_active:
return
button_clicked = self.play_button.rect.collidepoint(mouse_pos)
if button_clicked and not self.stats.game_active:
self.stats.game_active = True
self.stats.mode_selection_active = False
return
def _check_mode_button(self, mouse_pos):
if self.gaming or not self.stats.game_active:
return
mode_selected = ""
mode_clicked = False
for mode, mode_rect in self.difficulty.mode_rects.items():
mode_clicked = mode_rect.collidepoint(mouse_pos)
# print(mode, mode_clicked)
if mode_clicked and self.stats.game_active and not self.stats.mode_selection_active:
mode_selected = mode
self.stats.mode_selection_active = True
break
if self.stats.game_active and self.stats.mode_selection_active:
self._start_game()
self.settings.initialize_dynamic_settings(mode_selected)
self.sb.prep_score()
self.sb.prep_level()
self.sb.prep_ships()
def _start_game(self):
self.stats.reset_stats()
self.stats.game_active = True
self.stats.mode_selection_active = True
self.gaming = True
self.aliens.empty()
self.bullets.empty()
self._create_fleet()
self.ship.center_ship()
pygame.mouse.set_visible(False)
def _update_screen(self):
        # Redraw the screen on every pass through the loop
self.screen.fill(self.settings.bg_color)
self.ship.blitme()
        # Draw the bullets
for bullet in self.bullets.sprites():
bullet.draw_bullet()
self.aliens.draw(self.screen)
self.sb.show_score()
if not self.stats.game_active and not self.stats.mode_selection_active:
self.play_button.draw_button()
elif self.stats.game_active and not self.stats.mode_selection_active:
self.difficulty.draw_modes()
        # Make the most recently drawn screen visible
pygame.display.flip()
def _update_bullet(self):
self.bullets.update()
        # Remove bullets that have left the screen
        for bullet in self.bullets.copy():  # iterate over a copy: a list must not change length mid-loop
if bullet.rect.bottom <= 0:
self.bullets.remove(bullet)
self._check_bullet_alien_collisions()
def _check_bullet_alien_collisions(self):
collisions = pygame.sprite.groupcollide(self.bullets, self.aliens, True, True)
if collisions:
for aliens in collisions.values():
self.stats.score += self.settings.alien_points * len(aliens)
self.sb.prep_score()
self.sb.check_high_score()
if not self.aliens:
self.bullets.empty()
self._create_fleet()
self.settings.increase_speed()
self.stats.level += 1
self.sb.prep_level()
def _fire_bullet(self):
if len(self.bullets) < self.settings.bullets_allowed:
new_bullet = Bullet(self)
self.bullets.add(new_bullet)
self.fire_sound.play(loops=0, maxtime=500)
def _create_fleet(self):
        '''Create the fleet of aliens'''
alien = Alien(self)
alien_width, alien_height = alien.rect.size
ship_height = self.ship.rect.height
available_space_x = self.settings.screen_width - (2 * alien_width)
available_space_y = self.settings.screen_height - \
(3 * alien_width) - ship_height
number_rows = available_space_y // (2 * alien_height)
number_aliens_x = available_space_x // (2 * alien_width)
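        # e.g. a 1200 px wide screen and 60 px wide aliens give
        # (1200 - 120) // 120 = 9 aliens per row (illustrative numbers only)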
for row_number in range(number_rows):
for alien_number in range(number_aliens_x):
self._create_alien(alien_number, row_number)
def _create_alien(self, alien_number, row_number):
        '''Create an alien and place it in the current row'''
alien = Alien(self)
alien_width, alien_height = alien.rect.size
alien.x = alien_width + 2 * alien_width * alien_number
alien.rect.x = alien.x
alien.y = alien_height + 2 * alien_height * row_number
alien.rect.y = alien.y
self.aliens.add(alien)
def _check_fleet_edges(self):
        '''Respond appropriately if any aliens have reached an edge'''
for alien in self.aliens.sprites():
if alien.check_edges():
self._change_fleet_direction()
break
def _change_fleet_direction(self):
        '''Drop the entire fleet and change its direction'''
for alien in self.aliens.sprites():
alien.rect.y += self.settings.fleet_drop_speed
self.settings.fleet_direction *= -1
def _update_aliens(self):
        '''Update the positions of all the aliens in the fleet'''
self._check_fleet_edges()
self.aliens.update()
if pygame.sprite.spritecollideany(self.ship, self.aliens):
self._ship_hit()
self._check_aliens_bottom()
def _ship_hit(self):
if self.stats.ships_left > 0:
self.stats.ships_left -= 1
self.sb.prep_ships()
self.aliens.empty()
self.bullets.empty()
self._create_fleet()
self.ship.center_ship()
self.ship_hit_sound.play(loops=0, maxtime=1000)
sleep(0.5)
else:
self.stats.game_active = False
self.stats.mode_selection_active = False
self.gaming = False
self.game_over_sound.play()
pygame.mouse.set_visible(True)
# print(self.stats.game_active, self.stats.mode_selection_active)
def _check_aliens_bottom(self):
screen_rect = self.screen.get_rect()
for alien in self.aliens.sprites():
if alien.rect.bottom >= screen_rect.bottom:
self._ship_hit()
break
def _store_highscore(self):
with open("score.txt", 'w') as f:
f.write(str(self.stats.high_score))
if __name__ == '__main__':
    # Create a game instance and run the game
ai = AlienInvasion()
ai.run_game()
| StarcoderdataPython |
64837 | from os import environ
from cocotb_usb.host import UsbTest
from cocotb_usb.host_valenty import UsbTestValenty
TARGET = environ.get('TARGET')
def get_harness(dut, **kwargs):
'''
Helper function to assign test harness object.
Object is chosen using ``TARGET`` environment variable.
'''
if TARGET == 'valentyusb':
dut_csrs = environ['DUT_CSRS'] # We want a KeyError if this is unset
cdc = int(environ['TEST_CDC']) # We want a KeyError if this is unset
harness = UsbTestValenty(dut, dut_csrs, cdc, **kwargs)
else: # No target matched
harness = UsbTest(dut, **kwargs) # base class
return harness
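# Illustrative environment setup (values are examples, not from the source):
#   TARGET=valentyusb DUT_CSRS=build/csr.csv TEST_CDC=1  selects UsbTestValenty;
#   any other TARGET falls back to the base UsbTest.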
| StarcoderdataPython |
27537 | import os
import glob
import json
import unittest
import satsearch.config as config
from satstac import Item
from satsearch.search import SatSearchError, Search
class Test(unittest.TestCase):
path = os.path.dirname(__file__)
results = []
@classmethod
def setUpClass(cls):
fnames = glob.glob(os.path.join(cls.path, '*-item*.json'))
for fname in fnames:
with open(fname) as f:
cls.results.append(json.load(f))
def get_searches(self):
""" Initialize and return search object """
return [Search(datetime=r['properties']['datetime']) for r in self.results]
def test_search_init(self):
""" Initialize a search object """
search = self.get_searches()[0]
dts = [r['properties']['datetime'] for r in self.results]
assert(len(search.kwargs) == 1)
assert('time' in search.kwargs)
for kw in search.kwargs:
self.assertTrue(search.kwargs[kw] in dts)
def test_search_for_items_by_date(self):
""" Search for specific item """
search = self.get_searches()[0]
sids = [r['id'] for r in self.results]
items = search.items()
assert(len(items) == 1)
for s in items:
self.assertTrue(s.id in sids)
def test_empty_search(self):
""" Perform search for 0 results """
search = Search(datetime='2001-01-01')
self.assertEqual(search.found(), 0)
def test_geo_search(self):
""" Perform simple query """
with open(os.path.join(self.path, 'aoi1.geojson')) as f:
aoi = json.dumps(json.load(f))
search = Search(datetime='2019-07-01', intersects=aoi)
assert(search.found() == 13)
items = search.items()
assert(len(items) == 13)
assert(isinstance(items[0], Item))
def test_search_sort(self):
""" Perform search with sort """
with open(os.path.join(self.path, 'aoi1.geojson')) as f:
aoi = json.dumps(json.load(f))
search = Search.search(datetime='2019-07-01/2019-07-07', intersects=aoi, sort=['<datetime'])
items = search.items()
assert(len(items) == 27)
def test_get_items_by_id(self):
""" Get Items by ID """
ids = ['LC81692212019263', 'LC81691102019263']
items = Search.items_by_id(ids, collection='landsat-8-l1')
assert(len(items) == 2)
def test_get_ids_search(self):
""" Get Items by ID through normal search """
ids = ['LC81692212019263', 'LC81691102019263']
search = Search.search(ids=ids, collection='landsat-8-l1')
items = search.items()
assert(search.found() == 2)
assert(len(items) == 2)
def test_get_ids_without_collection(self):
with self.assertRaises(SatSearchError):
search = Search.search(ids=['LC80340332018034LGN00'])
items = search.items()
def test_query_bad_url(self):
with self.assertRaises(SatSearchError):
Search.query(url=os.path.join(config.API_URL, 'collections/nosuchcollection'))
def test_search_property_operator(self):
expected = {'query': {'eo:cloud_cover': {'lte': '10'}, 'collection': {'eq': 'sentinel-2-l1c'}}}
instance = Search.search(collection='sentinel-2-l1c',
property=['eo:cloud_cover<=10'])
actual = instance.kwargs
assert actual == expected
| StarcoderdataPython |
101067 | import torch
from torch.utils.data import DataLoader, TensorDataset
from argparse import Namespace
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import h5py
import json
import os
def load_data_1scale(hdf5_file, ndata, batch_size, singlescale=True):
    with h5py.File(hdf5_file, 'r') as f:
        x_data = f['train'][:ndata]
    data_tuple = (torch.FloatTensor(x_data),)
    data_loader = DataLoader(TensorDataset(*data_tuple),
                             batch_size=batch_size, shuffle=True, drop_last=True)
    return data_loader
def load_data_2scales(hdf5_file,hdf5_file1, ndata, batch_size, singlescale=False):
with h5py.File(hdf5_file, 'r') as f:
x2_data = f['train'][:ndata]
with h5py.File(hdf5_file1, 'r') as f:
x1_data = f['train'][:ndata]
    data_tuple = (torch.FloatTensor(x2_data), torch.FloatTensor(x1_data))
data_loader = DataLoader(TensorDataset(*data_tuple),
batch_size=batch_size, shuffle=True, drop_last=True)
print(f'Loaded dataset: {hdf5_file}')
return data_loader
def load_data_3scales(hdf5_file,hdf5_file1,hdf5_file2, ndata, batch_size, singlescale=False):
with h5py.File(hdf5_file, 'r') as f:
x3_data = f['train'][:ndata]
with h5py.File(hdf5_file1, 'r') as f:
x2_data = f['train'][:ndata]
with h5py.File(hdf5_file2, 'r') as f:
x1_data = f['train'][:ndata]
    data_tuple = (torch.FloatTensor(x3_data), torch.FloatTensor(x2_data), torch.FloatTensor(x1_data))
data_loader = DataLoader(TensorDataset(*data_tuple),
batch_size=batch_size, shuffle=True, drop_last=True)
return data_loader
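# Minimal usage sketch (file names are illustrative, not from the source):
#   loader = load_data_2scales('coarse.h5', 'fine.h5', ndata=512, batch_size=16)
#   x2, x1 = next(iter(loader))  # one FloatTensor batch per scale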
| StarcoderdataPython |
4833595 | from viusitemapparser.sitemap_file import SitemapFile
import logging
from viusitemapparser.url_functions import check_if_url
import requests
import os.path
def get_file(filename):
result_file = SitemapFile(filename)
try:
# If remote file: use requests
if check_if_url(filename):
result = requests.get(filename)
            if 200 <= result.status_code < 300:
result_file.set_remote_file_from_requests(result)
else:
result_file.error_receiving_remote_file(f"Error receiving '{filename}' - status code {result.status_code}")
# else local file: read it from filesystem
else:
if os.path.isfile(filename):
with open(filename, 'r') as f:
result_file.set_local_file(f.read())
else:
result_file.error_receiving_local_file(f"There is no file '{filename}'")
except Exception as e:
error_message = f"Unable to receive file: {e}"
logging.error(error_message)
result_file.error_receiving_file(error_message)
return result_file
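# Illustrative usage; the same entry point handles remote and local sitemaps:
#   sm = get_file('https://example.com/sitemap.xml')
#   sm = get_file('sitemaps/local_sitemap.xml')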
| StarcoderdataPython |
3271661 | # repo: gabrielgomesml/AlgorithmAndDataStructureActivities
class GrafoLista:
def __init__(self, iteravel, ponderado=False, direcionado=False):
self.iteravel = iteravel
self.ponderado = ponderado
self.direcionado = direcionado
self.listaDeAdj = {}
self.criarListas(iteravel, ponderado, direcionado)
    def __str__(self):
        return '\n'.join(str(x) + ': ' + str(self.listaDeAdj[x])
                         for x in self.listaDeAdj)
def __repr__(self):
return 'GrafoLista(' + str(self.iteravel) + ')'
    def __getitem__(self, index):
        if not index in self.listaDeAdj:
            return '[' + str(index) + '] not found.'
        return self.listaDeAdj[index]
def ligados(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not found. Invalid format.'
        if tupla[0] not in self.listaDeAdj or tupla[1] not in self.listaDeAdj:
            return '[' + str(tupla) + '] not found. Vertex pair not present in the graph.'
if not self.ponderado:
if tupla[1] in self.listaDeAdj[tupla[0]] or tupla[0] in self.listaDeAdj[tupla[1]]:
return True
elif self.ponderado:
            for x in self.listaDeAdj[tupla[0]]:
                if tupla[1] == x[0]:
                    return True
            for x in self.listaDeAdj[tupla[1]]:
                if tupla[0] == x[0]:
                    return True
return False
def grauDeSaida(self, vertice):
if not vertice in self.listaDeAdj:
            return '[' + str(vertice) + '] not found.'
return len(self.listaDeAdj[vertice])
def grauDeEntrada(self, vertice):
if not vertice in self.listaDeAdj:
            return '[' + str(vertice) + '] not found.'
aux = 0
if not self.ponderado:
for x in self.listaDeAdj:
if vertice in self.listaDeAdj[x]:
aux += 1
elif self.ponderado:
for x in self.listaDeAdj:
for y in self.listaDeAdj[x]:
                    if vertice == y[0]:
aux += 1
return aux
def adjacente(self, vertice):
if not vertice in self.listaDeAdj:
            return '[' + str(vertice) + '] not found.'
return self.listaDeAdj[vertice]
def maiorAresta(self):
if not self.ponderado:
            return 'Unweighted graph.'
maiores = []
aux = 0
for x in self.listaDeAdj:
for y in self.listaDeAdj[x]:
if y[1] > aux:
aux = y[1]
for x in self.listaDeAdj:
for y in self.listaDeAdj[x]:
if y[1] == aux :
maiores.append((x, y[0]))
        return 'Edges of weight: ' + str(aux) + ' // ' + 'Vertices joined by them: ' + str(maiores)
def menorAresta(self):
if not self.ponderado:
            return 'Unweighted graph.'
menores = []
        aux = float('inf')  # sentinel: larger than any edge weight
for x in self.listaDeAdj:
for y in self.listaDeAdj[x]:
if y[1] < aux:
aux = y[1]
for x in self.listaDeAdj:
for y in self.listaDeAdj[x]:
if y[1] == aux :
menores.append((x, y[0]))
        return 'Edges of weight: ' + str(aux) + ' // ' + 'Vertices joined by them: ' + str(menores)
    def adicionaVertice(self, vertice):
        if type(vertice) != int:
            return '[' + str(vertice) + '] not added. Invalid format.'
        if vertice in self.listaDeAdj:
            return '[' + str(vertice) + '] already exists.'
        else:
            self.listaDeAdj[vertice] = []
def adicionaAresta(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not added. Invalid format.'
        if self.ponderado and len(tupla) != 3:
            return '[' + str(tupla) + '] not added. Invalid format for weighted tuples.'
        elif not self.ponderado and len(tupla) != 2:
            return '[' + str(tupla) + '] not added. Invalid format for unweighted tuples.'
        if tupla[0] not in self.listaDeAdj or tupla[1] not in self.listaDeAdj:
            return '[' + str(tupla) + '] not added. Vertex pair not present in the graph.'
self.criarListas([tupla], self.ponderado, self.direcionado)
if type(self.iteravel[0]) is tuple:
self.iteravel += tuple(tupla),
elif type(self.iteravel[0]) is list:
self.iteravel.append(list(tupla))
        print('[' + str(tupla) + '] added.')
def removeAresta(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif len(tupla) != 2:
            return '[' + str(tupla) + '] not found. Invalid format.'
        if tupla[0] not in self.listaDeAdj or tupla[1] not in self.listaDeAdj:
            return '[' + str(tupla) + '] not removed. Vertex pair not present in the graph.'
if not self.direcionado:
self.listaDeAdj[tupla[0]].remove(tupla[1])
self.listaDeAdj[tupla[1]].remove(tupla[0])
else:
self.listaDeAdj[tupla[0]].remove(tupla[1])
        print('[' + str(tupla) + '] removed.')
def criarListas(self, iteravel, ponderado, direcionado):
if ponderado:
for tupla in iteravel:
origem, destino, peso = tupla
if not origem in self.listaDeAdj:
self.listaDeAdj[origem] = []
self.listaDeAdj[origem].append((destino, peso))
if not direcionado:
if not destino in self.listaDeAdj:
self.listaDeAdj[destino] = []
self.listaDeAdj[destino].append((origem, peso))
else:
for tupla in iteravel:
origem, destino = tupla
if not origem in self.listaDeAdj:
self.listaDeAdj[origem] = []
self.listaDeAdj[origem].append(destino)
if not direcionado:
if not destino in self.listaDeAdj:
self.listaDeAdj[destino] = []
self.listaDeAdj[destino].append(origem)
def dfs(self, v, antecessores, marcados):
marcados[v] = True
if self.ponderado:
for x in self.adjacente(v):
if not marcados[x[0]]:
antecessores[x[0]] = v
self.dfs(x[0], antecessores, marcados)
else:
for x in self.adjacente(v):
if not marcados[x]:
antecessores[x] = v
self.dfs(x, antecessores, marcados)
def buscaEmProfundidade(self):
marcados = {}
antecessores = {}
for x in self.listaDeAdj:
marcados[x] = False
antecessores[x] = -1
for v in marcados:
if not marcados[v]:
self.dfs(v, antecessores, marcados)
return antecessores
def buscaEmLargura(self):
marcados = {}
antecessores = {}
vertices = []
for x in self.listaDeAdj:
marcados[x] = False
antecessores[x] = -1
for y in marcados:
if not marcados[y]:
vertices.append(y)
marcados[y] = True
while len(vertices) > 0:
v = vertices.pop(0)
if self.ponderado:
for z in self.adjacente(v):
if not marcados[z[0]]:
marcados[z[0]] = True
antecessores[z[0]] = v
vertices.append(z[0])
else:
for z in self.adjacente(v):
if not marcados[z]:
marcados[z] = True
antecessores[z] = v
vertices.append(z)
return antecessores
def converterListaMatriz(self):
return GrafoMatrizes(self.iteravel, self.ponderado, self.direcionado)
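# Illustrative usage (the edge list is hypothetical):
#   g = GrafoLista([(0, 1, 5), (1, 2, 3)], ponderado=True)
#   g.adjacente(0)       # -> [(1, 5)]
#   g.buscaEmLargura()   # -> predecessor map such as {0: -1, 1: 0, 2: 1}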
import numpy as np
class GrafoMatrizes:
def __init__(self, iteravel, ponderado=False, direcionado=False):
self.iteravel = iteravel
self.ponderado = ponderado
self.direcionado = direcionado
self.v = []
for duplas in self.iteravel:
if self.ponderado:
for vertice in duplas[:len(duplas)-1]:
if vertice not in self.v:
self.v.append(vertice)
else:
for vertice in duplas:
if vertice not in self.v:
self.v.append(vertice)
self.matrizesDeAdj = self.criarMatrizes(iteravel, ponderado, direcionado)
def index(self, vertice):
aux = 0
for n in range(len(self.v)):
if self.v[n] != vertice:
aux += 1
else:
return aux
def criarMatrizes(self, iteravel, ponderado, direcionado):
tam = len(self.v)
corpo = np.zeros((tam,tam), int)
if not self.ponderado:
if not self.direcionado:
for x in iteravel:
corpo[self.index(x[0])][self.index(x[1])] = 1
corpo[self.index(x[1])][self.index(x[0])] = 1
else:
for x in iteravel:
corpo[self.index(x[0])][self.index(x[1])] = 1
else:
if not self.direcionado:
for x in iteravel:
corpo[self.index(x[0])][self.index(x[1])] = x[2]
corpo[self.index(x[1])][self.index(x[0])] = x[2]
else:
for x in iteravel:
corpo[self.index(x[0])][self.index(x[1])] = x[2]
return corpo
    def __str__(self):
        colunas = str([x for x in self.v])
        colunas = '  ' + colunas[1:len(colunas) - 1].replace(',', '')
        linhas = [colunas]
        for x in range(len(self.matrizesDeAdj)):
            linhas.append(str(self.v[x]) + ' ' + str(self.matrizesDeAdj[x]))
        return '\n'.join(linhas)
def __repr__(self):
return 'GrafoMatrizes(' + str(self.iteravel) + ')'
    def __getitem__(self, index):
arestas = {}
if not self.ponderado:
for x in range(len(self.matrizesDeAdj)):
if index not in arestas and self.matrizesDeAdj[index][x] != 0:
arestas[index] = [(index, x)]
elif index in arestas and self.matrizesDeAdj[index][x] != 0:
arestas[index].append((index, x))
if self.direcionado:
if index not in arestas and self.matrizesDeAdj[x][index] != 0:
arestas[index] = [(x, index)]
elif index in arestas and self.matrizesDeAdj[x][index] != 0:
arestas[index].append((x, index))
else:
for x in range(len(self.matrizesDeAdj)):
if index not in arestas and self.matrizesDeAdj[index][x] != 0:
arestas[index] = [(index, x, self.matrizesDeAdj[index][x])]
elif index in arestas and self.matrizesDeAdj[index][x] != 0:
arestas[index].append((index, x, self.matrizesDeAdj[index][x]))
if self.direcionado:
if index not in arestas and self.matrizesDeAdj[x][index] != 0:
arestas[index] = [(x, index, self.matrizesDeAdj[index][x])]
elif index in arestas and self.matrizesDeAdj[x][index] != 0:
arestas[index].append((x, index, self.matrizesDeAdj[index][x]))
return arestas[index]
def grauDeSaida(self, vertice):
if vertice not in self.v:
            return '[' + str(vertice) + '] not found.'
aux = 0
for n in range(len(self.v)):
if self.matrizesDeAdj[self.index(vertice)][n] != 0:
aux += 1
return aux
def grauDeEntrada(self, vertice):
if not self.direcionado:
return self.grauDeSaida(vertice)
else:
aux = 0
for n in range(len(self.v)):
if self.matrizesDeAdj[n][self.index(vertice)] != 0:
aux += 1
return aux
def ligados(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif len(tupla) != 2:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif tupla[0] not in self.v or tupla[1] not in self.v:
            return '[' + str(tupla) + '] not found. Vertex pair not present in the graph.'
if self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] or self.matrizesDeAdj[self.index(tupla[1])][self.index(tupla[0])] != 0:
return True
return False
def adjacente(self, vertice):
if vertice not in self.v:
            return '[' + str(vertice) + '] not found.'
else:
lista = []
for x in self.v:
if not self.direcionado:
if self.matrizesDeAdj[self.index(vertice)][self.index(x)] != 0:
lista.append(x)
else:
if self.matrizesDeAdj[self.index(vertice)][self.index(x)] != 0:
lista.append(x)
if self.matrizesDeAdj[self.index(x)][self.index(vertice)] != 0 and x not in lista:
lista.append(x)
return lista
def maiorAresta(self):
if not self.ponderado:
            return 'Unweighted graph.'
maiores = []
lista = []
for x in self.matrizesDeAdj:
maiores.append(max(x))
maior = max(maiores)
for n in self.v:
for m in self.v:
if self.matrizesDeAdj[self.index(n)][self.index(m)] == maior and (m,n) not in lista:
lista.append((n, m))
        return 'Edges of weight: ' + str(maior) + ' // ' + 'Vertices joined by them: ' + str(lista)
def menorAresta(self):
if not self.ponderado:
            return 'Unweighted graph.'
pesos = []
lista = []
for x in self.matrizesDeAdj:
for y in x:
if y != 0:
pesos.append(y)
menor = min(pesos)
for n in self.v:
for m in self.v:
if self.matrizesDeAdj[self.index(n)][self.index(m)] == menor and (m,n) not in lista:
lista.append((n, m))
        return 'Edges of weight: ' + str(menor) + ' // ' + 'Vertices joined by them: ' + str(lista)
def removeAresta(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif len(tupla) != 2:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif tupla[0] not in self.v or tupla[1] not in self.v:
            return '[' + str(tupla) + '] not removed. Vertex pair not present in the graph.'
if not self.direcionado:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = 0
self.matrizesDeAdj[self.index(tupla[1])][self.index(tupla[0])] = 0
else:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = 0
        print('[' + str(tupla) + '] removed.')
def adicionaAresta(self, tupla):
        if type(tupla) is int:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif not self.ponderado and len(tupla) != 2:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif self.ponderado and len(tupla) != 3:
            return '[' + str(tupla) + '] not found. Invalid format.'
        elif tupla[0] not in self.v or tupla[1] not in self.v:
            return '[' + str(tupla) + '] not added. Vertex pair not present in the graph.'
if not self.ponderado:
if not self.direcionado:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = 1
self.matrizesDeAdj[self.index(tupla[1])][self.index(tupla[0])] = 1
else:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = 1
else:
if not self.direcionado:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = tupla[2]
self.matrizesDeAdj[self.index(tupla[1])][self.index(tupla[0])] = tupla[2]
else:
self.matrizesDeAdj[self.index(tupla[0])][self.index(tupla[1])] = tupla[2]
if type(self.iteravel[0]) is tuple:
self.iteravel += tuple(tupla),
elif type(self.iteravel[0]) is list:
self.iteravel.append(list(tupla))
        print('[' + str(tupla) + '] added.')
def adicionaVertice(self, vertice):
        if vertice in self.v:
            return '[' + str(vertice) + '] already exists.'
        self.v.append(vertice)
        self.matrizesDeAdj = self.criarMatrizes(self.iteravel, self.ponderado, self.direcionado)
        print('[' + str(vertice) + '] added.')
def dfs(self, vertice, antecessor, marcado):
marcado[self.index(vertice)] = True
for u in self.adjacente(vertice):
if not marcado[self.index(u)]:
antecessor[self.index(u)] = vertice
self.dfs(u, antecessor, marcado)
def buscaEmProfundidade(self):
marcado = len(self.v) * [False]
antecessor = len(self.v) * [-1]
for vertice in self.v:
if not marcado[self.index(vertice)]:
self.dfs(vertice, antecessor, marcado)
for i in range(0, len(self.v)):
print(str(i) + ': ' + str(antecessor[i]))
def buscaEmLargura(self):
marcado = len(self.v) * [False]
antecessor = len(self.v) * [-1]
vertices = []
for i in self.v:
if not marcado[self.index(i)]:
vertices.append(i)
marcado[self.index(i)] = True
while len(vertices) > 0:
v = vertices.pop(0)
for u in self.adjacente(v):
if not marcado[self.index(u)]:
marcado[self.index(u)] = True
antecessor[self.index(u)] = v
vertices.append(u)
for i in range(0, len(self.v)):
print(str(i) + ': ' + str(antecessor[i]))
def converterMatrizLista(self):
return GrafoLista(self.iteravel, self.ponderado, self.direcionado) | StarcoderdataPython |
26800 | '''def operations(a,b,c):
if(c=='+'):
return a+b
elif(c=='-'):
return a-b
elif(c=='*'):
return a*b
elif(c=='/'):
return a/b
elif(c=='%'):
return a%b
elif(c=='**'):
return a**b
elif(c=='//'):
return a//b
else:
print("Non specfied soperation")
print(operations(10,5,'+'))
print(operations(10,5,'-'))
print(operations(10,5,'*'))
print(operations(10,5,'/'))
print(operations(10,5,'%'))
print(operations(10,2,'**'))
print(operations(10,3,'//'))
print(operations((int(input("Enter a "))),int(input("Enter b ")),input("Enter c ")))'''
def evaluate():
print(eval(input("Enter an arithmetic expression: ")))
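# Caution: eval() executes arbitrary Python, so this trusts the user completely;
# ast.literal_eval or a dedicated expression parser would be the safer choice.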
evaluate() | StarcoderdataPython |
89340 | <filename>hello-python/hello-world.py<gh_stars>1-10
#!/usr/bin/python
print "Hello, World." | StarcoderdataPython |
3395474 | from shapely.geometry import Point
import geopandas as gpd
pnt1 = Point(80.99456, 7.86795)
pnt2 = Point(80.97454, 7.872174)
points_df = gpd.GeoDataFrame({"geometry": [pnt1, pnt2]}, crs="EPSG:4326")
points_df = points_df.to_crs("EPSG:5234")
points_df2 = points_df.shift() # We shift the dataframe by 1 to align pnt1 with pnt2
points_df.distance(points_df2)
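# The result is a GeoSeries of row-wise distances in metres (EPSG:5234 is a
# projected CRS for Sri Lanka); row 0 is NaN because shift() leaves it without
# a counterpart.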
| StarcoderdataPython |
3389187 | #!/usr/bin/env python
import os
from app import create_app, freezer
from flask_script import Manager
from config import basedir
from itertools import chain
from jinja2 import Template
import datetime
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
@manager.command
def build():
app = create_app('production')
    if app.config['SHOW_DRAFTS'] or app.debug or app.testing:
print('WARNING!! This build will show drafts or is running in debug or testing mode.')
freezer.init_app(app)
freezer.freeze()
buildSitemap()
@manager.command
def test():
import nose
import sys
from config import basedir
nose.run(
argv = [sys.argv[0],
os.path.join(basedir, 'tests')]
)
def buildSitemap():
template_path = os.path.join(basedir, 'app/templates/sitemap.xml')
directory = os.path.join(basedir, 'app/build')
endpoints = findHTML(directory)
template = ''
with open(template_path, 'r') as f:
template = Template(f.read())
with open(os.path.join(directory, 'sitemap.xml'), 'w') as f:
f.write(template.render(pages = endpoints))
def findHTML(directory):
base = os.path.join(basedir, 'app/build')
endpoints = []
subdirs = []
contents = os.listdir(directory)
for item in contents:
path = os.path.join(directory, item)
if item.endswith('.html'):
url = path.replace(base, 'http://kylerjohnston.com')
endpoints.append([url, getLastMod(path)])
if os.path.isdir(path):
subdirs.append(path)
# Recurse through all subdirectories
if len(subdirs) > 0:
for subdir in subdirs:
ep = findHTML(subdir)
for e in ep:
endpoints.append(e)
return endpoints
def getLastMod(f):
return datetime.date.fromtimestamp(os.path.getmtime(f)).strftime('%Y-%m-%d')
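# Usage (sketch): assuming this file is the project's manage.py, `python
# manage.py build` freezes the site and writes sitemap.xml, while `python
# manage.py test` runs the nose suite.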
if __name__ == '__main__':
manager.run()
| StarcoderdataPython |
138084 | <filename>src/cars/CarHuman.py<gh_stars>1-10
from src.cars.Car import Car
class CarHuman(Car):
def __init__(self, track):
super(CarHuman, self).__init__(track)
| StarcoderdataPython |
56730 | <gh_stars>0
import sys
import tensorflow as tf2
DEFAULT_GPU_LIST = [0, 1, 2]
SCALE = 2
MEMORY_LENGTH = 1000000
STACK_LENGTH = 4
BATCH_SIZE = 64
LEARNING_RATE = 0.00025
GAMMA = 0.9
EPSILON = 1.0
EPSILON_MIN = 0.01
EPSILON_DECAY = 0.00003
GIVEN_GPU = [int(sys.argv[1])] if len(sys.argv) > 1 else DEFAULT_GPU_LIST
def get_strategy(gpu_visible=None):
gpu_total = tf2.config.experimental.list_physical_devices(device_type="GPU")
gpu_candidates = []
if gpu_visible is None:
gpu_visible = GIVEN_GPU
for gpu_id in gpu_visible:
if 0 <= gpu_id < len(gpu_total):
gpu_candidates.append(gpu_total[gpu_id])
tf2.config.experimental.set_visible_devices(devices=gpu_candidates, device_type="GPU")
print("gpu_total :", gpu_total, "| gpu_candidates :", gpu_candidates)
strategy = tf2.distribute.OneDeviceStrategy(device="/cpu:0")
if len(gpu_candidates) == 1:
strategy = tf2.distribute.OneDeviceStrategy(device="/gpu:0")
elif len(gpu_candidates) > 1:
strategy = tf2.distribute.MirroredStrategy()
return strategy
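
# Example (sketch, not part of the original module): build a Keras model under
# the selected strategy's scope.
if __name__ == "__main__":
    strategy = get_strategy()
    with strategy.scope():
        model = tf2.keras.Sequential([tf2.keras.layers.Dense(1)])
    print("replicas in sync:", strategy.num_replicas_in_sync)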
| StarcoderdataPython |
4454 | <reponame>claws/adsb
import asyncio
import datetime
import logging
import socket
from . import protocol
from typing import Tuple
from asyncio import AbstractEventLoop
logger = logging.getLogger(__name__)
class Server(object):
def __init__(
self,
host: str = "localhost",
port: int = 30003,
backlog=100,
loop: AbstractEventLoop = None,
) -> None:
self.loop = loop or asyncio.get_event_loop()
self.host = host
self._requested_port = port
self.port = None
self.backlog = backlog
self.listener = None
self.protocols = {}
async def start(self) -> None:
""" Start the server """
try:
self.listener = await self.loop.create_server(
lambda: protocol.SBSServerProtocol(self),
self.host,
self._requested_port,
family=socket.AF_INET,
backlog=self.backlog,
) # type: asyncio.Server
# Fetch actual port in use. This can be different from the
# specified port if the port was passed as 0 which means use
# an ephemeral port.
assert len(self.listener.sockets) == 1
_, self.port = self.listener.sockets[0].getsockname()
except asyncio.CancelledError:
logger.exception("Connection waiter Future was cancelled")
except Exception:
logger.exception("An error occurred in start")
async def stop(self) -> None:
""" Stop the server """
if self.listener:
# Avoid iterating over the protocols dict which may change size
# while it is being iterating over.
peers = list(self.protocols)
for peer in peers:
prot = self.protocols.get(peer)
if prot:
prot.close()
self.listener.close()
def register_protocol(
self, peer: Tuple[str, int], prot: "SBSServerProtocol"
) -> None:
""" Register a protocol instance with the server.
:param peer: Tuple of (host:str, port:int).
:param prot: a SBSServerProtocol instance.
"""
self.protocols[peer] = prot
def deregister_protocol(self, peer: Tuple[str, int]) -> None:
""" De-register a protocol instance from the server.
This peer will no longer receive messages.
:param peer: Tuple of (host:str, port:int).
"""
del self.protocols[peer]
def send_message(self, msg: bytes, peer: Tuple[str, int] = None) -> None:
""" Send a message.
:param msg: A bytes object representing the SBS format message to
send to peers. The message is assumed to include the end of
message delimiter.
:param peer: A specific peer to send the message to. Peer is a
Tuple of (host:str, port:int). If not specified then the message
is broadcast to all peers.
"""
if self.protocols:
if peer:
prot = self.protocols.get(peer)
if prot:
prot.send_message(msg)
else:
raise Exception(
f"Server can't send msg to non-existant peer: {peer}"
)
else:
# broadcast message to all peers
for peer, prot in self.protocols.items():
prot.send_message(msg)
else:
raise Exception("Server can't send msg, no peers available")
| StarcoderdataPython |
3366838 | <filename>quad_mesh_to_rgba/coastlines.py
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import cartopy
import bokeh.plotting
figure = bokeh.plotting.figure(sizing_mode='stretch_both',
match_aspect=True)
# Struggling to find appropriate extent for cartopy feature
# Left, right, bottom, top
x0, x1, y0, y1 = (0, 21, 0, 11)
extent = x0, x1, y0, y1
def coastlines(figure, scale="110m", extent=None):
"""Add cartopy coastline to a figure
Translates cartopy.feature.COASTLINE object
into collection of bokeh lines
.. note:: This method assumes the map projection
is cartopy.crs.PlateCarreee
:param figure: bokeh figure instance
:param scale: cartopy coastline scale '110m', '50m' or '10m'
:param extent: x_start, x_end, y_start, y_end
"""
coastline = cartopy.feature.COASTLINE
coastline.scale = scale
for geometry in coastline.intersecting_geometries(extent):
figure.line(*geometry[0].xy,
color='black',
level='overlay')
coastlines(figure,
scale="10m",
extent=extent)
# Custom normalisation
norm = mpl.colors.Normalize(vmin=0,
vmax=200)
# Imshow
ni, nj = 10, 10
values = np.arange(ni * nj).reshape(ni, nj)
mappable = plt.imshow(values, norm=norm)
rgba = mappable.to_rgba(mappable.get_array(),
bytes=True).reshape(ni, nj, 4)
figure.image_rgba(image=[rgba],
x=0,
y=0,
dh=ni,
dw=nj)
# Imshow
ni, nj = 10, 10
values = np.arange(ni * nj).reshape(ni, nj)
values += 100
mappable = plt.imshow(values, norm=norm)
rgba = mappable.to_rgba(mappable.get_array(),
bytes=True).reshape(ni, nj, 4)
figure.image_rgba(image=[rgba],
x=nj - 1,
y=1,
dh=ni,
dw=nj)
# Set bokeh x/y range extents
figure.x_range.start = x0
figure.x_range.end = x1
figure.y_range.start = y0
figure.y_range.end = y1
bokeh.plotting.show(figure)
| StarcoderdataPython |
124383 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'Problem 0009: given an HTML file, find all the links inside it.'
__author__ = 'Drake-Z'
import os, re
from html.parser import HTMLParser
from html.entities import name2codepoint
class MyHTMLParser(HTMLParser):
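    # Prints the href of every <a> tag whose value starts with "http".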
def handle_starttag(self, tag, attrs):
if tag == 'a':
for (variables, value) in attrs:
if variables == 'href':
if re.match(r'http(.*?)', value):
print(value)
if __name__ == '__main__':
with open('test.html', encoding='utf-8') as html:
parser = MyHTMLParser()
parser.feed(html.read()) | StarcoderdataPython |
39971 | <gh_stars>1-10
import os
import tensorflow as tf
import random
import numpy as np
import matplotlib.pyplot as plt
# uncomment for inline for the notebook:
# %matplotlib inline
import pickle
# enter the directory where the training images are:
TRAIN_DIR = 'train/'
IMAGE_SIZE = 512
train_image_file_names = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)]
# to decode a single png img:
# graph = tf.Graph()
# with graph.as_default():
# file_name = tf.placeholder(dtype=tf.string)
# file1 = tf.read_file(file_name)
# image = tf.image.decode_png(file1)
# with tf.Session(graph=graph) as session:
# tf.global_variables_initializer().run()
# image_vector = session.run(image, feed_dict={
# file_name: train_image_file_names[1]})
# print(image_vector)
# session.close()
# method to decode many png images:
def decode_image(image_file_names, resize_func=None):
images = []
graph = tf.Graph()
with graph.as_default():
file_name = tf.placeholder(dtype=tf.string)
file1 = tf.read_file(file_name)
image = tf.image.decode_png(file1)
# , channels=3) <-- use three channels for rgb pictures
k = tf.placeholder(tf.int32)
tf_rot_img = tf.image.rot90(image, k=k)
# im_rot = tf.placeholder(tf.float32, shape=(IMAGE_SIZE, IMAGE_SIZE, 3))
tf_flip_img = tf.image.flip_left_right(tf_rot_img)
with tf.Session(graph=graph) as session:
tf.global_variables_initializer().run()
for i in range(len(image_file_names)):
for j in range(4): # rotation at 0, 90, 180, 270 degrees
rotated_img = session.run(tf_rot_img, feed_dict={
file_name: image_file_names[i], k: j})
images.append(rotated_img)
flipped_img = session.run(
tf_flip_img, feed_dict={
file_name: image_file_names[i], k: j})
images.append(flipped_img)
if (i+1) % 1000 == 0:
print('Images processed: ', i+1)
session.close()
return images
train_images = decode_image(train_image_file_names)
print('shape train: ', np.shape(train_images))
# Let's see some of the images
# for i in range(10,14):
# plt.imshow(train_images[i].reshape([IMAGE_SIZE,IMAGE_SIZE]), cmap=plt.get_cmap('gray'))
# plt.show()
# for rgb images:
# for i in range(10,20):
# plt.imshow(train_images[i])
# plt.show()
def create_batch(data, label, batch_size):
i = 0
    while i * batch_size < len(data):
with open(label + '_' + str(i) + '.pickle', 'wb') as handle:
content = data[(i * batch_size):((i+1) * batch_size)]
pickle.dump(content, handle)
print('Saved', label, 'part #' + str(i),
'with', len(content), 'entries.')
i += 1
# Create one hot encoding for labels
# labels = [[1., 0.] if 'dog' in name else [0., 1.] for name in train_image_file_names]
# these are all real images, so let's encode them all with 1's
labels = [[1., 0.] for name in train_image_file_names]
# TO EXPORT DATA WHEN RUNNING LOCALLY - UNCOMMENT THESE LINES
# a batch with 5000 images has a size of around 3.5 GB
# create_batch(labels, 'pickled/', np.shape(train_images)[0])
create_batch(train_images, 'pickled/', np.shape(train_images)[0])
print('done creating dataset')
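# Example (sketch): reload a pickled batch later; the file name follows the
# pattern used by create_batch() above (label + '_' + part index + '.pickle').
# with open('pickled/_0.pickle', 'rb') as handle:
#     batch = pickle.load(handle)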
| StarcoderdataPython |
1786763 | # -*- coding: latin-1 -*-
# This program is public domain
# Author: <NAME>
"""
Define unit conversion support for NeXus style units.
The unit format is somewhat complicated. There are variant spellings
and incorrect capitalization to worry about, as well as forms such as
"mili*metre" and "1e-7 seconds".
This is a minimal implementation. It does not support the complete
dimensional analysis provided by the package UDUnits on which NeXus is
based, or even all the units used in the NeXus definition files.
Unlike other units modules, this module does not carry the units along
with the value, but merely provides a conversion function for
transforming values.
Usage example::
>>> from dataflow.lib import unit
>>> u = unit.Converter('mili*metre') # Units stored in mm
>>> v = u(3000, 'm') # Convert the value 3000 mm into meters
NeXus example::
# Load sample orientation in radians regardless of how it is stored.
# 1. Open the path
import nxs
file = nxs.open(filename)
file.openpath('/entry1/sample/sample_orientation')
# 2. scan the attributes, retrieving 'units'
    units = [value for attr, value in file.attrs() if attr == 'units']
# 3. set up the converter (assumes that units actually exists)
u = unit.Converter(units[0])
# 4. read the data and convert to the correct units
v = u(file.read(), 'radians')
NeXus example using h5py, and a private version of unit::
import h5py
from dataflow.lib import unit
file = h5py.File(filename)
field = file['/entry1/sample/sample_orientation']
u = unit.Converter(field.attrs.get('units', ''))
v = u(field.value, 'radians')
Note: minutes are used for angle and seconds are used for time. We
cannot determine the correct interpretation without knowing something
about the fields themselves. If this becomes an issue, we will need to
allow the application to set the dimension for the units rather than
getting the dimension from the units as we are currently doing.
"""
# TODO: Parse the udunits database directly
# UDUnits:
# https://www.unidata.ucar.edu/software/udunits/udunits-1/udunits.txt
from __future__ import division
__all__ = ['Converter']
import math
# Limited form of units for returning objects of a specific type.
# Maybe want to do full units handling with e.g., pyre's
# unit class. For now lets keep it simple. Note that
def _build_metric_units(unit, abbr):
"""
Construct standard SI names for the given unit.
Builds e.g.,
s, ns
second, nanosecond, nano*second
seconds, nanoseconds
Includes prefixes for femto through peta.
Ack! Allows, e.g., Coulomb and coulomb even though Coulomb is not
a unit because some NeXus files store it that way!
Returns a dictionary of names and scales.
"""
prefix = dict(peta=1e15, tera=1e12, giga=1e9, mega=1e6, kilo=1e3,
deci=1e-1, centi=1e-2, milli=1e-3, mili=1e-3, micro=1e-6,
nano=1e-9, pico=1e-12, femto=1e-15)
short_prefix = dict(P=1e15, T=1e12, G=1e9, M=1e6, k=1e3,
d=1e-1, c=1e-2, m=1e-3, u=1e-6,
n=1e-9, p=1e-12, f=1e-15)
short_prefix['μ'] = 1e-6
map = {abbr:1}
map.update([(P+abbr, scale) for (P, scale) in short_prefix.items()])
for name in [unit, unit.capitalize()]:
map.update({name:1, name+'s':1})
map.update([(P+name, scale) for (P, scale) in prefix.items()])
map.update([(P+'*'+name, scale) for (P, scale) in prefix.items()])
map.update([(P+name+'s', scale) for (P, scale) in prefix.items()])
return map
def _build_plural_units(**kw):
"""
Construct names for the given units. Builds singular and plural form.
"""
map = {}
map.update([(name, scale) for name, scale in kw.items()])
map.update([(name+'s', scale) for name, scale in kw.items()])
return map
def _build_all_units():
# Various distance measures
distance = _build_metric_units('meter', 'm')
distance.update(_build_metric_units('metre', 'm'))
distance.update(_build_plural_units(micron=1e-6,
Angstrom=1e-10,
angstrom=1e-10,
))
distance.update({'A': 1e-10, 'Ang' :1e-10, 'Å': 1e-10, 'Ångström': 1e-10})
# Various time measures.
# Note: minutes are used for angle rather than time
time = _build_metric_units('second', 's')
time.update(_build_plural_units(hour=3600, day=24*3600, week=7*24*3600))
time.update({'1e-7 s':1e-7, '1e-7 second':1e-7, '1e-7 seconds':1e-7})
# Various angle measures.
# Note: seconds are used for time rather than angle
angle = _build_plural_units(degree=1, minute=1/60., arcminute=1/60.,
arcsecond=1/3600., radian=180/math.pi)
angle.update(deg=1, arcmin=1/60., arcsec=1/3600., rad=180/math.pi)
frequency = _build_metric_units('hertz', 'Hz')
frequency.update(_build_metric_units('Hertz', 'Hz'))
frequency.update(_build_plural_units(rpm=1/60.))
# Note: degrees are used for angle
# TODO: temperature needs an offset as well as a scale
temperature = _build_metric_units('kelvin', 'K')
temperature.update(_build_metric_units('Kelvin', 'K'))
charge = _build_metric_units('coulomb', 'C')
charge.update({'microAmp*hour':0.0036})
sld = {'10^-6 Angstrom^-2': 1e-6, 'Angstrom^-2': 1}
Q = {'invAng': 1, 'invAngstroms': 1,
'10^-3 Angstrom^-1': 1e-3, 'nm^-1': 10}
energy = _build_metric_units('electronvolt', 'eV')
power = _build_metric_units('watt', 'W')
# APS files may be using 'a.u.' for 'arbitrary units'. Other
# facilities are leaving the units blank, using ??? or not even
# writing the units attributes.
unknown = {None:1, '???':1, '': 1, 'a.u.':1}
dims = [unknown, distance, time, angle, frequency,
temperature, charge, sld, Q, energy, power]
return dims
class Converter(object):
"""
Unit converter for NeXus style units.
"""
# Define the units, using both American and European spelling.
scalemap = None
scalebase = 1
dims = _build_all_units()
def __init__(self, name):
self.base = name
for map in self.dims:
if name in map:
self.scalemap = map
self.scalebase = self.scalemap[name]
break
else:
self.scalemap = {'': 1}
self.scalebase = 1
#raise ValueError, "Unknown unit %s"%name
def scale(self, units=""):
if units == "" or self.scalemap is None:
return 1
return self.scalebase/self.scalemap[units]
def conversion(self, units=""):
if units == "" or self.scalemap is None:
return 1.0
try:
return self.scalebase/self.scalemap[units]
except KeyError:
raise KeyError("%s not in %s (base = %s)"%(units, " ".join(sorted(self.scalemap.keys())), self.base))
def __call__(self, value, units=""):
# Note: calculating value*1.0 rather than simply returning value
# would produce an unnecessary copy of the array, which in the
# case of the raw counts array would be bad. Sometimes copying
# and other times not copying is also bad, but copy on modify
# semantics isn't supported.
a = self.conversion(units)
return value if a == 1.0 else value*a
def _check(expect, get):
if expect != get:
raise ValueError("Expected %s but got %s"%(expect, get))
#print expect, "==", get
def test():
_check(2, Converter('mm')(2000, 'm')) # 2000 mm -> 2 m
_check(0.003, Converter('microseconds')(3, units='ms')) # 3 us -> 0.003 ms
_check(45, Converter('nanokelvin')(45)) # 45 nK -> 45 nK
# TODO: more tests
_check(0.5, Converter('seconds')(1800, units='hours')) # 1800 -> 0.5 hr
_check(2.5, Converter('a.u.')(2.5, units=''))
if __name__ == "__main__":
test()
| StarcoderdataPython |
3287665 | <gh_stars>0
import logging
import sched
import sys
import winsound
import webbrowser
from scraper import init_scrapers
from termcolor import colored
class Engine:
def __init__(self, args, config, driver):
self.refresh_interval = config.refresh_interval
self.max_price = config.max_price
self.scheduler = sched.scheduler()
self.scrapers = init_scrapers(driver, config.urls)
for s in self.scrapers:
self.schedule(s)
def run(self):
self.scheduler.run(blocking=True)
def schedule(self, s):
if self.scheduler.queue:
t = self.scheduler.queue[-1].time + self.refresh_interval
self.scheduler.enterabs(t, 1, Engine.tick, (self, s))
else:
self.scheduler.enter(self.refresh_interval, 1, Engine.tick, (self, s))
def tick(self, s):
result = s.scrape()
if result is None:
logging.error(f'{s.name}: scrape failed')
else:
self.process_scrape_result(s, result)
return self.schedule(s)
def process_scrape_result(self, s, result):
currently_in_stock = bool(result)
last_price = result.last_price
if currently_in_stock:
if self.max_price is not None and result.price <= self.max_price:
self.in_stock_good_price(s.name, result.price, s.url)
else:
self.in_stock_bad_price(s.name, result.price)
else:
self.not_in_stock(s.name)
def in_stock_good_price(self, product_name, price, url):
for i in range(0,20):
print(colored('***********************************************************************************', 'green'))
print("\n\n\n", colored(product_name, 'white'), colored('in stock for', 'green'), colored(f'${price}', 'green'))
# open the webpage for this item
webbrowser.open(f'{url}', new=2)
# play the alarm sound until the user ends the program
print("Press Ctrl-C to end the program")
while True:
winsound.PlaySound(".\\resources\\VenatorHangarHit.wav", winsound.SND_FILENAME)
winsound.PlaySound(".\\resources\\yes.wav", winsound.SND_FILENAME)
def in_stock_bad_price(self, product_name, price):
print(colored(product_name, 'white'), colored(f'in stock for bad price: ${price}', 'yellow'))
def not_in_stock(self, product_name):
print(colored(product_name, 'white'), colored('not in stock', 'red'))
def send_alert(self, s, result, reason):
logging.info(f'{s.name}: {reason}')
self.alerter(subject=result.alert_subject, content=result.alert_content)
def hunt(args, config, driver):
engine = Engine(args, config, driver)
engine.run() | StarcoderdataPython |
130900 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataVIPNode, DimensionDataPool
from libcloud.common.dimensiondata import DimensionDataPoolMember
from libcloud.loadbalancer.base import LoadBalancer, Member, Algorithm
from libcloud.loadbalancer.drivers.dimensiondata \
import DimensionDataLBDriver as DimensionData
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttp
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
class DimensionDataTests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.conn_classes = (None, DimensionDataMockHttp)
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
def test_invalid_region(self):
try:
self.driver = DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
except ValueError:
pass
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
try:
self.driver.list_balancers()
self.assertTrue(False)
# Above command should have thrown an InvalidCredsException
except InvalidCredsError:
pass
def test_create_balancer(self):
self.driver.ex_set_current_network_domain('1234')
members = []
members.append(Member(
id=None,
ip='1.2.3.4',
port=80))
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_with_defaults(self):
self.driver.ex_set_current_network_domain('1234')
balancer = self.driver.create_balancer(
name='test',
port=None,
protocol=None,
algorithm=None,
members=None)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_no_members(self):
self.driver.ex_set_current_network_domain('1234')
members = None
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_empty_members(self):
self.driver.ex_set_current_network_domain('1234')
members = []
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_list_balancers(self):
bal = self.driver.list_balancers()
self.assertEqual(bal[0].name, 'myProduction.Virtual.Listener')
self.assertEqual(bal[0].id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal[0].port, '8899')
self.assertEqual(bal[0].ip, '172.16.58.3')
self.assertEqual(bal[0].state, State.RUNNING)
def test_balancer_list_members(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
members = self.driver.balancer_list_members(balancer)
self.assertEqual(2, len(members))
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].port, 9889)
def test_balancer_attach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='192.168.3.11',
port=80,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
def test_balancer_detach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id='3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0',
ip='192.168.3.11',
port=80,
balancer=balancer,
extra=None)
result = self.driver.balancer_detach_member(balancer, member)
self.assertEqual(result, True)
def test_destroy_balancer(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
response = self.driver.destroy_balancer(balancer)
self.assertEqual(response, True)
def test_set_get_network_domain_id(self):
self.driver.ex_set_current_network_domain('1234')
nwd = self.driver.ex_get_current_network_domain()
self.assertEqual(nwd, '1234')
def test_ex_create_pool_member(self):
pool = DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
)
node = DimensionDataVIPNode(
id='2344',
name='test',
status=State.RUNNING,
ip='192.168.3.11'
)
member = self.driver.ex_create_pool_member(
pool=pool,
node=node,
port=80
)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.ip, '192.168.3.11')
def test_ex_create_node(self):
node = self.driver.ex_create_node(
network_domain_id='12345',
name='test',
ip='192.168.127.12',
ex_description='',
connection_limit=25000,
connection_rate_limit=2000)
self.assertEqual(node.name, 'myProductionNode.1')
self.assertEqual(node.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
def test_ex_create_pool(self, ):
pool = self.driver.ex_create_pool(
network_domain_id='1234',
name='test',
balancer_method='ROUND_ROBIN',
ex_description='test',
service_down_action='NONE',
slow_ramp_time=30)
self.assertEqual(pool.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(pool.name, 'test')
self.assertEqual(pool.status, State.RUNNING)
def test_ex_create_virtual_listener(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=80,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_unusual_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=8900,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_get_balancer(self):
bal = self.driver.get_balancer('6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.name, 'myProduction.Virtual.Listener')
self.assertEqual(bal.id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.port, '8899')
self.assertEqual(bal.ip, '172.16.58.3')
self.assertEqual(bal.state, State.RUNNING)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertNotEqual(0, len(protocols))
def test_ex_get_nodes(self):
nodes = self.driver.ex_get_nodes()
self.assertEqual(2, len(nodes))
self.assertEqual(nodes[0].name, 'ProductionNode.1')
self.assertEqual(nodes[0].id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(nodes[0].ip, '10.10.10.101')
def test_ex_get_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.name, 'ProductionNode.2')
self.assertEqual(node.id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.ip, '10.10.10.101')
def test_ex_update_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
node.connection_limit = '100'
result = self.driver.ex_update_node(node)
self.assertEqual(result.connection_limit, '100')
def test_ex_destroy_node(self):
result = self.driver.ex_destroy_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertTrue(result)
def test_ex_set_node_state(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
result = self.driver.ex_set_node_state(node, False)
self.assertEqual(result.connection_limit, '10000')
def test_ex_get_pools(self):
pools = self.driver.ex_get_pools()
self.assertNotEqual(0, len(pools))
self.assertEqual(pools[0].name, 'myDevelopmentPool.1')
self.assertEqual(pools[0].id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_get_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(pool.name, 'myDevelopmentPool.1')
self.assertEqual(pool.id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_update_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
pool.slow_ramp_time = '120'
result = self.driver.ex_update_pool(pool)
self.assertTrue(result)
def test_ex_destroy_pool(self):
response = self.driver.ex_destroy_pool(
pool=DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None))
self.assertTrue(response)
def test_get_pool_members(self):
members = self.driver.ex_get_pool_members('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(2, len(members))
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].name, '10.0.3.13')
self.assertEqual(members[0].status, 'NORMAL')
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].port, 9889)
self.assertEqual(members[0].node_id, '3c207269-e75e-11e4-811f-005056806999')
def test_get_pool_member(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.status, 'NORMAL')
self.assertEqual(member.ip, '10.0.3.13')
self.assertEqual(member.port, 9889)
def test_set_pool_member_state(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
result = self.driver.ex_set_pool_member_state(member, True)
self.assertTrue(result)
def test_ex_destroy_pool_member(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='3c207269-e75e-11e4-811f-005056806999'),
destroy_node=False)
self.assertTrue(response)
def test_ex_destroy_pool_member_with_node(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='34de6ed6-46a4-4dae-a753-2f8d3840c6f9'),
destroy_node=True)
self.assertTrue(response)
def test_ex_get_default_health_monitors(self):
monitors = self.driver.ex_get_default_health_monitors(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(monitors), 6)
self.assertEqual(monitors[0].id, '01683574-d487-11e4-811f-005056806999')
self.assertEqual(monitors[0].name, 'CCDEFAULT.Http')
self.assertFalse(monitors[0].node_compatible)
self.assertTrue(monitors[0].pool_compatible)
def test_ex_get_default_persistence_profiles(self):
profiles = self.driver.ex_get_default_persistence_profiles(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(profiles), 4)
self.assertEqual(profiles[0].id, 'a34ca024-f3db-11e4-b010-005056806999')
self.assertEqual(profiles[0].name, 'CCDEFAULT.Cookie')
self.assertEqual(profiles[0].fallback_compatible, False)
self.assertEqual(len(profiles[0].compatible_listeners), 1)
self.assertEqual(profiles[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
def test_ex_get_default_irules(self):
irules = self.driver.ex_get_default_irules(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(irules), 4)
self.assertEqual(irules[0].id, '2b20cb2c-ffdc-11e4-b010-005056806999')
self.assertEqual(irules[0].name, 'CCDEFAULT.HttpsRedirect')
self.assertEqual(len(irules[0].compatible_listeners), 1)
self.assertEqual(irules[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
class DimensionDataMockHttp(MockHttp):
fixtures = LoadBalancerFileFixtures('dimensiondata')
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
| StarcoderdataPython |
3207387 | """Calculate Intersection-Over-Union(IOU) of two bounding boxes."""
import numpy as np
def bbox_iou(bbox_a, bbox_b):
"""Calculate Intersection-Over-Union(IOU) of two bounding boxes.
Parameters
----------
bbox_a : numpy.ndarray
An ndarray with shape :math:`(N, 4)`.
bbox_b : numpy.ndarray
An ndarray with shape :math:`(M, 4)`.
Returns
-------
numpy.ndarray
An ndarray with shape :math:`(N, M)` indicates IOU between each pairs of
bounding boxes in `bbox_a` and `bbox_b`.
"""
if bbox_a.shape[1] < 4 or bbox_b.shape[1] < 4:
raise IndexError("Bounding boxes axis 1 must have at least length 4")
tl = np.maximum(bbox_a[:, None, :2], bbox_b[:, :2])
br = np.minimum(bbox_a[:, None, 2:4], bbox_b[:, 2:4])
area_i = np.prod(br - tl, axis=2) * (tl < br).all(axis=2)
area_a = np.prod(bbox_a[:, 2:4] - bbox_a[:, :2], axis=1)
area_b = np.prod(bbox_b[:, 2:4] - bbox_b[:, :2], axis=1)
return area_i / (area_a[:, None] + area_b - area_i)
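
# Example (sketch, not part of the original module): two unit squares that
# overlap on half their area have IOU = 0.5 / (1 + 1 - 0.5) = 1/3.
if __name__ == "__main__":
    a = np.array([[0.0, 0.0, 1.0, 1.0]])
    b = np.array([[0.5, 0.0, 1.5, 1.0]])
    print(bbox_iou(a, b))  # [[0.33333333]]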
| StarcoderdataPython |
181194 | import logging
from argparse import ArgumentParser
import yaml
from src.app import Application
class ConfigFromCLI:
def __init__(self):
self._host = '127.0.0.1'
self._port = 8000
self._set_config()
def _set_config(self):
config = self._get_config_from_file()
if config:
self._host = config.get('host')
self._port = int(config.get('port'))
def _get_config_from_file(self):
args = self._get_args()
if args.config:
with open(args.config) as file:
return yaml.load(file, Loader=yaml.Loader)
@staticmethod
def _get_args():
parser = ArgumentParser()
parser.add_argument(
'-c', '--config', type=str,
required=False, help='Sets config file path'
)
return parser.parse_args()
@property
def host(self):
return self._host
@property
def port(self):
return self._port
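
# Example config.yaml accepted via -c/--config (assumed shape, matching the
# keys read in _set_config above):
#
#   host: 0.0.0.0
#   port: 9000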
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s',
handlers=[
logging.FileHandler('../../src/client/client.log', encoding='utf-8'),
logging.StreamHandler()
]
)
config = ConfigFromCLI()
with Application() as app:
app.host, app.port = config.host, config.port
app.run()
| StarcoderdataPython |
3302703 | from src.wrapper.sh1106 import Screen
from src.modules.clock_module import Module as ClockModule
from src.modules.temperature_module import Module as TemperatureModule
from PIL import Image, ImageDraw, ImageFont
import os
font_path = os.path.join('assets', 'Font.ttf')
class MenuItem:
def __init__(self, title, module):
self.title = title
self.module = module
class Settings:
items_on_display = 2
items_margin = 25
class Menu:
def __init__(self, display):
self.display = display
self.font = ImageFont.truetype(font_path, 20)
self.current_scroll_index = 0
self.start_image = Image.new('1', (display.width, display.height), "WHITE")
self.draw = ImageDraw.Draw(self.start_image)
self.module = None
self.menu_items = [
MenuItem('Uhrzeit', ClockModule(self.display)),
MenuItem('Temp.', TemperatureModule(self.display))
]
self.reload_menu_items()
self.draw_menu()
def draw_menu(self):
for index, item in enumerate(self.shown_menu):
self.draw.text((5, Settings.items_margin * index + 5), f"{index + 1} {item.title}", font=self.font, fill=0)
def reload_menu_items(self):
self.shown_menu = self.menu_items[self.current_scroll_index : (Settings.items_on_display + self.current_scroll_index)]
def rerender_display(self):
self.reload_menu_items()
self.display.clear()
self.start_image = Image.new('1', (self.display.width, self.display.height), "WHITE")
self.draw = ImageDraw.Draw(self.start_image)
self.draw_menu()
def menu_up(self, channel):
self.current_scroll_index = min(self.current_scroll_index + 1, len(self.menu_items) - 1)
self.rerender_display()
def menu_down(self, channel):
self.current_scroll_index = max(0, self.current_scroll_index - 1)
self.rerender_display()
def menu_option1(self, channel):
self.select(1)
def menu_option2(self, channel):
self.select(2)
def menu_option3(self, channel):
if self.module is None:
self.select(3)
else:
self.module = None
def select(self, option):
try:
module_class = self.shown_menu[option - 1].module
self.module = module_class
except IndexError:
print("This option does not exists.")
except Exception:
pass | StarcoderdataPython |
3321834 | import sys
import importlib
import bpy
from pathlib import Path
# running as a script from terminal:
path_script = Path(__file__)
# running as a script from within blender:
#path_script = Path(bpy.context.space_data.text.filepath)
path_repo = path_script.parent.parent
sys.path.append(str(path_repo.joinpath('LIB')))
import numpy as np
import math
import csv
from BlenderVisual import blender_element as be
from BlenderVisual import blender_composite as bc
from BlenderVisual import blender_utility as bu
importlib.reload(bc)
importlib.reload(be)
importlib.reload(bu)
##########################
## WORLD
##########################
#render
bu.render_config(x=1080, y=720, sample=32, use_gpu = False)
world = bu.reset_world()
world.color = (1,1,1)
sce = bpy.data.scenes[0]
sce_sp = bpy.data.scenes[1]
##########################
## Studio
##########################
studio = bc.photo_studio(sce)
# stage
objectSize = np.asarray([4, 4, 4])
fieldCenter = objectSize/2
studio.set_location(fieldCenter)
studio.set_subject_size(6)
studio.adjust_light('key', rot_x=np.pi/3)
studio.adjust_light('fill', rot_x=np.pi/3)
studio.adjust_light('rim', rot_x=np.pi/3)
studio.set_rotation(0, np.pi/8*7)
mat_black = be.createEmissionMaterial('k_emission', [0,0,0,1])
anchor_axis = studio.create_camera_anchor('axis_anchor', [-.9, -.9, -8])
arrow = bc.axis_arrow(sce, .15)
arrow.set_anchor(anchor_axis)
arrow.reset_location()
arrow.set_mat(mat_black)
wm_text = 'Cross section'
anchor_wm = studio.create_camera_anchor('wm_anchor', [-1.4, .9, -8])
wm = bc.watermark(sce, wm_text, mat_black)
wm.set_anchor(anchor_wm)
wm.set_scale(.1)
#####################
# data
#####################
working_dir = str(path_repo) + '/example'
from scipy.interpolate import RegularGridInterpolator
X,Y,Z = np.mgrid[-2:2:40j, -2:2:40j, -2:2:40j]
#surf_eq = X**3 - Y + Z**2 - (X**2+Y**2+Z**2)**2 + 3*(X**2+Y**2+Z**2) - 3
surf_eq = X**4 + Y**4 + Z**4 - ((X**2+Y+Z**3)**2)**1.5 + (X+Y+Z) - 3
x = np.arange(surf_eq.shape[0])
y = np.arange(surf_eq.shape[1])
z = np.arange(surf_eq.shape[2])
fn = RegularGridInterpolator((x,y,z), surf_eq)
#####################
# objects
#####################
colormap = bu.colormapVIRIDIS
#smin = np.percentile(surf_eq.flatten(), 20)
#smax = np.percentile(surf_eq.flatten(), 95)
smin = -8
smax = 8
#be.create_color_bar(colormap, smin, smax, 5, 'g/L')
cbar = bc.colorbar(sce, colormap, smin, smax)
cbar.annotate(5, 'g/L', '')
cbar.resize_text(1)
anchor_cbar = studio.create_camera_anchor('cbar_anchor', [-1.3, -.2, -8])
cbar.set_anchor(anchor_cbar)
cbar.set_scale(.15)
cbar.set_brightness(4)
init_bin = [.2, 0, .2]
init_cross = [bu.get_interp_loc_from_slice(b, 40, [0,4]) for b in init_bin]
vol = bc.box_slice('vol', sce, objectSize, (1,.3,0,1))
s0 = vol.add_slice('x', 0, init_cross[0])
s1 = vol.add_slice('y', 1, init_cross[1])
s2 = vol.add_slice('z', 2, init_cross[2])
vol.set_divider(init_cross, radius = .02, color = (1,1,1,1))
x_cut = np.interp(init_cross[0], [0,objectSize[0]] , [0, 39])
values_yz = bu.get_interpolation_from_3D(fn, 0, [x_cut,0,0],
[x_cut,39,39],[1,100,100])
rgba = be.mapToColor(values_yz, colormap, xmin=smin, xmax= smax,
log=False, maptype='RGBA')
img = be.createImage('x_img', rgba)
vol.update_plane(s0, img, 0, init_cross[0])
z_cut = np.interp(init_cross[2], [0,objectSize[2]] , [0, 39])
values_xy = bu.get_interpolation_from_3D(fn, 2, [0,0,z_cut],
[39,39,z_cut],[100,100,1])
rgba = be.mapToColor(values_xy, colormap, xmin=smin, xmax= smax,
log=False, maptype='RGBA')
img = be.createImage('z_img', rgba)
vol.update_plane(s2, img, 2, init_cross[2])
#####################
# slice
#####################
def visualSlice(scene):
f = scene.frame_current
f0 = scene.frame_start
f1 = scene.frame_end
y_min = 0
y_max = 39
y_slice = np.interp(f, [f0, f1], [y_min, y_max])
values_xz = bu.get_interpolation_from_3D(fn, 1, [0,y_slice,0],
[39,y_slice,39],[100,1,100])
values_zx = np.transpose(values_xz)
rgba = be.mapToColor(values_zx, colormap, xmin=smin, xmax= smax,log=False, maptype='RGBA')
img = be.createImage('y{}'.format(f), rgba)
loc = bu.get_interp_loc_from_slice(y_slice, 40, [0,4])
vol.update_plane(s1, img, 1, loc)
wm.set_text('y = {:.2f}'.format(y_slice))
## animation
bpy.app.handlers.frame_change_pre.clear()
bpy.app.handlers.frame_change_pre.append(visualSlice)
sce.frame_start = 1
sce.frame_end = 40
sce.frame_current = sce.frame_start
sce.render.image_settings.file_format = 'JPEG'
frames = list(range(1,sce.frame_end+1))
#frames = list(range(1, 2))
for frame_nr in frames:
sce.frame_set(frame_nr)
arrow.reset_location()
sce.render.filepath = working_dir+'/scratch/section/' +str(frame_nr)
bpy.ops.render.render(write_still=True)
| StarcoderdataPython |
1777 | <reponame>MaxwellDPS/healthchecks
import os
from django.conf import settings
from django.template.loader import render_to_string
from django.utils import timezone
import json
import requests
from urllib.parse import quote, urlencode
from hc.accounts.models import Profile
from hc.lib import emails
from hc.lib.string import replace
try:
import apprise
except ImportError:
    # Enforce: disable the Apprise integration when the package is unavailable.
settings.APPRISE_ENABLED = False
def tmpl(template_name, **ctx):
template_path = "integrations/%s" % template_name
# \xa0 is non-breaking space. It causes SMS messages to use UCS2 encoding
# and cost twice the money.
return render_to_string(template_path, ctx).strip().replace("\xa0", " ")
class Transport(object):
def __init__(self, channel):
self.channel = channel
def notify(self, check):
""" Send notification about current status of the check.
This method returns None on success, and error message
on error.
"""
raise NotImplementedError()
def is_noop(self, check):
""" Return True if transport will ignore check's current status.
This method is overridden in Webhook subclass where the user can
configure webhook urls for "up" and "down" events, and both are
optional.
"""
return False
def checks(self):
return self.channel.project.check_set.order_by("created")
class Email(Transport):
def notify(self, check, bounce_url):
if not self.channel.email_verified:
return "Email not verified"
unsub_link = self.channel.get_unsub_link()
headers = {
"X-Bounce-Url": bounce_url,
"List-Unsubscribe": "<%s>" % unsub_link,
"List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
}
try:
# Look up the sorting preference for this email address
p = Profile.objects.get(user__email=self.channel.email_value)
sort = p.sort
except Profile.DoesNotExist:
# Default sort order is by check's creation time
sort = "created"
# list() executes the query, to avoid DB access while
# rendering a template
ctx = {
"check": check,
"checks": list(self.checks()),
"sort": sort,
"now": timezone.now(),
"unsub_link": unsub_link,
}
emails.alert(self.channel.email_value, ctx, headers)
def is_noop(self, check):
if not self.channel.email_verified:
return True
if check.status == "down":
return not self.channel.email_notify_down
else:
return not self.channel.email_notify_up
class Shell(Transport):
def prepare(self, template, check):
""" Replace placeholders with actual values. """
ctx = {
"$CODE": str(check.code),
"$STATUS": check.status,
"$NOW": timezone.now().replace(microsecond=0).isoformat(),
"$NAME": check.name,
"$TAGS": check.tags,
}
for i, tag in enumerate(check.tags_list()):
ctx["$TAG%d" % (i + 1)] = tag
return replace(template, ctx)
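
    # For example, a template like "notify.sh $CODE $STATUS" expands the
    # check's UUID and current status before notify() passes it to os.system().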
def is_noop(self, check):
if check.status == "down" and not self.channel.cmd_down:
return True
if check.status == "up" and not self.channel.cmd_up:
return True
return False
def notify(self, check):
if not settings.SHELL_ENABLED:
return "Shell commands are not enabled"
if check.status == "up":
cmd = self.channel.cmd_up
elif check.status == "down":
cmd = self.channel.cmd_down
cmd = self.prepare(cmd, check)
code = os.system(cmd)
if code != 0:
return "Command returned exit code %d" % code
class HttpTransport(Transport):
@classmethod
def get_error(cls, response):
# Override in subclasses: look for a specific error message in the
# response and return it.
return None
@classmethod
def _request(cls, method, url, **kwargs):
try:
options = dict(kwargs)
options["timeout"] = 5
if "headers" not in options:
options["headers"] = {}
if "User-Agent" not in options["headers"]:
options["headers"]["User-Agent"] = "healthchecks.io"
r = requests.request(method, url, **options)
if r.status_code not in (200, 201, 202, 204):
m = cls.get_error(r)
if m:
return f'Received status code {r.status_code} with a message: "{m}"'
return f"Received status code {r.status_code}"
except requests.exceptions.Timeout:
# Well, we tried
return "Connection timed out"
except requests.exceptions.ConnectionError:
return "Connection failed"
@classmethod
def get(cls, url, **kwargs):
# Make 3 attempts--
for x in range(0, 3):
error = cls._request("get", url, **kwargs)
if error is None:
break
return error
@classmethod
def post(cls, url, **kwargs):
# Make 3 attempts--
for x in range(0, 3):
error = cls._request("post", url, **kwargs)
if error is None:
break
return error
@classmethod
def put(cls, url, **kwargs):
# Make 3 attempts--
for x in range(0, 3):
error = cls._request("put", url, **kwargs)
if error is None:
break
return error
class Webhook(HttpTransport):
def prepare(self, template, check, urlencode=False):
""" Replace variables with actual values. """
def safe(s):
return quote(s) if urlencode else s
ctx = {
"$CODE": str(check.code),
"$STATUS": check.status,
"$NOW": safe(timezone.now().replace(microsecond=0).isoformat()),
"$NAME": safe(check.name),
"$TAGS": safe(check.tags),
}
for i, tag in enumerate(check.tags_list()):
ctx["$TAG%d" % (i + 1)] = safe(tag)
return replace(template, ctx)
def is_noop(self, check):
if check.status == "down" and not self.channel.url_down:
return True
if check.status == "up" and not self.channel.url_up:
return True
return False
def notify(self, check):
spec = self.channel.webhook_spec(check.status)
if not spec["url"]:
return "Empty webhook URL"
url = self.prepare(spec["url"], check, urlencode=True)
headers = {}
for key, value in spec["headers"].items():
headers[key] = self.prepare(value, check)
body = spec["body"]
if body:
body = self.prepare(body, check)
if spec["method"] == "GET":
return self.get(url, headers=headers)
elif spec["method"] == "POST":
return self.post(url, data=body.encode(), headers=headers)
elif spec["method"] == "PUT":
return self.put(url, data=body.encode(), headers=headers)
class Slack(HttpTransport):
def notify(self, check):
text = tmpl("slack_message.json", check=check)
payload = json.loads(text)
return self.post(self.channel.slack_webhook_url, json=payload)
class HipChat(HttpTransport):
def is_noop(self, check):
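        # HipChat has been discontinued; treat every notification as a no-op.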
return True
class OpsGenie(HttpTransport):
@classmethod
def get_error(cls, response):
try:
return response.json().get("message")
except ValueError:
pass
def notify(self, check):
headers = {
"Conent-Type": "application/json",
"Authorization": "GenieKey %s" % self.channel.opsgenie_key,
}
payload = {"alias": str(check.code), "source": settings.SITE_NAME}
if check.status == "down":
payload["tags"] = check.tags_list()
payload["message"] = tmpl("opsgenie_message.html", check=check)
payload["note"] = tmpl("opsgenie_note.html", check=check)
payload["description"] = tmpl("opsgenie_description.html", check=check)
url = "https://api.opsgenie.com/v2/alerts"
if self.channel.opsgenie_region == "eu":
url = "https://api.eu.opsgenie.com/v2/alerts"
if check.status == "up":
url += "/%s/close?identifierType=alias" % check.code
return self.post(url, json=payload, headers=headers)
class PagerDuty(HttpTransport):
URL = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
def notify(self, check):
description = tmpl("pd_description.html", check=check)
payload = {
"service_key": self.channel.pd_service_key,
"incident_key": str(check.code),
"event_type": "trigger" if check.status == "down" else "resolve",
"description": description,
"client": settings.SITE_NAME,
"client_url": check.details_url(),
}
return self.post(self.URL, json=payload)
class PagerTree(HttpTransport):
def notify(self, check):
url = self.channel.value
headers = {"Conent-Type": "application/json"}
payload = {
"incident_key": str(check.code),
"event_type": "trigger" if check.status == "down" else "resolve",
"title": tmpl("pagertree_title.html", check=check),
"description": tmpl("pagertree_description.html", check=check),
"client": settings.SITE_NAME,
"client_url": settings.SITE_ROOT,
"tags": ",".join(check.tags_list()),
}
return self.post(url, json=payload, headers=headers)
class PagerTeam(HttpTransport):
def notify(self, check):
url = self.channel.value
headers = {"Content-Type": "application/json"}
payload = {
"incident_key": str(check.code),
"event_type": "trigger" if check.status == "down" else "resolve",
"title": tmpl("pagerteam_title.html", check=check),
"description": tmpl("pagerteam_description.html", check=check),
"client": settings.SITE_NAME,
"client_url": settings.SITE_ROOT,
"tags": ",".join(check.tags_list()),
}
return self.post(url, json=payload, headers=headers)
class Pushbullet(HttpTransport):
def notify(self, check):
text = tmpl("pushbullet_message.html", check=check)
url = "https://api.pushbullet.com/v2/pushes"
headers = {
"Access-Token": self.channel.value,
"Conent-Type": "application/json",
}
payload = {"type": "note", "title": settings.SITE_NAME, "body": text}
return self.post(url, json=payload, headers=headers)
class Pushover(HttpTransport):
URL = "https://api.pushover.net/1/messages.json"
def notify(self, check):
others = self.checks().filter(status="down").exclude(code=check.code)
# list() executes the query, to avoid DB access while
# rendering a template
ctx = {"check": check, "down_checks": list(others)}
text = tmpl("pushover_message.html", **ctx)
title = tmpl("pushover_title.html", **ctx)
pieces = self.channel.value.split("|")
user_key, prio = pieces[0], pieces[1]
# The third element, if present, is the priority for "up" events
if len(pieces) == 3 and check.status == "up":
prio = pieces[2]
payload = {
"token": settings.PUSHOVER_API_TOKEN,
"user": user_key,
"message": text,
"title": title,
"html": 1,
"priority": int(prio),
}
# Emergency notification
if prio == "2":
payload["retry"] = settings.PUSHOVER_EMERGENCY_RETRY_DELAY
payload["expire"] = settings.PUSHOVER_EMERGENCY_EXPIRATION
return self.post(self.URL, data=payload)
class VictorOps(HttpTransport):
def notify(self, check):
description = tmpl("victorops_description.html", check=check)
mtype = "CRITICAL" if check.status == "down" else "RECOVERY"
payload = {
"entity_id": str(check.code),
"message_type": mtype,
"entity_display_name": check.name_then_code(),
"state_message": description,
"monitoring_tool": settings.SITE_NAME,
}
return self.post(self.channel.value, json=payload)
class Matrix(HttpTransport):
def get_url(self):
s = quote(self.channel.value)
url = settings.MATRIX_HOMESERVER
url += "/_matrix/client/r0/rooms/%s/send/m.room.message?" % s
url += urlencode({"access_token": settings.MATRIX_ACCESS_TOKEN})
return url
def notify(self, check):
plain = tmpl("matrix_description.html", check=check)
formatted = tmpl("matrix_description_formatted.html", check=check)
payload = {
"msgtype": "m.text",
"body": plain,
"format": "org.matrix.custom.html",
"formatted_body": formatted,
}
return self.post(self.get_url(), json=payload)
class Discord(HttpTransport):
def notify(self, check):
text = tmpl("slack_message.json", check=check)
payload = json.loads(text)
url = self.channel.discord_webhook_url + "/slack"
return self.post(url, json=payload)
class Telegram(HttpTransport):
SM = "https://api.telegram.org/bot%s/sendMessage" % settings.TELEGRAM_TOKEN
@classmethod
def get_error(cls, response):
try:
return response.json().get("description")
except ValueError:
pass
@classmethod
def send(cls, chat_id, text):
# Telegram.send is a separate method because it is also used in
# hc.front.views.telegram_bot to send invite links.
return cls.post(
cls.SM, json={"chat_id": chat_id, "text": text, "parse_mode": "html"}
)
def notify(self, check):
from hc.api.models import TokenBucket
if not TokenBucket.authorize_telegram(self.channel.telegram_id):
return "Rate limit exceeded"
text = tmpl("telegram_message.html", check=check)
return self.send(self.channel.telegram_id, text)
class Sms(HttpTransport):
URL = "https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json"
def is_noop(self, check):
return check.status != "down"
def notify(self, check):
profile = Profile.objects.for_user(self.channel.project.owner)
if not profile.authorize_sms():
profile.send_sms_limit_notice("SMS")
return "Monthly SMS limit exceeded"
url = self.URL % settings.TWILIO_ACCOUNT
auth = (settings.TWILIO_ACCOUNT, settings.TWILIO_AUTH)
text = tmpl("sms_message.html", check=check, site_name=settings.SITE_NAME)
data = {
"From": settings.TWILIO_FROM,
"To": self.channel.sms_number,
"Body": text,
}
return self.post(url, data=data, auth=auth)
class WhatsApp(HttpTransport):
URL = "https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json"
def is_noop(self, check):
if check.status == "down":
return not self.channel.whatsapp_notify_down
else:
return not self.channel.whatsapp_notify_up
def notify(self, check):
profile = Profile.objects.for_user(self.channel.project.owner)
if not profile.authorize_sms():
profile.send_sms_limit_notice("WhatsApp")
return "Monthly message limit exceeded"
url = self.URL % settings.TWILIO_ACCOUNT
auth = (settings.TWILIO_ACCOUNT, settings.TWILIO_AUTH)
text = tmpl("whatsapp_message.html", check=check, site_name=settings.SITE_NAME)
data = {
"From": "whatsapp:%s" % settings.TWILIO_FROM,
"To": "whatsapp:%s" % self.channel.sms_number,
"Body": text,
}
return self.post(url, data=data, auth=auth)
class Trello(HttpTransport):
URL = "https://api.trello.com/1/cards"
def is_noop(self, check):
return check.status != "down"
def notify(self, check):
params = {
"idList": self.channel.trello_list_id,
"name": tmpl("trello_name.html", check=check),
"desc": tmpl("trello_desc.html", check=check),
"key": settings.TRELLO_APP_KEY,
"token": self.channel.trello_token,
}
return self.post(self.URL, params=params)
class Apprise(HttpTransport):
def notify(self, check):
if not settings.APPRISE_ENABLED:
# Not supported and/or enabled
return "Apprise is disabled and/or not installed"
a = apprise.Apprise()
title = tmpl("apprise_title.html", check=check)
body = tmpl("apprise_description.html", check=check)
a.add(self.channel.value)
notify_type = (
apprise.NotifyType.SUCCESS
if check.status == "up"
else apprise.NotifyType.FAILURE
)
return (
"Failed"
if not a.notify(body=body, title=title, notify_type=notify_type)
else None
)
class MsTeams(HttpTransport):
def notify(self, check):
text = tmpl("msteams_message.json", check=check)
payload = json.loads(text)
return self.post(self.channel.value, json=payload)
class Zulip(HttpTransport):
@classmethod
def get_error(cls, response):
try:
return response.json().get("msg")
except ValueError:
pass
def notify(self, check):
_, domain = self.channel.zulip_bot_email.split("@")
url = "https://%s/api/v1/messages" % domain
auth = (self.channel.zulip_bot_email, self.channel.zulip_api_key)
data = {
"type": self.channel.zulip_type,
"to": self.channel.zulip_to,
"topic": tmpl("zulip_topic.html", check=check),
"content": tmpl("zulip_content.html", check=check),
}
return self.post(url, data=data, auth=auth)
| StarcoderdataPython |
4815585 | <reponame>justinshenk/simba<filename>simba/data_plot.py
import os
import pandas as pd
import statistics
import numpy as np
import cv2
from configparser import ConfigParser, MissingSectionHeaderError, NoOptionError, NoSectionError
import glob
from simba.drop_bp_cords import *
def data_plot_config(configini, SelectedBp):
config = ConfigParser()
configFile = str(configini)
config.read(configFile)
noAnimals = config.getint('General settings', 'animal_no')
projectPath = config.get('General settings', 'project_path')
poseConfigSetting = config.get('create ensemble settings', 'pose_estimation_body_parts')
frames_dir_out = os.path.join(projectPath, 'frames', 'output', 'live_data_table')
bplist = define_bp_drop_down(configini)
print(bplist)
if not os.path.exists(frames_dir_out):
os.makedirs(frames_dir_out)
csv_dir_in = os.path.join(projectPath, 'csv', 'features_extracted')
vidInfPath = os.path.join(projectPath, 'logs', 'video_info.csv')
try:
wfileType = config.get('General settings', 'workflow_file_type')
except NoOptionError:
wfileType = 'csv'
vidinfDf = pd.read_csv(vidInfPath)
videoCounter = 0
try:
multiAnimalIDList = config.get('Multi animal IDs', 'id_list')
multiAnimalIDList = multiAnimalIDList.split(",")
if multiAnimalIDList[0] != '':
multiAnimalStatus = True
print('Applying settings for multi-animal tracking...')
else:
multiAnimalStatus = False
multiAnimalIDList = []
for animal in range(noAnimals):
multiAnimalIDList.append('Animal_' + str(animal + 1) + '_')
print('Applying settings for classical tracking...')
except NoSectionError:
multiAnimalIDList = []
for animal in range(noAnimals):
multiAnimalIDList.append('Animal_' + str(animal + 1) + '_')
multiAnimalStatus = False
print('Applying settings for classical tracking...')
Xcols, Ycols, Pcols = getBpNames(configini)
bpDict = create_body_part_dictionary(multiAnimalStatus, multiAnimalIDList, noAnimals, Xcols, Ycols, Pcols, [])
##### FIND RELEVANT COLUMN
if poseConfigSetting != 'user_defined':
if noAnimals == 1:
move1ColName = "Movement_mouse_centroid"
if noAnimals == 2:
move1ColName = "Movement_mouse_1_centroid"
if poseConfigSetting == 'user_defined':
if noAnimals == 1:
move1ColName = "movement_" + SelectedBp
if noAnimals == 2:
move1ColName = "movement_" + SelectedBp + '_1'
########### FIND CSV FILES ###########
filesFound = glob.glob(csv_dir_in + "/*." + wfileType)
print('Generating data plots for ' + str(len(filesFound)) + ' video(s)...')
for currentFile in filesFound:
frameCounter = 0
list_nose_movement_M1, list_nose_movement_M2 = [], []
loop = 0
CurrentVideoName = os.path.basename(currentFile.replace('.csv', ''))
videoSettings = vidinfDf.loc[vidinfDf['Video'] == str(CurrentVideoName)]
try:
fps = int(videoSettings['fps'])
        except TypeError:
            print('Error: make sure all the videos that are going to be analyzed are represented in the project_folder/logs/video_info.csv file')
            continue
videoCounter += 1
csv_df = pd.read_csv(currentFile)
savePath = os.path.join(frames_dir_out, CurrentVideoName)
if not os.path.exists(savePath):
os.makedirs(savePath)
        df_lists = [csv_df[i:i + fps] for i in range(0, csv_df.shape[0], fps)]  # one chunk of rows per second of video
for currentDf in df_lists:
try:
mmMove_nose_M1 = currentDf[move1ColName].mean()
except (KeyError, UnboundLocalError):
move1ColName = bplist[0][0]
move1ColName = 'Movement_' + str(move1ColName)
mmMove_nose_M1 = currentDf[move1ColName].mean()
list_nose_movement_M1.append(mmMove_nose_M1)
current_velocity_M1_cm_sec = round(mmMove_nose_M1, 2)
meanVelocity_M1 = statistics.mean(list_nose_movement_M1)
meanVelocity_M1 = round(meanVelocity_M1, 2)
total_Movement_M1 = sum(list_nose_movement_M1)
total_Movement_M1 = round(total_Movement_M1, 2)
if noAnimals == 2:
if poseConfigSetting != 'user_defined':
mmMove_nose_M2 = currentDf["Movement_mouse_2_centroid"].mean()
if poseConfigSetting == 'user_defined':
mmMove_nose_M2 = currentDf["movement_" + SelectedBp].mean()
list_nose_movement_M2.append(mmMove_nose_M2)
current_velocity_M2_cm_sec = round(mmMove_nose_M2, 2)
meanVelocity_M2 = statistics.mean(list_nose_movement_M2)
meanVelocity_M2 = round(meanVelocity_M2, 2)
total_Movement_M2 = sum(list_nose_movement_M2)
total_Movement_M2 = round(total_Movement_M2, 2)
# save images
for index, row in currentDf.iterrows():
img_size = (400, 600, 3)
img = np.ones(img_size) * 255
cv2.putText(img, str('Mean velocity animal 1: '), (5, 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (255, 0, 0), 1)
cv2.putText(img, str('Total movement animal 1: '), (5, 40), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 0, 255), 1)
cv2.putText(img, str('Current velocity animal 1: '), (5, 60), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 100, 0), 1)
cv2.putText(img, str(meanVelocity_M1) + str(' cm/s'), (275, 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (255, 0, 0), 1)
cv2.putText(img, str(total_Movement_M1) + str(' cm'), (275, 40), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 0, 255), 1)
cv2.putText(img, str(current_velocity_M1_cm_sec) + str(' cm/s'), (275, 60), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 100, 0), 1)
if noAnimals == 2:
cv2.putText(img, str('Mean velocity animal 2: '), (5, 80), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (255, 0, 0), 1)
cv2.putText(img, str('Total movement animal 2: '), (5, 100), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 0, 255), 1)
cv2.putText(img, str('Current velocity animal 2: '), (5, 120), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 100, 0), 1)
cv2.putText(img, str(meanVelocity_M2) + str(' cm/s'), (275, 80), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (255, 0, 0), 1)
cv2.putText(img, str(total_Movement_M2) + str(' cm'), (275, 100), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 0, 255), 1)
cv2.putText(img, str(current_velocity_M2_cm_sec) + str(' cm/s'), (275, 120), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0, 100, 0), 1)
if poseConfigSetting != 'user_defined':
centroid_distance_cm = (int(row["Centroid_distance"])) / 10
centroid_distance_cm = round(centroid_distance_cm, 2)
nose_2_nose_dist_cm = (int(row["Nose_to_nose_distance"])) / 10
nose_2_nose_dist_cm = round(nose_2_nose_dist_cm, 2)
cv2.putText(img, str('Centroid distance: '), (5, 140), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (153, 50, 204), 1)
cv2.putText(img, str('Nose to nose distance: '), (5, 160), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (153, 50, 204), 1)
cv2.putText(img, str(centroid_distance_cm) + str(' cm'), (275, 140), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (153, 50, 204), 1)
cv2.putText(img, str(nose_2_nose_dist_cm) + str(' cm'), (275, 160), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (153, 50, 204), 1)
imageSaveName = os.path.join(savePath, str(loop) + '.png')
cv2.imwrite(imageSaveName, img)
print('Live plot ' + str(loop) + '/' + str(len(csv_df)) + ' for video ' + str(videoCounter) + '/' + str(len(filesFound)))
loop += 1
frameCounter += 1
    print('Finished generating data plots.')
| StarcoderdataPython |
3303218 |
# Pick pivot
# Partition in lower and higher part
# Recursively sort lower and higher
def quicksort(in_list):
    if len(in_list) < 2:
        return in_list
    pivot_index = len(in_list) // 2  # pivot choice: the middle element
    pivot_val = in_list[pivot_index]
    # not in place: new lists are built at every recursion level
    lower_list = [val for i, val in enumerate(in_list) if val <= pivot_val and i != pivot_index]
    higher_list = [val for val in in_list if val > pivot_val]
    return quicksort(lower_list) + [pivot_val] + quicksort(higher_list)
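# A sketch of an in-place variant using Lomuto partitioning (illustrative;
# keeps the same middle-element pivot by swapping it to the end first):
def quicksort_inplace(a, lo=0, hi=None):
    if hi is None:
        hi = len(a) - 1
    if lo >= hi:
        return a
    mid = (lo + hi) // 2
    a[mid], a[hi] = a[hi], a[mid]  # move the pivot to the end
    pivot = a[hi]
    i = lo
    for j in range(lo, hi):
        if a[j] <= pivot:
            a[i], a[j] = a[j], a[i]
            i += 1
    a[i], a[hi] = a[hi], a[i]  # place the pivot in its final position
    quicksort_inplace(a, lo, i - 1)
    quicksort_inplace(a, i + 1, hi)
    return a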
import random
print(quicksort([random.randint(0,10) for i in range(10)]))
| StarcoderdataPython |
1716285 | <filename>src/genie/libs/parser/iosxe/tests/ShowIpNhrpNhs/cli/equal/golden_output_2_expected.py
expected_output = {
"Tunnel100": {
"nhs_ip": {
"172.16.58.3": {
"nhs_state": "RE",
"nbma_address": "172.16.17.32",
"priority": 0,
"cluster": 0,
}
}
}
}
| StarcoderdataPython |
196481 | <gh_stars>0
import inspect
import logging
import os
import time
import traceback
from AndroidRunner.Devices import Devices
from AndroidRunner.PluginHandler import PluginHandler
from AndroidRunner.util import makedirs
import paths
# noinspection PyUnusedLocal
class PluginTests(object):
def __init__(self, config):
self.logger = logging.getLogger(self.__class__.__name__)
self.errors = []
self.config = config
adb_path = config.get('adb_path', 'adb')
self.devices = Devices(config['devices'], adb_path=adb_path)
self.profilers = None
self.output_root = paths.OUTPUT_DIR
self.result_file = os.path.join(self.output_root, 'Test_results.txt')
self.dirs = {}
@staticmethod
def get_progress_xml_file():
return "Testing, no progress file has been made"
def check_profilers(self):
self.check_init_profilers()
default_profilers = ['android', 'batterystats', 'trepn', 'monsoon']
for profiler in self.profilers:
if profiler.name.lower() not in default_profilers:
self.check_profiler(profiler.currentProfiler, profiler.name)
def check_init_profilers(self):
self.profilers = []
for name, params in list(self.config.get('profilers', {}).items()):
try:
self.profilers.append(PluginHandler(name, params))
except Exception:
self.errors.append('Profiler {}: Initializing profiler resulted in the following error:\n{}'.
format(name, traceback.format_exc()))
def check_profiler(self, profiler, profiler_name):
profiler_parent = inspect.getmro(type(profiler))[1]
profiler_parent_module = profiler_parent.__module__
profiler_parent_name = profiler_parent.__name__
if '.Profiler' in profiler_parent_module and profiler_parent_name == 'Profiler':
self.check_profiler_methods(profiler, profiler_name)
else:
self.errors.append('Profiler {}: doesn\'t have the \'Profiler\' '
'as parent class, plugin can\'t be tested'.format(profiler_name))
def check_profiler_methods(self, profiler, profiler_name):
device = self.check_profiler_dependencies(profiler, profiler_name)
if device is not None:
self.set_dirs(device, profiler_name)
methods = ['load', 'set_output', 'start_profiling', 'stop_profiling', 'collect_results',
'aggregate_subject', 'unload', 'aggregate_end']
for current_method in methods:
self.check_profiler_method(device, profiler, current_method, profiler_name)
def set_dirs(self, device, profiler_name):
self.dirs['subject'] = os.path.join(self.output_root, 'data', device.name, 'test_dir_1', 'test_dir_2',
profiler_name)
self.dirs['aggregated'] = os.path.join(paths.OUTPUT_DIR, '{}_aggregated.csv'.format(profiler_name))
self.dirs['data_folder'] = os.path.join(paths.OUTPUT_DIR, 'data')
makedirs(self.dirs['subject'])
def check_profiler_method(self, device, profiler, current_method, profiler_name):
try:
if current_method == 'set_output':
method_result = getattr(profiler, current_method)(self.dirs['subject'])
elif current_method == 'stop_profiling':
time.sleep(5)
method_result = getattr(profiler, current_method)(device)
time.sleep(5)
elif current_method == 'aggregate_subject':
method_result = getattr(profiler, current_method)()
elif current_method == 'aggregate_end':
method_result = getattr(profiler, current_method)(self.dirs['data_folder'], self.dirs['aggregated'])
else:
method_result = getattr(profiler, current_method)(device)
if method_result is not None:
self.errors.append("Profiler {}: Method {} gives non expected return value.".
format(profiler_name, current_method))
except NotImplementedError:
self.errors.append('Profiler {}: Method {} not implemented.'.format(profiler_name, current_method))
except Exception:
self.errors.append('Profiler {}: Method {} gave the following error: \n{}'
.format(profiler_name, current_method, traceback.format_exc()))
def check_profiler_dependencies(self, profiler, profiler_name):
method = 'dependencies()'
try:
method_result = profiler.dependencies()
self.check_dependencies(method_result, profiler_name)
except NotImplementedError:
self.errors.append('Profiler {}: Method {} not implemented.'.format(profiler_name, method))
except Exception:
self.errors.append('Profiler {}: Method {} gave the following error: \n{}'
.format(profiler_name, method, traceback.format_exc()))
device = None
try:
for current_device in self.devices:
installed_apps = current_device.is_installed(profiler.dependencies())
not_installed_apps = [name for name, installed in list(installed_apps.items()) if not installed]
if len(not_installed_apps) == 0:
device = current_device
break
finally:
if device is None:
self.errors.append('Profiler {}: plugin not further tested, '
'no device available that meets the dependencies. '
'Check devices and dependencies'.format(profiler_name))
def check_dependencies(self, dependencies, profiler_name):
if isinstance(dependencies, list):
for dependency in dependencies:
if isinstance(dependency, str):
if len(dependency.split(".")) == 3:
continue
else:
self.errors.append('Profiler {}: dependency \'{}\' has an invalid format'
.format(profiler_name, dependency))
else:
self.errors.append('Profiler {}: invalid object in dependency list'
.format(profiler_name))
else:
self.errors.append('Profiler {}: return value of dependencies() not a list object'.format(profiler_name))
def format_errors(self):
result_string = ''
if len(self.errors) > 0:
result_string += '{} Errors found during testing: \n'.format(len(self.errors))
for error in self.errors:
result_string += '\n{}'.format(error)
else:
result_string += "No errors found during testing"
return result_string
def write_to_file(self, formatted_errors):
with open(self.result_file, 'w') as f:
f.write(formatted_errors)
def start(self):
self.check_profilers()
        formatted_errors = self.format_errors()
        self.write_to_file(formatted_errors)
        print('\n{}'.format(formatted_errors))
print('\nTest results saved to file: {}'.format(self.result_file))
| StarcoderdataPython |
1751411 | <reponame>jayvdb/django-compat-patcher
from __future__ import absolute_import, print_function, unicode_literals
import os, sys, random
import pytest
import _test_utilities
from django_compat_patcher.registry import get_relevant_fixers, get_relevant_fixer_ids, get_fixer_by_id
from django_compat_patcher.utilities import get_patcher_setting
from django_compat_patcher import patch
def test_get_patcher_setting():
with pytest.raises(ValueError):
get_patcher_setting("DEBUG") # only DCP settings allowed
assert get_patcher_setting("DCP_INCLUDE_FIXER_IDS") == "*"
assert get_patcher_setting("DCP_INCLUDE_FIXER_IDS",
settings=dict(DCP_INCLUDE_FIXER_IDS=["a"])) == ["a"]
# TODO patch django settings to check that they are used IFF no parameter "settings"
def test_get_relevant_fixer_ids():
settings = random.choice(({}, None))
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
    assert all(expected_fixer_id in fixer_ids for expected_fixer_id in
               ['fix_deletion_templatetags_future_url', 'fix_deletion_core_handlers_wsgi_WSGIRequest_REQUEST'])
assert len(fixer_ids) > 5
fixer_ids = get_relevant_fixer_ids(current_django_version="1.10")
    assert all(expected_fixer_id in fixer_ids for expected_fixer_id in
               ['fix_deletion_templatetags_future_url', 'fix_deletion_core_handlers_wsgi_WSGIRequest_REQUEST'])
assert len(fixer_ids) > 5
fixer_ids = get_relevant_fixer_ids(current_django_version="1.3")
assert fixer_ids == ['fix_incoming_django_urls']
# TODO update this test when new fixers arrive, and test inclusion/exclusion filters
settings = dict(DCP_INCLUDE_FIXER_IDS=[],
DCP_INCLUDE_FIXER_FAMILIES=[],
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES=[])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert len(fixer_ids) == 0
settings = dict(DCP_INCLUDE_FIXER_IDS=[],
DCP_INCLUDE_FIXER_FAMILIES=["django1.9"],
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES=[])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert len(fixer_ids) >= 2
settings = dict(DCP_INCLUDE_FIXER_IDS="*",
DCP_INCLUDE_FIXER_FAMILIES=[],
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES=[])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert len(fixer_ids) >= 2
settings = dict(DCP_INCLUDE_FIXER_IDS=[],
DCP_INCLUDE_FIXER_FAMILIES="*",
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES=[])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert len(fixer_ids) >= 2
settings = dict(DCP_INCLUDE_FIXER_IDS=['fix_deletion_templatetags_future_url'],
DCP_INCLUDE_FIXER_FAMILIES=["django1.9"],
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES=["django1.6", "django1.7", "django1.8", "django1.9"])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert fixer_ids == []
settings = dict(DCP_INCLUDE_FIXER_IDS=['fix_deletion_templatetags_future_url'],
DCP_INCLUDE_FIXER_FAMILIES=["django1.9"],
DCP_EXCLUDE_FIXER_IDS=['fix_deletion_templatetags_future_url'],
DCP_EXCLUDE_FIXER_FAMILIES=[])
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert 'fix_deletion_core_handlers_wsgi_WSGIRequest_REQUEST' in fixer_ids
settings = dict(DCP_INCLUDE_FIXER_IDS=[],
DCP_INCLUDE_FIXER_FAMILIES=["django1.9"],
DCP_EXCLUDE_FIXER_IDS=[],
DCP_EXCLUDE_FIXER_FAMILIES="*")
fixer_ids = get_relevant_fixer_ids(current_django_version="1.9", settings=settings)
assert fixer_ids == []
def test_get_fixer_by_id():
res = get_fixer_by_id("fix_deletion_templatetags_future_ssi")
assert isinstance(res, dict)
assert res["fixer_id"] == "fix_deletion_templatetags_future_ssi"
with pytest.raises(LookupError):
get_fixer_by_id("ddssdfsdfsdf")
def test_django_patcher():
applied_fixer_ids = patch()
    assert len(applied_fixer_ids) > 0  # TODO: strengthen this test?
| StarcoderdataPython |
1706427 | <gh_stars>0
# python list generator
print '************** Generator Test Programs **************'
l = [x * x for x in range(10)]
print l
g = (x * x for x in range(10))
print g
for x in g:
print x
def fib(limit):
    n, a, b = 0, 0, 1
    while n < limit:
yield b
a, b = b, a + b
n = n + 1
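# fib is a generator function: calling fib(10) only builds a lazy iterator;
# values are computed one at a time as the loop below requests them.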
for x in fib(10):
print x
raw_input()
| StarcoderdataPython |
1793104 | from .supertype import supertype
| StarcoderdataPython |
151115 | # This program has been developed by students from the bachelor Computer Science at Utrecht University within the
# Software and Game project course
# ©Copyright Utrecht University Department of Information and Computing Sciences.
"""Contains test data."""
test_get_assignments_data = \
{
"courses": [
{
"assignments": [
{
"cmid": 6,
"name": "Learning basic loops",
"duedate": 1573776060,
},
{
"cmid": 9,
"name": "Learning booleans",
"duedate": 1573776060,
},
]
}
],
"warnings": []
}
test_get_assignments_check = \
[
{
"cmid": 6,
"name": "Learning basic loops",
"duedate": 1573776060,
},
{
"cmid": 9,
"name": "Learning booleans",
"duedate": 1573776060,
},
]
test_assignment_completion_check = \
{
"statuses": [
{
"cmid": 6,
"state": 1
},
{
"cmid": 9,
"state": 0
}
],
"warnings": []
}
test_get_enrolled_users = \
[
{
"id": 4,
"username": "WS",
"firstname": "Will",
"lastname": "Smith",
"fullname": "<NAME>",
}
]
test_inactivity_get_enrolled_users = \
[
{
"id": 2
},
{
"id": 3
},
{
"id": 4
},
{
"id": 5
}
]
test_get_courses_by_id = \
{
'courses': [
{
'id': 2,
'fullname': 'BeginningCourse'
}
]
}
test_get_courses_by_id_ended = \
{
'courses': [
{
'id': 2,
'fullname': '<NAME>',
'displayname': 'No view course',
'shortname': 'nvc',
'categoryid': 1,
'categoryname': 'Miscellaneous',
'sortorder': 10001,
'summary': '',
'summaryformat': 1,
'summaryfiles': [],
'overviewfiles': [],
'contacts': [
{'id': 4,
'fullname': '<NAME>'}],
'enrollmentmethods': ['manual'],
'idnumber': '',
'format': 'topics',
'showgrades': 1,
'newsitems': 5,
'startdate': 1605740400,
'enddate': 1637276800,
'maxbytes': 0,
'showreports': 0,
'visible': 1,
'groupmode': 0,
'groupmodeforce': 0,
'defaultgroupingid': 0,
'enablecompletion': 1,
'completionnotify': 0,
'lang': '',
'theme': '',
'marker': 0,
'legacyfiles': 0,
'calendartype': '',
'timecreated': 1605708824,
'timemodified': 1605708824,
'requested': 0,
'cacherev': 1605801045,
'filters': [{'filter': 'displayh5p', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'activitynames', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mathjaxloader', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'emoticon', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'urltolink', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mediaplugin', 'localstate': 0, 'inheritedstate': 1}],
'courseformatoptions': [{'name': 'hiddensections', 'value': 0},
{'name': 'coursedisplay', 'value': 0}]}]
}
test_get_courses_by_id_live = \
{
'courses': [
{
'id': 2,
'fullname': 'No view course',
'displayname': 'No view course',
'shortname': 'nvc',
'categoryid': 1,
'categoryname': 'Miscellaneous',
'sortorder': 10001,
'summary': '',
'summaryformat': 1,
'summaryfiles': [],
'overviewfiles': [],
'contacts': [
{'id': 4,
'fullname': '<NAME>'}],
'enrollmentmethods': ['manual'],
'idnumber': '',
'format': 'topics',
'showgrades': 1,
'newsitems': 5,
'startdate': 1605740400,
'enddate': 1637276400,
'maxbytes': 0,
'showreports': 0,
'visible': 1,
'groupmode': 0,
'groupmodeforce': 0,
'defaultgroupingid': 0,
'enablecompletion': 1,
'completionnotify': 0,
'lang': '',
'theme': '',
'marker': 0,
'legacyfiles': 0,
'calendartype': '',
'timecreated': 1605708824,
'timemodified': 1605708824,
'requested': 0,
'cacherev': 1605801045,
'filters': [{'filter': 'displayh5p', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'activitynames', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mathjaxloader', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'emoticon', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'urltolink', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mediaplugin', 'localstate': 0, 'inheritedstate': 1}],
'courseformatoptions': [{'name': 'hiddensections', 'value': 0},
{'name': 'coursedisplay', 'value': 0}]}]
}
test_get_courses_by_id_young = \
{
'courses': [
{
'id': 2,
'fullname': 'No view course',
'displayname': 'No view course',
'shortname': 'nvc',
'categoryid': 1,
'categoryname': 'Miscellaneous',
'sortorder': 10001,
'summary': '',
'summaryformat': 1,
'summaryfiles': [],
'overviewfiles': [],
'contacts': [
{'id': 4,
'fullname': '<NAME>'}],
'enrollmentmethods': ['manual'],
'idnumber': '',
'format': 'topics',
'showgrades': 1,
'newsitems': 5,
'startdate': 1605740400,
'enddate': 1606487200,
'maxbytes': 0,
'showreports': 0,
'visible': 1,
'groupmode': 0,
'groupmodeforce': 0,
'defaultgroupingid': 0,
'enablecompletion': 1,
'completionnotify': 0,
'lang': '',
'theme': '',
'marker': 0,
'legacyfiles': 0,
'calendartype': '',
'timecreated': 1606400370,
'timemodified': 1605708824,
'requested': 0,
'cacherev': 1605801045,
'filters': [{'filter': 'displayh5p', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'activitynames', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mathjaxloader', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'emoticon', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'urltolink', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mediaplugin', 'localstate': 0, 'inheritedstate': 1}],
'courseformatoptions': [{'name': 'hiddensections', 'value': 0},
{'name': 'coursedisplay', 'value': 0}]}]
}
test_get_courses_by_id_old = \
{
'courses': [
{
'id': 2,
'fullname': 'No view course',
'displayname': 'No view course',
'shortname': 'nvc',
'categoryid': 1,
'categoryname': 'Miscellaneous',
'sortorder': 10001,
'summary': '',
'summaryformat': 1,
'summaryfiles': [],
'overviewfiles': [],
'contacts': [
{'id': 4,
'fullname': '<NAME>'}],
'enrollmentmethods': ['manual'],
'idnumber': '',
'format': 'topics',
'showgrades': 1,
'newsitems': 5,
'startdate': 1605740400,
'enddate': 1637276400,
'maxbytes': 0,
'showreports': 0,
'visible': 1,
'groupmode': 0,
'groupmodeforce': 0,
'defaultgroupingid': 0,
'enablecompletion': 1,
'completionnotify': 0,
'lang': '',
'theme': '',
'marker': 0,
'legacyfiles': 0,
'calendartype': '',
'timecreated': 1605708824,
'timemodified': 1605708824,
'requested': 0,
'cacherev': 1605801045,
'filters': [{'filter': 'displayh5p', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'activitynames', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mathjaxloader', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'emoticon', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'urltolink', 'localstate': 0, 'inheritedstate': 1},
{'filter': 'mediaplugin', 'localstate': 0, 'inheritedstate': 1}],
'courseformatoptions': [{'name': 'hiddensections', 'value': 0},
{'name': 'coursedisplay', 'value': 0}]}]
}
test_learning_locker_viewed_course = \
{
"more": "",
"statements": [
{
"actor": {
"name": "Admin User",
"account": {
"homePage": "http://127.0.0.1:80",
"name": "2"
},
"objectType": "Assistant"
},
"verb": {
"id": "http://id.tincanapi.com/verb/viewed",
"display": {
"en": "viewed"
}
},
"object": {
"id": "http://127.0.0.1:80/course/view.php?id=2",
"definition": {
"type": "http://id.tincanapi.com/activitytype/lms/course",
"name": {
"en": "BeginningCourse"
},
"extensions": {
"https://w3id.org/learning-analytics/learning-management-system/short-id": "BC",
"https://w3id.org/learning-analytics/learning-management-system/external-id": "7"
}
},
"objectType": "Activity"
},
"timestamp": "2019-10-16T11:26:19+01:00",
"context": {
"platform": "Moodle",
"language": "en",
"extensions": {
"http://lrs.learninglocker.net/define/extensions/info": {
"http://moodle.org": "3.7.2 (Build: 20190909)",
"https://github.com/xAPI-vle/moodle-logstore_xapi": "v4.4.0",
"event_name": "\\core\\event\\course_viewed",
"event_function": "\\src\\transformer\\events\\core\\course_viewed"
}
},
"contextActivities": {
"grouping": [
{
"id": "http://127.0.0.1:80",
"definition": {
"type": "http://id.tincanapi.com/activitytype/lms",
"name": {
"en": "\"New Site\""
}
},
"objectType": "Activity"
}
],
"category": [
{
"id": "http://moodle.org",
"definition": {
"type": "http://id.tincanapi.com/activitytype/source",
"name": {
"en": "Moodle"
}
},
"objectType": "Activity"
}
]
}
},
"id": "c98c8522-3d43-4098-9b5d-812392458328",
"stored": "2019-10-16T10:27:02.866Z",
"authority": {
"objectType": "Assistant",
"name": "<NAME>",
"mbox": "mailto:<EMAIL>"
},
"version": "1.0.0"
}
]
}
| StarcoderdataPython |
198577 | #! usr/bin/env python
# coding:utf-8
#=====================================================
# Copyright (C) 2020 * Ltd. All rights reserved.
#
# Author : Chen_Sheng19
# Editor : VIM
# Create time : 2020-06-09
# File name :
# Description  : produce TFRecord data from image files
#
#=====================================================
import tensorflow as tf
import numpy as np
from tqdm import tqdm
import os
from sklearn.utils import shuffle
from PIL import Image
def load_sample(sample_dir, shuffle_flag=True):
print("loading dataset...")
lfilenames = []
labelnames = []
for dirpath,dirnames,filenames in os.walk(sample_dir):
for filename in filenames:
filepath = os.sep.join([dirpath,filename])
lfilenames.append(filepath)
labelname = dirpath.split("\\")[-1]
labelnames.append(labelname)
lab = list(sorted(set(labelnames)))
labdict = dict(zip(lab,list(range(len(lab)))))
labels = [labdict[i] for i in labelnames]
if shuffle_flag:
return shuffle(np.asarray(lfilenames),np.asarray(labels)),np.asarray(lab)
else:
return (np.asarray(lfilenames),np.asarray(labels)),np.asarray(lab)
dir_path = "man_woman"
(filenames, labels), _ = load_sample(dir_path, False)
def make_TFRec(filenames,labels):
    # 1. Create the TFRecord writer
writer = tf.python_io.TFRecordWriter("mydata.tfrecords")
for i in tqdm(range(len(labels))):
image = Image.open(filenames[i])
img = image.resize((256,256))
img_raw = img.tobytes()
        # 2. Convert the loaded content into the TFRecord Example format
        example = tf.train.Example(  # one Example per image
            features = tf.train.Features(  # Features wrapper
                feature = {"label": tf.train.Feature(int64_list = tf.train.Int64List(value = [labels[i]])),
                           "img_raw": tf.train.Feature(bytes_list = tf.train.BytesList(value = [img_raw]))}))  # feature dict
        writer.write(example.SerializeToString())  # serialize, then write
writer.close()
make_TFRec(filenames,labels)
def read_and_decode(filenames,flag="train",batch_size=3):
    # 1. Build a filename queue from the file list
if flag == "train":
filename_queue = tf.train.string_input_producer(filenames)
else:
filename_queue = tf.train.string_input_producer(filenames,num_epochs=1,shuffle=False)
    # 2. Read a serialized example from the queue
reader = tf.TFRecordReader()
_,serialized_example = reader.read(filename_queue)
    # 3. Parse the serialized example into features
features = tf.parse_single_example(serialized_example,
features = {"label":tf.FixedLenFeature([],tf.int64),
"img_raw":tf.FixedLenFeature([],tf.string)})
    # 4. Decode the features into image data
image = tf.decode_raw(features['img_raw'],tf.uint8)
image = tf.reshape(image,[256,256,3])
label = tf.cast(features['label'],tf.int32)
if flag == "train":
image = tf.cast(image,tf.float32) * (1./255) - 0.5
img_batch,label_batch = tf.train.batch([image,label],batch_size=batch_size,capacity=20)
return img_batch,label_batch
return image,label
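# Sketch of an equivalent, queue-free reading path using the tf.data API
# (assumes TF 1.x with tf.data available; defined here only for illustration
# and never called, so the queue-based flow below is unchanged).
def read_with_tf_data(filenames, batch_size=3):
    def _parse(serialized):
        features = tf.parse_single_example(serialized,
                                           features={"label": tf.FixedLenFeature([], tf.int64),
                                                     "img_raw": tf.FixedLenFeature([], tf.string)})
        image = tf.decode_raw(features["img_raw"], tf.uint8)
        image = tf.reshape(image, [256, 256, 3])
        return image, tf.cast(features["label"], tf.int32)
    dataset = tf.data.TFRecordDataset(filenames).map(_parse).batch(batch_size)
    return dataset.make_one_shot_iterator().get_next()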
TFRecord_filenames = ["mydata.tfrecords"]
image, label = read_and_decode(TFRecord_filenames, flag='test')
save_image_path = "show\\"
if tf.gfile.Exists(save_image_path):
tf.gfile.DeleteRecursively(save_image_path)
tf.gfile.MakeDirs(save_image_path)
with tf.Session() as sess:
sess.run(tf.local_variables_initializer())
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
myset = set([])
try:
i = 0
while True:
example,example_label = sess.run([image,label])
example_label = str(example_label)
if example_label not in myset:
myset.add(example_label)
tf.gfile.MakeDirs(save_image_path+example_label)
img = Image.fromarray(example,'RGB')
img.save(save_image_path+example_label+"\\"+str(i)+'_Label_'+'.jpg')
print(i)
i += 1
except tf.errors.OutOfRangeError:
print('Done Test -- epoch limit reached')
finally:
coord.request_stop()
coord.join(threads)
print("stop()")
| StarcoderdataPython |
3251633 | <filename>check_data_quality/cc/checkrn.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import os
import codecs
# gbk gb18030
def checkFile(filePath, readCode='utf_8_sig'):
dir_name = os.path.dirname(filePath)
new_file_name = os.path.splitext(os.path.basename(filePath))[0] \
+ '_error.csv'
new_file_path = dir_name + os.sep + new_file_name
print(new_file_path)
with codecs.open(filePath, 'rb', readCode) as fr, \
codecs.open(new_file_path, 'w', readCode) as fw:
fw.write("start check " + filePath)
i = 0
error_line = 0
while True:
            # Read one line
line = fr.readline()
            # Stop when the file content ends
if not line:
break
            # Strip surrounding whitespace and the trailing newline
line = line.strip()
if line[-1:] != '"':
# if not line:
                # i.e. the line is empty, or its last character is not a double quote
error_line = error_line + 1
print(filePath + " line: " + str(i))
fw.write(filePath + ' line:' + str(i) + '\n')
print(line)
fw.write(line + '\n')
i = i + 1
fw.write('Total error line: ' + str(error_line))
def read_dir(dirPath, ext, readCode):
for (root, dirs, files) in os.walk(dirPath):
print(dirs)
for filename in files:
if filename[-len(ext):] == ext:
checkFile(os.path.join(root, filename), readCode)
for dirname in dirs:
read_dir(os.path.join(root, dirname), ext, readCode)
if __name__ == "__main__":
if len(sys.argv) != 4:
print("usage: checkrn.py dirpath ext readcode")
exit(1)
read_dir(sys.argv[1], sys.argv[2], sys.argv[3])
print("done")
| StarcoderdataPython |
35489 | import mock
import pytest
from prf.tests.prf_testcase import PrfTestCase
from pyramid.exceptions import ConfigurationExecutionError
from prf.resource import Resource, get_view_class, get_parent_elements
from prf.view import BaseView
class TestResource(PrfTestCase):
def test_init_(self):
res = Resource(self.conf)
assert res.member_name == ''
assert res.collection_name == ''
assert res.parent == None
assert res.uid == ''
with pytest.raises(ValueError):
            # member name can't be empty
res.add('', view=BaseView)
def test_repr_(self):
res = Resource(self.conf, 'member', 'collection', uid='uid')
assert 'uid' in res.__repr__()
def test_get_ancestors(self):
root = Resource(self.conf)
one = root.add('one', view=BaseView)
assert one.get_ancestors() == []
two = one.add('two', view=BaseView)
anc = two.get_ancestors()
assert anc[0] == one
def test_add(self):
root = Resource(self.conf)
two = root.add('two', view=BaseView, id_name='two')
assert two.parent == root
assert two.member_name == 'two'
assert two.collection_name == 'twos'
assert two.uid == 'twos'
assert two.is_singular is False
three = two.add('tree', 'trix', view=BaseView, id_name='three')
assert three.parent == two
assert three.member_name == 'tree'
assert three.collection_name == 'trix'
assert three.uid == 'twos:trix'
assert three.is_singular is False
assert three in two.children
four = three.add('four', view=BaseView)
sing = two.add('sing', collection_name=None, view=BaseView)
assert sing.is_singular is True
pref = root.add('five', prefix='pref', view=BaseView)
assert pref.uid == 'pref:fives'
def test_add_id_name(self):
root = Resource(self.conf)
two = root.add('two', view=BaseView, id_name='username')
assert two.id_name == 'username'
three = two.add('tree', view=BaseView, id_name='username')
assert three.path == 'twos/{two_username}/trees'
@mock.patch('prf.resource.maybe_dotted')
def test_get_view_class(self, fake_maybe_dotted):
root = Resource(self.conf)
fake_maybe_dotted.return_value = BaseView
assert get_view_class(BaseView, root) == BaseView
assert get_view_class('prf.view.BaseView', root) == BaseView
fake_maybe_dotted.reset_mock()
def test_get_parent_elements(self):
root = Resource(self.conf)
ppref, npref = get_parent_elements(
root.add('one', view=BaseView).add('two', view=BaseView).add('three', view=BaseView))
assert ppref == 'ones/{one_id}/twos/{two_id}'
assert npref == 'ones:twos:'
@pytest.mark.skip('route_prefix is broken')
def test_get_parent_elements_w_route_prefix(self):
self.conf.route_prefix = 'route_prefix'
root = Resource(self.conf)
ppref, npref = get_parent_elements(
root.add('one', view=BaseView).add('two', view=BaseView).add('three', view=BaseView))
assert ppref == 'route_prefix/ones/{one_id}/twos/{two_id}'
assert npref == 'route_prefix:ones:'
| StarcoderdataPython |
3211871 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 30 02:03:04 2019
@author: linyizi
"""
import numpy as np
from scipy.special import digamma, polygamma


class LDA_original:
@staticmethod
def _convergence_(new, old, epsilon = 1.0e-3):
'''
Check convergence.
'''
delta = abs(new - old)
        return np.all(delta < epsilon)
@staticmethod
def _normalization_col(x):
'''
Normalize a matrix.
Each element is divided by the corresponding column sum.
'''
return x/np.sum(x,0)
@staticmethod
def _accumulate_Phi(beta, Phi, doc):
'''
This function accumulates the effect of Phi_new from all documents after e step.
beta is V*k matrix.
Phi is N_d * k matrix.
Return updated beta.
'''
row_index = list(doc.keys())
word_count = list(doc.values())
for i in range(len(row_index)):
            beta[row_index[i], :] += word_count[i] * Phi[i, :]
return beta
def __init__(self, k, max_em_iter=50, max_alpha_iter=50, max_Estep_iter=50):
self._k = k
self._max_em_iter = max_em_iter
self._max_alpha_iter = max_alpha_iter
self._max_Estep_iter = max_Estep_iter
    def initialization(self, V):
'''
Initialize alpha and beta.
alpha is a k-dim vector. beta is V*k matrix.
'''
k = self._k
alpha = np.random.uniform(size = k)
alpha_new = alpha/np.sum(alpha)
beta = np.random.dirichlet(alpha_new, V)
return alpha_new, beta
def Estep(self, doc, alpha, beta, N_d):
'''
E step for a document, which calculate the posterior parameters.
beta_old and alpha-old is coming from previous iteration.
Return Phi and gamma of a document.
'''
k = self._k
max_iter = self._max_Estep_iter
gamma_old = [alpha[i] + N_d/k for i in range(k)]
row_index = list(doc.keys())
word_count = np.array(list(doc.values()))
        for it in range(max_iter):
            # Update Phi
            Phi = np.zeros((N_d, k))
            for i in range(N_d):
                for j in range(k):
                    Phi[i, j] = beta[row_index[i], j] * np.exp(digamma(gamma_old[j]))
                Phi[i, :] = Phi[i, :] / np.sum(Phi[i, :])
            # Update gamma
            Phi_sum = np.zeros(k)
            for j in range(k):
                z = 0
                for i in range(N_d):
                    z += Phi[i, j] * word_count[i]
                Phi_sum[j] = z
            gamma_new = alpha + Phi_sum
            # Converged or not
            if (it > 0) and self._convergence_(gamma_new, gamma_old):
                break
            else:
                gamma_old = gamma_new.copy()
        return gamma_new, Phi
def newton_raphson(self, alpha_old, gamma_matrix):
'''
This function uses New Raphson method to update alpha in the M step.
alpha_old is a k-dim vector.
gamma_matrix is a M * k matrix which stores all gamma from M documents.
Return updated alpha.
'''
k = self._k
max_iter = self._max_alpha_iter
M = gamma_matrix.shape[0]
pg = np.sum(digamma(gamma_matrix), 0) - np.sum(digamma(np.sum(gamma_matrix, 1)))
alpha_new = alpha_old.copy()
for t in range(max_iter):
alpha_sum = np.sum(alpha_old)
g = M * (digamma(alpha_sum) - digamma(alpha_old)) + pg
h = -M * polygamma(1, alpha_old)
z = M * polygamma(1, alpha_sum)
c = np.sum(g/h)/(z**(-1.0) + np.sum(h**(-1.0)))
delta = (g-c)/h
alpha_new -= delta
            if np.any(alpha_new < 0):
                alpha_new = self.newton_raphson(alpha_old / 10, gamma_matrix)
                return alpha_new
            if (t > 1) and self._convergence_(delta, np.zeros((1, k))):
break
else:
alpha_old = alpha_new.copy()
return alpha_new
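    # Why the closed-form step above runs in O(k) (sketch): the Hessian of the
    # Dirichlet log-likelihood has the form H = diag(h) + z * 1 1^T, so by the
    # Sherman-Morrison formula the Newton direction can be computed as
    #     (H^{-1} g)_j = (g_j - c) / h_j,
    #     c = (sum_i g_i / h_i) / (1/z + sum_i 1/h_i),
    # which is exactly the `c` and `delta` computed in newton_raphson
    # (cf. Blei, Ng & Jordan 2003, appendix on Newton-Raphson for a Hessian
    # with special structure).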
def fit(self, doc, vocabulary):
'''
Latent Dirichlet Allocation Model.
doc is a set of documents, each document is a dictionary.
vocabulary contains the words in all documents.
Return updated alpha and beta.
'''
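        # Variational EM as implemented below:
        #   E-step (per document): phi_{n,k} proportional to beta_{w_n,k} * exp(digamma(gamma_k)),
        #                          gamma_k = alpha_k + sum_n count(w_n) * phi_{n,k}
        #   M-step: beta_{v,k} proportional to the counts accumulated from all
        #           documents' phi; alpha updated by the Newton-Raphson step above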
k = self._k
max_iter = self._max_em_iter
N_d = [len(d) for d in doc] # Get the length of each document.
V = len(vocabulary) # Get the length of vocabulary
M = len(doc) # Get the document number.
# Initialize alpha, beta and the statistics od gamma
        alpha_new, beta_new = self.initialization(V)
gamma_matrix = np.zeros((M, k))
for iter in range(max_iter):
beta_old = beta_new.copy()
alpha_old = alpha_new.copy()
# E step
for i in range(M):
gamma, Phi = self.Estep(doc[i], alpha_old, beta_old, N_d[i])
beta_new = self._accumulate_Phi(beta_new, Phi, doc[i])
gamma_matrix[i,:] = gamma
# M step
alpha_new = self.newton_raphson(alpha_old, gamma_matrix)
beta_new = self._normalization_col(beta_new)
# check convergence
            if self._convergence_(alpha_new, alpha_old) and self._convergence_(np.sum(beta_new, 0), np.sum(beta_old, 0)):
break
        return alpha_new, beta_new
| StarcoderdataPython |
1684175 | <reponame>PhilippMatthes/carnivora<filename>carnivora/instabot/driver.py
import datetime
import threading
import os
from traceback import format_exc
from selenium import webdriver # For webpage crawling
from time import sleep
import platform
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys # For input processing
from random import randint
import pickle # For data management
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from carnivora.instabot.config import Config
from carnivora.instabot.dispatcher import Dispatcher
from carnivora.instabot.log import Log
from tf_open_nsfw.classify_nsfw import classify_nsfw
class Driver(threading.Thread):
def __init__(
self,
username,
password,
screenshot_path,
window_width=1920,
window_height=1080,
):
self.username = username
self.password = password
log_path = Config.bot_path + "log/" + username
if not os.path.exists(log_path):
os.makedirs(log_path)
self.interacting_users_path = log_path + "/interacting_users.pickle"
self.hashtags_path = log_path + "/hashtags.pickle"
self.action_list_path = log_path + "/action_list.pickle"
self.followed_users_all_time_path = log_path + "/followed_users_all_time.pickle"
self.accounts_to_unfollow_path = log_path + "/accounts_to_unfollow.pickle"
self.log_path = log_path + "/log.pickle"
self.running_path = log_path + "/running.pickle"
try:
with open(self.interacting_users_path, "rb") as f:
self.interacting_users = pickle.load(f)
except (FileNotFoundError, EOFError):
with open(self.interacting_users_path, "wb") as f:
self.interacting_users = []
pickle.dump([], f)
try:
with open(self.hashtags_path, "rb") as f:
self.hashtags = pickle.load(f)
except (FileNotFoundError, EOFError):
with open(self.hashtags_path, "wb") as f:
self.hashtags = {}
for h in Config.topics:
self.hashtags[h] = 2
pickle.dump(self.hashtags, f)
try:
with open(self.action_list_path, "rb") as f:
self.action_list = pickle.load(f)
except (FileNotFoundError, EOFError):
with open(self.action_list_path, "wb") as f:
self.action_list = {}
pickle.dump({}, f)
try:
with open(self.followed_users_all_time_path, "rb") as f:
self.followed_accounts = pickle.load(f)
except (FileNotFoundError, EOFError):
with open(self.followed_users_all_time_path, "wb") as f:
self.followed_accounts = {}
pickle.dump({}, f)
try:
with open(self.accounts_to_unfollow_path, "rb") as f:
self.accounts_to_unfollow = pickle.load(f)
except (FileNotFoundError, EOFError):
with open(self.accounts_to_unfollow_path, "wb") as f:
self.accounts_to_unfollow = []
pickle.dump([], f)
try:
from xvfbwrapper import Xvfb
try:
self.vdisplay = Xvfb()
self.vdisplay.start()
except EnvironmentError:
print("Selenium Webdriver will run without Xvfb. There was an error starting Xvfb.")
except ImportError:
print("Selenium Webdriver will run without Xvfb. Install Xvfb to run Selenium Webdriver inside Xvfb.")
path = os.path.dirname(os.path.realpath(__file__))
phantom_js_path = path + "/phantomjs"
self.browser = webdriver.PhantomJS(phantom_js_path)
self.browser.set_window_size(window_width, window_height)
self.screenshot_path = screenshot_path
self.dispatcher = Dispatcher(log_path=log_path)
super(Driver, self).__init__()
def start(self):
super(Driver, self).start()
def running(self):
try:
with open(self.running_path, "rb") as f:
return bool(pickle.load(f))
except (FileNotFoundError, EOFError):
return False
@staticmethod
def now():
return datetime.datetime.now()
def focus(self, element, browser):
if self.running():
browser.execute_script("arguments[0].focus();", element)
def user_followed_already(self, user):
if self.running():
return user in self.followed_accounts
def login(self, username, password, browser, log_path, timeout=5):
if self.running():
Log.update(self.screenshot_path, self.browser, log_path, "Logging in")
browser.get(Config.start_url)
try:
username_field = WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.NAME, "username"))
)
pass_field = WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.NAME, "password"))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in login')
return
username_field.send_keys(username)
pass_field.send_keys(password)
pass_field.send_keys(Keys.RETURN)
Log.update(self.screenshot_path, self.browser, log_path, "Logged in")
def update_action_list(self, author, action_type, topic):
if author not in self.action_list.keys():
value = {"type": action_type, "time": Driver.now(), "topic": topic}
self.action_list[author] = [value]
else:
value = {"type": action_type, "time": Driver.now(), "topic": topic}
author_actions = self.action_list[author]
author_actions.append(value)
self.action_list[author] = author_actions
with open(self.action_list_path, "wb") as f:
pickle.dump(self.action_list, f)
def on_dialog_page(self, browser, log_path, check_timeout=5):
if self.running():
return True
#try:
# WebDriverWait(browser, check_timeout).until(
# ec.presence_of_element_located((By.XPATH, Config.dialog_xpath))
# )
#except (TimeoutException, NoSuchElementException):
# Log.update(self.screenshot_path, self.browser, log_path, 'No longer on dialog page.')
# return False
#else:
# return True
def comment(self, topic, browser, log_path, timeout=5):
if self.running():
author = self.author(browser=browser, log_path=log_path)
query = Config.comments[randint(0, len(Config.comments) - 1)]
say = query.format(author, Config.smileys[randint(0, len(Config.smileys) - 1)])
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.comment_xpath))
)
WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.comment_xpath))
)
comment_button = WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.comment_submit_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in comment')
return
comment_button.click()
comment_field = browser.find_element_by_xpath(Config.comment_xpath)
comment_field.send_keys(say)
comment_field.send_keys(Keys.RETURN)
Log.update(self.screenshot_path, self.browser, log_path,
"Commented on " + str(author) + "s picture with: " + say)
self.update_action_list(author=author, action_type="comment", topic=topic)
def search(self, browser, log_path, query):
if self.running():
browser.get("https://www.instagram.com/explore/tags/" + query + "/")
Log.update(self.screenshot_path, self.browser, log_path, "Searching for " + query + ".")
# Selects the first picture in a loaded topic screen
def select_first(self, browser, log_path, timeout=5):
if self.running():
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.first_ele_xpath))
)
pictures = browser.find_elements_by_xpath(Config.first_ele_xpath)
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'NoSuchElementException in select_first: ' + str(format_exc()))
return
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in select_first')
return
if len(pictures) > 9:
first_picture = pictures[9]
else:
first_picture = pictures[len(pictures) - 1]
self.focus(first_picture, browser=browser)
first_picture.click()
def next_picture(self, browser):
if self.running():
actions = ActionChains(browser)
actions.send_keys(Keys.ARROW_RIGHT)
actions.perform()
def author(self, browser, log_path, timeout=5):
if self.running():
try:
author_element = WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.author_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in author')
return
return str(author_element.get_attribute("title"))
def already_liked(self, browser, log_path, error_timeout=5):
if self.running():
try:
WebDriverWait(browser, error_timeout).until(
ec.presence_of_element_located((By.XPATH, Config.like_button_full_xpath))
)
except TimeoutException:
return False
Log.update(self.screenshot_path, self.browser, log_path, 'Post was already liked.')
return True
# Likes a picture
def like(self, browser, log_path, topic, timeout=5):
if self.running():
author = self.author(browser=browser, log_path=log_path)
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.like_button_xpath))
)
like_button = WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.like_button_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in like')
return
like_button.click()
src = self.extract_picture_source(browser=browser, log_path=log_path)
Log.update(self.screenshot_path, self.browser, log_path, "Liked picture/video by: " + author, image=src)
self.update_action_list(author=author, action_type="like", topic=topic)
# Unfollows a user
def unfollow(self, browser, log_path, name, timeout=5):
if self.running():
browser.get("https://www.instagram.com/" + name + "/")
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.unfollow_xpath))
)
unfollow_button = WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.unfollow_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in unfollow')
return
unfollow_button.click()
Log.update(self.screenshot_path, self.browser, log_path, "Unfollowed: " + name)
def update_accounts_to_unfollow(self, author):
self.accounts_to_unfollow.append(author)
with open(self.accounts_to_unfollow_path, "wb") as f:
pickle.dump(self.accounts_to_unfollow, f)
def update_followed_accounts(self, author):
self.followed_accounts.update({author: Driver.now()})
with open(self.followed_users_all_time_path, "wb") as userfile:
pickle.dump(self.followed_accounts, userfile)
# Follows a user
def follow(self, browser, log_path, topic, timeout=15):
if self.running():
author = self.author(browser=browser, log_path=log_path)
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.follow_xpath))
)
follow_button = WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.follow_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in follow')
return
follow_button.click()
Log.update(self.screenshot_path, browser=self.browser, log_path=self.log_path, text="Followed: " + author)
self.update_action_list(author=author, action_type="follow", topic=topic)
self.update_accounts_to_unfollow(author=author)
self.update_followed_accounts(author=author)
def open_unfollow_screen(self, browser, log_path, timeout=15):
if self.running():
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.following_xpath))
)
heart = WebDriverWait(browser, timeout).until(
ec.element_to_be_clickable((By.XPATH, Config.following_xpath))
)
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in open_unfollow_screen')
return
heart.click()
def update_interacting_users(self, user):
self.interacting_users.append(user)
with open(self.interacting_users_path, "wb") as f:
pickle.dump(self.interacting_users, f)
def check_follows(self, browser, log_path, timeout=15):
if self.running():
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.sections_xpath))
)
sections = browser.find_elements_by_xpath(Config.sections_xpath)
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'NoSuchElementException in check_follows: ' + str(format_exc()))
return
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in check_follows')
return
users = []
for element in sections:
try:
profile = element.find_element_by_xpath(Config.local_name_xpath)
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'NoSuchElementException in check_follows: ' + str(format_exc()))
return
name = profile.get_attribute("title")
users.append(name)
for user in users:
if user not in self.interacting_users:
if user in self.action_list.keys():
actions = self.action_list[user]
for action in actions:
self.hashtags[action["topic"]] += 1
self.update_interacting_users(user=user)
def update_hashtags(self, hashtag, boost=0.1):
if hashtag in self.hashtags:
self.hashtags[hashtag] += boost
else:
self.hashtags[hashtag] = boost
with open(self.hashtags_path, "wb") as f:
pickle.dump(self.hashtags, f)
def store_hashtags(self, browser, log_path, timeout=5):
if self.running():
all_hashtags = self.extract_hash_tags(browser=browser, log_path=log_path, timeout=timeout)
for hashtag in all_hashtags:
self.update_hashtags(hashtag=hashtag)
def extract_hash_tags(self, browser, log_path, timeout=5):
if self.running():
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.hashtags_xpath))
)
sections = browser.find_elements_by_xpath(Config.hashtags_xpath)
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'Exception in extract_hash_tags: ' + str(format_exc()))
return []
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in extract_hash_tags')
return []
all_hashtags = []
for section in sections:
all_hashtags.extend(set(part[1:] for part in section.text.split() if part.startswith('#')))
return all_hashtags
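
    # A hedged sketch of the extraction expression above on plain text (no
    # Selenium involved); the sample caption is made up, not from a real post:
    #   text = "sunset at the beach #sunset #beach #sunset"
    #   sorted(set(part[1:] for part in text.split() if part.startswith('#')))
    #   -> ['beach', 'sunset']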
def extract_picture_source(self, browser, log_path, timeout=5):
if self.running():
try:
WebDriverWait(browser, timeout).until(
ec.presence_of_element_located((By.XPATH, Config.image_div_container_xpath))
)
sections = browser.find_elements_by_xpath(Config.image_div_container_xpath)
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'Exception in extract_picture_source: ' + str(format_exc()))
return
except TimeoutException:
Log.update(self.screenshot_path, self.browser, log_path, 'Timeout in extract_picture_source')
return
for section in sections:
try:
image = section.find_element_by_tag_name("img")
except NoSuchElementException:
Log.update(self.screenshot_path, self.browser, log_path,
'Exception in extract_picture_source: ' + str(format_exc()))
return
return image.get_attribute("src")
def post_hashtags_are_sfw(self, browser, log_path, timeout=5):
if self.running():
all_hashtags = ["#"+x for x in self.extract_hash_tags(browser=browser, log_path=log_path, timeout=timeout)]
for hashtag in all_hashtags:
if hashtag in Config.nsfw_hashtags:
Log.update(
screenshot_path=self.screenshot_path,
browser=self.browser,
log_path=self.log_path,
text="This post contains the blacklisted hashtag {}.".format(hashtag),
)
return False
Log.update(
screenshot_path=self.screenshot_path,
browser=self.browser,
log_path=self.log_path,
text="This post contains none of the blacklisted hashtags. (Hashtags: {})".format(", ".join(all_hashtags)),
)
return True
def post_is_sfw(self, browser, log_path, limit=0.1):
if self.running():
if not self.post_hashtags_are_sfw(browser=browser, log_path=log_path):
return False
image_url = self.extract_picture_source(browser=browser, log_path=log_path)
if not image_url:
Log.update(
screenshot_path=self.screenshot_path,
browser=self.browser,
log_path=self.log_path,
text="Picture source could not be extracted."
)
return True
sfw, nsfw = classify_nsfw(image_url)
Log.update(
screenshot_path=self.screenshot_path,
browser=self.browser,
log_path=self.log_path,
text="Analysis of this post yielded it to be {}% SFW.".format(int(100 * sfw)),
image=image_url
)
return nsfw < limit
def run(self):
        self.login(browser=self.browser, log_path=self.log_path, password=self.password, username=self.username)
while self.running():
try:
self.open_unfollow_screen(browser=self.browser, log_path=self.log_path)
self.check_follows(browser=self.browser, log_path=self.log_path)
top_hashtags = sorted(self.hashtags.keys(), key=lambda k: self.hashtags[k], reverse=True)[:20]
for i, topic in enumerate(top_hashtags):
self.search(query=topic, browser=self.browser, log_path=self.log_path)
self.select_first(browser=self.browser, log_path=self.log_path)
delay, action = self.dispatcher.next_action()
Log.update(self.screenshot_path, self.browser, self.log_path,
"Dispatcher selected action: {} (Sleeping {}s)".format(action, delay))
sleep(delay)
if action == "comment":
if self.post_is_sfw(browser=self.browser, log_path=self.log_path):
self.comment(
topic=topic,
browser=self.browser,
log_path=self.log_path
)
self.dispatcher.log_action("comment")
self.store_hashtags(browser=self.browser, log_path=self.log_path)
elif action == "like":
count = 0
while self.already_liked(browser=self.browser, log_path=self.log_path):
if not self.on_dialog_page(self.browser, self.log_path):
break
if count > 10:
break
self.next_picture(browser=self.browser)
count += 1
if self.on_dialog_page(self.browser, self.log_path):
if self.post_is_sfw(browser=self.browser, log_path=self.log_path):
self.like(topic=topic, browser=self.browser,
log_path=self.log_path)
self.dispatcher.log_action("like")
self.store_hashtags(browser=self.browser, log_path=self.log_path)
elif action == "follow":
count = 0
while self.user_followed_already(self.author(browser=self.browser, log_path=self.log_path)):
if not self.on_dialog_page(self.browser, self.log_path):
break
if count > 10:
break
self.next_picture(browser=self.browser)
count += 1
if self.on_dialog_page(self.browser, self.log_path):
if self.post_is_sfw(browser=self.browser, log_path=self.log_path):
self.follow(
topic=topic,
browser=self.browser,
log_path=self.log_path
)
self.dispatcher.log_action("follow")
self.store_hashtags(browser=self.browser, log_path=self.log_path)
elif action == "unfollow":
if len(self.accounts_to_unfollow) > 50:
this_guy = self.accounts_to_unfollow[0]
self.unfollow(name=this_guy, browser=self.browser, log_path=self.log_path)
del self.accounts_to_unfollow[0]
self.dispatcher.log_action("unfollow")
except Exception:
Log.update(self.screenshot_path, self.browser, self.log_path,
text='General Exception: ' + str(format_exc()))
Log.update(self.screenshot_path, self.browser, self.log_path, text='Stopped bot')
if self.vdisplay:
self.vdisplay.stop()
super(Driver, self).join()
| StarcoderdataPython |
3384895 | import torch
import torch.nn as nn
import numpy as np
__all__ = ['Pruner']
class Pruner:
def __init__(self, net, rank_type='l2_weight', num_class=1000, \
safeguard=0, random=False, device='cuda', resource='FLOPs'):
self.net = net
self.rank_type = rank_type
        self.chains = {} # chaining convs (an activation index represents a conv)
self.y = None
self.safeguard = safeguard
self.random = random
self.device = device
        self.resource_type = resource
self.reset()
def forward(self, x):
raise NotImplementedError
def get_valid_filters(self):
raise NotImplementedError
def get_valid_flops(self):
raise NotImplementedError
def count_params(self):
        '''Count the number of the network's trainable parameters'''
params_conv, params_all = 0, 0
for module in self.net.modules():
if isinstance(module, nn.Conv2d):
params_all += np.prod(module.weight.size())
params_conv += np.prod(module.weight.size())
if isinstance(module, nn.Linear):
params_all += np.prod(module.weight.size())
return params_all, params_conv
def reset(self):
self.cur_flops = 0
self.base_flops = 0
self.cur_size, conv_size = self.count_params()
self.base_size = self.cur_size - conv_size
self.quota = None
self.filter_ranks = {}
self.rates = {}
self.cost_map = {}
self.in_params = {}
self.omap_size = {}
self.conv_in_channels = {}
self.conv_out_channels = {}
def flop_regularize(self, l):
for key in self.filter_ranks:
self.filter_ranks[key] -= l * self.rates[key]
def compute_rank(self, grad):
activation_idx = len(self.activations) - self.grad_idx - 1
activation = self.activations[activation_idx]
if self.rank_type == 'analysis':
if activation_idx not in self.filter_ranks:
self.filter_ranks[activation_idx] = activation * grad
else:
self.filter_ranks[activation_idx] = torch.cat((self.filter_ranks[activation_idx], activation*grad), 0)
else:
if self.rank_type == 'meanAbsMeanImpact':
values = torch.abs((grad * activation).sum((2, 3)) / np.prod(activation.shape[2:]))
# NxC to C
values = values.sum(0) / activation.size(0)
if activation_idx not in self.filter_ranks:
self.filter_ranks[activation_idx] = torch.zeros(activation.size(1), device=self.device)
self.filter_ranks[activation_idx] += values
self.grad_idx += 1
def calculate_cost(self, encoding):
pass
def get_unit_flops_for_layer(self, layer_id):
pass
def get_unit_filters_for_layer(self, layer_id):
pass
def one_shot_lowest_ranking_filters(self, target):
# Consolidation of chained channels
        # Use the maximum rank among the chained channels as the criterion for those channels.
        # Greedily pick from the lowest rank.
        #
        # This returns a list of [layer act_index, filter_index, rank]
data = []
chained = []
# keys of filter_ranks are activation index
checked = []
org_filter_size = {}
new_filter_size = {}
for i in sorted(self.filter_ranks.keys()):
org_filter_size[i] = self.filter_ranks[i].size(0)
if i in checked:
continue
current_chain = []
k = i
while k in self.chains:
current_chain.append(k)
k = self.chains[k]
current_chain.append(k)
checked.append(k)
sizes = np.array([self.filter_ranks[x].size(0) for x in current_chain])
max_size = np.max(sizes)
for k in current_chain:
new_filter_size[k] = max_size
ranks = [self.filter_ranks[x].to(self.device) for x in current_chain]
cnt = torch.zeros(int(max_size), device=self.device)
for idx in range(len(ranks)):
pass
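
    # Hedged sketch (not part of the original class): a minimal greedy pick of
    # the `target` lowest-ranked filters across layers, without the chained-
    # channel consolidation that the comments above describe.
    def greedy_lowest_ranking_filters_sketch(self, target):
        data = []
        for layer_idx in sorted(self.filter_ranks.keys()):
            ranks = self.filter_ranks[layer_idx]
            for filter_idx in range(ranks.size(0)):
                data.append((layer_idx, filter_idx, float(ranks[filter_idx])))
        # Sort ascending by rank and keep the `target` weakest filters.
        data.sort(key=lambda item: item[2])
        return data[:target]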
def one_shot_lowest_ranking_filters_multi_targets(self, targets):
pass
def pruning_with_transformations(self, original_dist, \
perturbation, target, masking=False):
pass
def pruning_with_transformations_multi_target(self, original_dist,\
perturbation, target, masking=False):
pass
def normalize_ranks_per_layer(self):
pass
def get_pruning_plan_from_layer_budget(self, layer_budget):
pass
def sort_weights(self):
pass
def get_pruning_plan_from_importance(self, target, importance):
pass
def pack_pruning_target(self, filters_to_prune_per_layer, get_segment=True,\
progressive=True):
pass
def get_pruning_plan(self, num_filters_to_prune, progressive=True,\
get_segment=False):
pass
def get_uniform_ratio(self, target):
pass
def uniform_grow(self, growth_rate):
pass
def get_pruning_plan_multi_target(self, targets):
pass
| StarcoderdataPython |
148757 | # After creating a new tags (template tags) file, the server must be restarted
from django import template
from ..models import Ouser
from comment.models import CommentUser
register = template.Library()
@register.simple_tag()
def get_user_data(uid):
"""返回用户的信息"""
user = Ouser.objects.filter(id=uid)
if user:
return user[0]
else:
return ''
@register.simple_tag()
def get_tourist_data(uid):
"""返回评论者的信息"""
user = CommentUser.objects.filter(id=uid)
if user:
return user[0]
else:
return ''
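
# Hedged usage sketch (assumed template code, not from this repo; the variable
# names are illustrative): after restarting the server, load this tag library
# in a template and call the tags.
#
#   {% load <this_tag_module> %}
#   {% get_user_data entry.author_id as author %}
#   {{ author }}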
| StarcoderdataPython |
190136 | <reponame>teslafields/code-challenges<filename>longest_palindromic.py
class Solution:
def longestPalindrome(self, s):
slen = len(s)
longest = ''
longest_len = 0
for i in range(1, slen-1):
l, r = i-1, i+1
subs = s[i]
while l >= 0 and r < slen:
if s[l] != s[r]:
if s[r] == s[i]:
l+=1
subs = ''
else:
break
subs = s[l] + subs + s[r]
l -= 1
r += 1
            subs_len = len(subs)
if subs_len > 1:
if subs_len > longest_len:
longest_len = subs_len
longest = subs
return longest
# Test program
s = "tracecars"
print(str(Solution().longestPalindrome(s)))
# racecar
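
# Hedged alternative sketch (not part of the original solution): the classic
# expand-around-center form, which also covers even-length palindromes and the
# string boundaries that the range(1, slen-1) loop above skips.
def longest_palindrome_expand(s):
    best = ""
    for center in range(len(s)):
        for l, r in ((center, center), (center, center + 1)):
            while l >= 0 and r < len(s) and s[l] == s[r]:
                l -= 1
                r += 1
            if r - l - 1 > len(best):
                best = s[l + 1:r]
    return best

print(longest_palindrome_expand("tracecars"))  # racecar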
| StarcoderdataPython |
103211 | <reponame>sroet/openpathsampling-cli
from paths_cli.compiling.core import InstanceCompilerPlugin
from paths_cli.plugin_management import OPSPlugin
class CategoryPlugin(OPSPlugin):
"""
    Category plugins only need to be made for top-level categories.
"""
def __init__(self, plugin_class, aliases=None, requires_ops=(1, 0),
requires_cli=(0, 3)):
super().__init__(requires_ops, requires_cli)
self.plugin_class = plugin_class
if aliases is None:
aliases = []
self.aliases = aliases
@property
def name(self):
return self.plugin_class.category
def __repr__(self):
return (f"CompilerPlugin({self.plugin_class.__name__}, "
f"{self.aliases})")
class EngineCompilerPlugin(InstanceCompilerPlugin):
category = 'engine'
class CVCompilerPlugin(InstanceCompilerPlugin):
category = 'cv'
class VolumeCompilerPlugin(InstanceCompilerPlugin):
category = 'volume'
class NetworkCompilerPlugin(InstanceCompilerPlugin):
category = 'network'
class SchemeCompilerPlugin(InstanceCompilerPlugin):
category = 'scheme'
class StrategyCompilerPlugin(InstanceCompilerPlugin):
category = 'strategy'
class ShootingPointSelectorPlugin(InstanceCompilerPlugin):
category = 'shooting-point-selector'
class InterfaceSetPlugin(InstanceCompilerPlugin):
category = 'interface-set'
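
# Hedged sketch (not part of the original module): how a top-level category
# might be wrapped. The alias list is illustrative, not the project's real
# registration, and OPSPlugin's default version requirements are assumed.
if __name__ == "__main__":  # pragma: no cover
    engine_category = CategoryPlugin(EngineCompilerPlugin, aliases=["engines"])
    print(engine_category.name)  # -> engine
    print(engine_category)       # -> CompilerPlugin(EngineCompilerPlugin, ['engines'])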
| StarcoderdataPython |
1667458 | # Generated by Django 3.1.2 on 2020-12-25 21:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mvp', '0010_brandcollector'),
]
operations = [
migrations.AddField(
model_name='brandcollector',
name='message',
field=models.TextField(default='hello admin!', max_length=500, verbose_name='Feedback Message'),
),
]
| StarcoderdataPython |
1645865 | <gh_stars>0
import abc
import os
import time
import typing
import requests
from flask import current_app
from urllib.parse import urlencode
from src.models.bright import HealthCheck, HealthCheckStatus
__all__ = ("BrightAPI",)
class BrightBase(abc.ABC):
"""Base class for Bright API."""
default_headers = {"Content-Type": "application/json", "Accept": "application/json"}
def __init__(
self,
url=None,
session=None,
basic_auth=(),
cert_auth=(),
verify=True,
timeout=5,
):
"""
        :param session: an already existing requests session.
:param basic_auth: a tuple of username and password
to use when establishing a session via HTTP BASIC
authentication.
:param cert_auth: a tuple of cert and key to use
when establishing a session. The pair is used for both
authentication and encryption.
        :param verify: whether to verify the SSL connection
:param timeout: how much time until connection is dropped, in seconds.
"""
self.url = url
if session is None:
self._session = requests.Session()
else:
self._session = session
if basic_auth:
self._create_basic_session(basic_auth)
        elif cert_auth:
self._create_cert_session(cert_auth)
self._session.headers.update(self.default_headers)
self.verify = verify
self.timeout = timeout
def _create_basic_session(self, basic_auth):
self._session.auth = basic_auth
def _create_cert_session(self, cert_auth):
self._session.cert = cert_auth
@property
def version(self):
base = f"{self.url}/json"
params = {
"service": "cmmain",
"call": "getVersion",
}
response = self._session.post(
url=base, json=params, verify=self.verify, timeout=self.timeout
).json()
return response.get("cmVersion")
@abc.abstractmethod
def measurable(self, name):
pass
@staticmethod
@abc.abstractmethod
def measurable_mapper(raw):
pass
class Bright(BrightBase):
"""Generic Bright implementation."""
def measurable(self, name):
raise NotImplementedError("use a specific Bright version")
@staticmethod
def measurable_mapper(raw):
raise NotImplementedError("use a specific Bright version")
class Bright7(BrightBase):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.base = f"{self.url}/json"
def entity(self, name):
params = {"service": "cmdevice", "call": "getDevice", "arg": name}
return self._session.post(
url=self.base, json=params, verify=self.verify, timeout=self.timeout
).json()
def measurable(self, name):
params = {"service": "cmmon", "call": "getHealthcheck", "arg": name}
return self._session.post(
url=self.base, json=params, verify=self.verify, timeout=self.timeout
).json()
def latest_measurable_data(self, measurable, entity) -> dict:
measurable_id = self.measurable(measurable).get("uniqueKey")
entity_id = self.entity(entity).get("uniqueKey")
if not entity_id or not measurable_id:
return {}
params = {
"service": "cmmon",
"call": "getLatestPickedRates",
"args": [[entity_id], [{"metricId": measurable_id}]],
}
return [
dict(**data, measurable=measurable, entity=entity)
for data in self._session.post(
self.url, json=params, verify=self.verify
).json()
]
@staticmethod
def measurable_mapper(raw) -> HealthCheck:
return (
HealthCheck(
name=raw["measurable"],
status=HealthCheckStatus(round(float(raw["rate"]))),
node=raw["entity"],
timestamp=raw["timeStamp"],
seconds_ago=int(time.time() - raw["timeStamp"]),
raw=raw,
)
if raw
else None
)
class Bright8(BrightBase):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.base = f"{self.url}/rest/v1"
def measurable(self, name):
base = f"{self.url}/json"
params = {"service": "cmmon", "call": "getMonitoringMeasurable", "arg": name}
return self._session.post(
url=base, json=params, verify=self.verify, timeout=self.timeout
).json()
def latest_measurable_data(self, measurable, entity=None) -> dict:
params = {
"measurable": measurable,
**({"entity": entity} if entity is not None else {}),
}
url = f"{self.base}/monitoring/latest?{urlencode(params)}"
return (
self._session.get(url=url, verify=self.verify, timeout=self.timeout)
.json()
.get("data", [])
)
@staticmethod
def measurable_mapper(raw) -> HealthCheck:
return (
HealthCheck(
name=raw["measurable"],
status=HealthCheckStatus(raw["value"]),
node=raw["entity"],
timestamp=raw["time"],
seconds_ago=int(raw["age"]),
raw=raw,
)
if raw
else None
)
class BrightAPI:
def __init__(
self,
host=None,
        port=None,
protocol="https",
basic_auth=(),
cert_auth=(),
version=None,
**kwargs,
):
host = host or current_app.config["BRIGHT_COMPUTING_HOST"]
port = port or current_app.config["BRIGHT_COMPUTING_PORT"]
url = f"{protocol}://{host}:{port}"
if not basic_auth and not cert_auth:
cert = current_app.config["BRIGHT_COMPUTING_CERT_PATH"]
key = current_app.config["BRIGHT_COMPUTING_KEY_PATH"]
# handle relative paths
if not os.path.isabs(cert) and not os.path.isabs(key):
instance_path = os.path.dirname(current_app.instance_path)
cert = os.path.join(instance_path, cert)
key = os.path.join(instance_path, key)
cert_auth = (cert, key)
self.version = version or Bright(url=url, **kwargs).version
self.instance = self.factory(self.version)(
url=url, basic_auth=basic_auth, cert_auth=cert_auth, **kwargs
)
@staticmethod
def factory(version):
major_version = int(float(version))
if major_version not in (7, 8):
raise ValueError("Unsupported version")
elif major_version == 7:
return Bright7
elif major_version == 8:
return Bright8
@staticmethod
def supported_measurables():
return current_app.config["SUPPORTED_MEASURABLES"]
def health_checks(self, node=None) -> typing.List[HealthCheck]:
checks = (
self.health_check(key=measurable, node=node)
for measurable in self.supported_measurables()
)
return list(filter(lambda x: x is not None, checks))
def health_check(self, key, node=None) -> typing.Optional[HealthCheck]:
"""Get translated measurable to a health check."""
if key not in self.supported_measurables():
return None
return self.measurable_mapper(
raw=next(
iter(self.latest_measurable_data(measurable=key, entity=node)), None
)
)
def __getattr__(self, name):
return self.instance.__getattribute__(name)
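
# Hedged sketch (not part of the original module): the version-dispatch factory
# can be exercised without any network access; the version strings are made up.
if __name__ == "__main__":  # pragma: no cover
    assert BrightAPI.factory("7.3") is Bright7
    assert BrightAPI.factory("8.2") is Bright8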
| StarcoderdataPython |
21212 | import unittest
import os
import json
import pandas as pd
import numpy as np
class TestingExercise2_07(unittest.TestCase):
def setUp(self) -> None:
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(ROOT_DIR, '..', 'dtypes.json'), 'r') as jsonfile:
self.dtyp = json.load(jsonfile)
self.data = pd.read_csv(os.path.join(ROOT_DIR, '..', 'Datasets', 'earthquake_data.csv'),
dtype = self.dtyp)
def test_object_vars(self):
        self.object_variables = self.data.select_dtypes(include = [object]).nunique().sort_values()
self.assertEqual(max(self.object_variables), (3821))
if __name__ == '__main__':
    unittest.main()

| StarcoderdataPython |
1799776 | import sys
import os
import tempfile
import unittest
import sd3.cfa.graph
import sd3.cfa.shortestpath
class TestGraph(unittest.TestCase):
def test_edge(self):
node_src_id = 1
node_dest_id = 2
node_src = sd3.cfa.graph.Node(node_src_id)
node_dest = sd3.cfa.graph.Node(node_dest_id)
# Test creation
edge = sd3.cfa.graph.Edge(node_src, node_dest)
self.assertIs(edge.get_src(), node_src)
self.assertIs(edge.get_dest(), node_dest)
# Test attributes
first_attr = "a"
second_attr = "b"
self.assertFalse(edge.has_attr(first_attr))
self.assertFalse(edge.has_attr(second_attr))
edge.add_attr(first_attr)
self.assertTrue(edge.has_attr(first_attr))
self.assertFalse(edge.has_attr(second_attr))
edge.add_attr(second_attr)
self.assertTrue(edge.has_attr(first_attr))
self.assertTrue(edge.has_attr(second_attr))
def test_node(self):
node_src_id = 1
node_dest_id = 2
node_data = "Fake data"
node_src = sd3.cfa.graph.Node(node_src_id)
# Test initial state
self.assertEqual(node_src.get_id(), node_src_id)
self.assertIsNone(node_src.get_data())
self.assertListEqual(node_src.get_successors(), [])
self.assertListEqual(node_src.get_predecessors(), [])
# Test data
node_src.set_data(node_data)
self.assertEqual(node_src.get_data(), node_data)
# Test successor add
node_dest = sd3.cfa.graph.Node(node_dest_id)
self.assertFalse(node_src.has_successor(node_dest))
node_src.add_successor(node_dest)
self.assertTrue(node_src.has_successor(node_dest))
self.assertFalse(node_dest.has_successor(node_src))
successors = node_src.get_successors()
self.assertEqual(len(successors), 1)
self.assertIs(successors[0].get_src(), node_src)
self.assertIs(successors[0].get_dest(), node_dest)
predecessors = node_dest.get_predecessors()
self.assertEqual(len(predecessors), 1)
self.assertIs(predecessors[0].get_src(), node_src)
self.assertIs(predecessors[0].get_dest(), node_dest)
def test_node_loop(self):
node = sd3.cfa.graph.Node(1)
node.add_successor(node)
successors = node.get_successors()
predecessors = node.get_predecessors()
self.assertEqual(len(successors), 1)
self.assertEqual(len(predecessors), 1)
self.assertIs(successors[0].get_src(), node)
self.assertIs(successors[0].get_dest(), node)
self.assertIs(predecessors[0].get_src(), node)
self.assertIs(predecessors[0].get_dest(), node)
def test_graph(self):
node_id = 1
second_node_id = 2
        third_node_id = 3
# New graph is empty
graph = sd3.cfa.graph.Graph()
self.assertIsNone(graph.get_entry())
self.assertEqual(len(graph.get_node_list()), 0)
# Forbid entry set on an unknown node
node = sd3.cfa.graph.Node(node_id)
self.assertRaises(KeyError, graph.set_entry, node)
self.assertRaises(KeyError, graph.set_exit, node)
# Test node add
(node, is_new) = graph.add_node(node_id)
self.assertEqual(node.get_id(), node_id)
self.assertTrue(is_new)
self.assertTrue(graph.has_node(node_id))
# Test node add with the same id
(existing_node, is_new) = graph.add_node(node_id)
self.assertEqual(existing_node.get_id(), node_id)
self.assertFalse(is_new)
# Test that node can be fetched
self.assertIs(graph.get_node(node_id), node)
# Test set entry with an invalid node
fake_node = sd3.cfa.graph.Node(node_id)
self.assertRaises(KeyError, graph.set_entry, fake_node)
self.assertRaises(KeyError, graph.set_exit, fake_node)
# Test valid entry set
graph.set_entry(node)
self.assertIs(graph.get_entry(), node)
# Test valid exit set
graph.set_exit(node)
self.assertIs(graph.get_exit(), node)
# Test node list
(second_node, _) = graph.add_node(second_node_id)
self.assertListEqual(graph.get_node_list(), [node, second_node])
# Test node iterator
nodes_id = set()
nodes_it = set()
for it_node_id, it_node in graph.get_node_it():
nodes_id.add(it_node_id)
nodes_it.add(it_node)
self.assertSetEqual(nodes_id, {node_id, second_node_id})
self.assertSetEqual(nodes_it, {node, second_node})
# Test edges
(third_node, _) = graph.add_node(third_node_id)
node.add_successor(second_node)
second_node.add_successor(third_node)
edge_set = set()
for edge in list(graph.get_edges_it()):
edge_set.add((edge.get_src(), edge.get_dest()))
expected_edge_set = set()
expected_edge_set.add((node, second_node))
expected_edge_set.add((second_node, third_node))
self.assertSetEqual(edge_set, expected_edge_set)
# Run draw to test that the function doesn't crash
node_id_str = lambda n: "%s" % n.get_id()
(fd, path) = tempfile.mkstemp(suffix=".png")
graph.draw(path, node_id_str=node_id_str)
os.close(fd)
os.remove(path)
def test_shortest_path(self):
graph = sd3.cfa.graph.Graph()
# Create nodes
(node1, _) = graph.add_node(1)
(node2, _) = graph.add_node(2)
(node3, _) = graph.add_node(3)
(node4, _) = graph.add_node(4)
(node5, _) = graph.add_node(5)
(node6, _) = graph.add_node(6)
(node7, _) = graph.add_node(7)
# Create edges
node1.add_successor(node2)
node1.add_successor(node5)
node1.add_successor(node7)
node2.add_successor(node3)
node2.add_successor(node4)
node3.add_successor(node4)
node3.add_successor(node7)
node5.add_successor(node3)
node5.add_successor(node5)
node5.add_successor(node6)
node7.add_successor(node6)
# Check some paths
self.assertEqual(sd3.cfa.shortestpath.get(graph, node1, node3), 2)
self.assertEqual(sd3.cfa.shortestpath.get(graph, node1, node4), 2)
self.assertEqual(sd3.cfa.shortestpath.get(graph, node1, node6), 2)
self.assertEqual(sd3.cfa.shortestpath.get(graph, node1, node7), 1)
self.assertEqual(sd3.cfa.shortestpath.get(graph, node2, node4), 1)
self.assertEqual(
sd3.cfa.shortestpath.get(graph, node2, node1),
sd3.cfa.shortestpath.INFINITE)
self.assertEqual(sd3.cfa.shortestpath.get(graph, node5, node6), 1)
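
# Added entry point (a small convenience, not in the original module) so the
# tests can run directly with `python`; they are normally collected by a runner.
if __name__ == "__main__":
    unittest.main()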
| StarcoderdataPython |
3327075 | """
This module contains functions related to orbit calculations
"""
# Standard library imports
from typing import Any,Dict,List,Tuple,Sequence
#https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html
# Third party imports
import pandas as pd
import numpy as np
from numpy import rad2deg, deg2rad
from numpy.linalg import norm
import toolz as tz
# Using Newton-Ramson method
from scipy.integrate import solve_ivp
from myastro import util as ut
from myastro import data_catalog as dc
from myastro import timeutil as tc
from myastro import coord as co
from myastro import orbit as ob
from myastro.orbit import EphemrisInput
from myastro.timeutil import PI_HALF, PI, TWOPI
from myastro.keplerian import KeplerianOrbit
from myastro.lagrange_coeff import rv_from_r0v0
from myastro.timeutil import epochformat2jd, jd2mjd, T, mjd2jd, jd2str_date, MDJ_J2000, JD_J2000
from myastro.planets import g_xyz_equat_sun_j2000, g_rlb_eclip_sun_eqxdate
from myastro.util import mu_by_name, mu_Sun
from myastro.orbit import calc_perturbed_accelaration
from myastro.log import get_logger
logger = get_logger(__file__.split('/')[-1])
def f1(vector):
# Utility function
return vector/pow(norm(vector),3)
def calc_F(a, b ,c):
# Function to compute the difference between nearly equal numbers
# Appendix F of Orbital Mechanics
q = a * (2*b-a)/pow(b,2)
    return (pow(q,2) - 3*q + 3) * q / (1 + pow(1-q, 1.5))
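
def calc_F_sanity_check(a=1e-8, b=1.0):
    """Hedged numerical check (not part of the original module).

    For the collinear case c = b - a, the stable form above should equal the
    naive expression 1 - ((b - a)/b)**3, which suffers catastrophic
    cancellation when a << b.
    """
    stable = calc_F(a, b, b - a)
    naive = 1 - ((b - a) / b) ** 3
    return stable, naive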
def my_dfdt(t, y, r0, v0, t0):
"""
    Computes the time derivative of the unknown function. Integrating this derivative
    yields the unknown function itself: this function returns velocity and acceleration,
    so integrating gives position and velocity.

    Args:
        t  : point in time (in modified julian days) at which to evaluate the derivative
        y  : the vector of variables of the differential equation system
               [0..3] delta_r
               [3..6] delta_v (not used in this case)
        r0 : radius vector of the object w.r.t. the Sun (AUs) at time t0
        v0 : velocity vector (AUs/day) at time t0
        t0 : initial point in time

    Returns :
        A vector of 6 components: delta_v and delta_acc
"""
delta_r = y[0:3]
    # The two-body orbit is computed starting at r0,v0 with t-t0 as the elapsed time
r_osc, _ = rv_from_r0v0(mu_Sun, r0, v0, t-t0)
    # The perturbed radius vector is the two-body one plus delta_r
r_pert = r_osc + delta_r
F = 1 - pow(norm(r_osc)/norm(r_pert),3)
#TODO Check if this works, to avoid compute the difference between nearly equal numbers
#F = calc_F(norm(delta_r), norm(r_pert), norm(r_osc))
    # The acceleration increment is computed, including the perturbing acceleration
delta_acc = (-mu_Sun/pow(norm(r_osc),3))*(delta_r- F*r_pert)+calc_perturbed_accelaration(t, r_pert)
return np.concatenate((y[3:6],delta_acc))
def apply_enckes(eph, t_range, r0, v0):
"""
    This is a utility function: the integration may need to be done in two
    intervals, so this function is called for each of them. It applies Encke's
    approach, i.e. calculates the dr and dv used to correct the two-body
    (osculating) orbit.
Args:
eph : Ephemeris data (EphemrisInput)
        t_range : A numpy vector with the time samples; each time sample defines a time interval.
                  The Encke method is applied in each one of these intervals.
                  The time samples are modified julian days.
        r0 : A numpy vector with the initial radius vector (AUs) of the object w.r.t. the Sun
        v0 : A numpy vector with the initial velocity vector (AUs/day)
Returns :
        A dictionary where the key is a time reference in days (modified julian days) and
        the value is a tuple with two vectors: the radius vector r and the velocity vector v
        at that time reference
"""
steps = np.diff(t_range)
result = dict()
clock_mjd = t_range[0]
for idx, step in enumerate(steps) :
#if (idx % 50) == 0 :
# print (f"Iteration: {idx}, Date : {jd2str_date(tc.mjd2jd(clock_mjd))}")
sol = solve_ivp(my_dfdt, (clock_mjd, clock_mjd+step), np.zeros(6), args=(r0, v0, clock_mjd) , rtol = 1e-12)
assert sol.success, "Integration was not OK!"
r_osc, v_osc = rv_from_r0v0 (mu_Sun, r0, v0, step)
# The last integration value is taken
r0 = r_osc + sol.y[:,-1][:3]
v0 = v_osc + sol.y[:,-1][3:6]
# If the clock is in the middle of the ephemeris time, it is inserted in the solution
if eph.from_mjd <= clock_mjd+step <= eph.to_mjd :
result[clock_mjd+step] = (r0, v0)
clock_mjd += step
return result
def calc_eph_by_enckes(body, eph, type='body'):
"""
    Computes the ephemeris for a minor body using Encke's method. This has more precision
    than Cowell's method but takes more time to compute.
Args:
        body : The orbital elements of the body; it can be an asteroid or a comet
eph : Ephemeris data (EphemrisInput)
        type : Indicates if the body is an asteroid ('body') or a comet ('comet')
Returns :
A dataframe with the result
"""
# This matrix just depends on the desired equinox to calculate the obliquity
# to pass from ecliptic coordinate system to equatorial
MTX_equatFeclip = co.mtx_equatFeclip(eph.T_eqx)
T_J2000 = 0.0
# This is to precess from J2000 to ephemeris equinox (normally this matrix will be identity matrix)
MTX_J2000_Teqx = co.mtx_eclip_prec(T_J2000,eph.T_eqx)
# The PQR mtx (from orbital plane to eclipt) is preccesed from equinox of body object to the desired equinox
MTX_J2000_PQR = co.mtx_eclip_prec(body.T_eqx0, T_J2000).dot(body.mtx_PQR)
    # The initial conditions for the integration are calculated, i.e.,
    # the r,v of the body at its epoch (in the book's Ceres example,
    # that epoch is 1983/09/23.00)
# The integration is done in the ecliptic plane and precessed in to J2000
# so the solution will be also ecliptic and precessed.
initial_mjd = body.epoch_mjd
if type == 'body' :
k_orbit = KeplerianOrbit.for_body(body)
else :
k_orbit = KeplerianOrbit.for_comet(body)
r0, v0 = k_orbit.calc_rv(initial_mjd)
# In the ecliptic.
r0 = MTX_J2000_PQR.dot(r0)
v0 = MTX_J2000_PQR.dot(v0)
if eph.from_mjd < initial_mjd < eph.to_mjd :
# If the epoch is in the middle, we need to integrate forward and backwards
#t_range = list(ut.frange(initial_mjd, eph.to_mjd, eph.step))
t_range = ut.my_range(initial_mjd, eph.to_mjd, eph.step)
result_1 = apply_enckes(eph, t_range, r0, v0)
# and backwards
#t_range = list(ut.frange(eph.from_mjd, initial_mjd, eph.step))
#if t_range[-1] != initial_mjd :
# t_range.append(initial_mjd)
t_range = list(reversed(ut.my_range(eph.from_mjd, initial_mjd, eph.step)))
result_2 = apply_enckes(eph, t_range, r0, v0)
solution = tz.merge([result_1,result_2])
elif initial_mjd < eph.from_mjd :
"""
# If the epoch is in the past, we need to integrate forward
t_range_1 = list(ut.frange(initial_mjd, eph.from_mjd, eph.step))
# The previous ensure that initial_mjd is included but the eph.from may be not included
# so we test the final value to know if we need to include manually
if t_range_1[-1] != eph.from_mjd :
t_range_1.append(eph.from_mjd)
"""
# [initial, from]
t_range_1 = ut.my_range(initial_mjd, eph.from_mjd, eph.step)
# [from+step, to]
t_range_2 = ut.my_range(eph.from_mjd+eph.step, eph.to_mjd, eph.step)
"""
t_range_2 = list(ut.frange(eph.from_mjd+eph.step, eph.to_mjd, eph.step))
if len(t_range_2) == 0 :
t_range_2.append(eph.to_mjd)
if t_range_2[-1] != eph.to_mjd :
t_range_2.append(eph.to_mjd)
"""
solution = apply_enckes(eph, t_range_1 + t_range_2, r0, v0)
else :
# If the epoch is in the future, we need to integrate backwards
        # going from the epoch backwards toward the end value of
        # the ephemeris and then the initial value of the ephemeris
#[initial_mjd ---> backwards to --> eph.to.mjd]
t_range_1 = ut.my_range(eph.to_mjd, initial_mjd, eph.step)
"""
t_range_1 = list(ut.frange(eph.to_mjd, initial_mjd, eph.step))
# The previous ensure that eph.to is included but the initial may be not included
# so we test the final value to know if we need to include manually
if t_range_1[-1] != initial_mjd :
t_range_1.append(initial_mjd)
"""
"""
t_range_2 = list(ut.frange(eph.from_mjd, eph.to_mjd, eph.step))
# the previous ensure that eph.from is included but the to_mjd may be included
# but we include in the previous so we need to remove it . We test the last element to check
# if we need to remove it
if t_range_2[-1] == eph.to_mjd :
t_range_2 = t_range_2[0:-1]
t_range = list(reversed(t_range_1)) + list(reversed(t_range_2))
"""
t_range_2 = ut.my_range(eph.from_mjd, eph.to_mjd-eph.step, eph.step)
t_range = list(reversed(t_range_2+t_range_1))
solution = apply_enckes(eph, t_range, r0, v0)
solution = {t:solution[t] for t in sorted(solution.keys())}
return ob.process_solution(solution, MTX_J2000_Teqx, MTX_equatFeclip)
def test_all():
eph = EphemrisInput(from_date="2020.05.25.0",
to_date = "2020.06.15.0",
step_dd_hh_hhh = "02 00.0",
equinox_name = "J2000")
for name in dc.DF_BODYS['Name']:
body = dc.read_body_elms_for(name,dc.DF_BODYS)
print ("Calculating for ",name)
print (calc_eph_by_enckes(body, eph))
def test_body():
eph = EphemrisInput(from_date="2020.05.15.0",
to_date = "2020.06.15.0",
step_dd_hh_hhh = "02 00.0",
equinox_name = "J2000")
CERES = dc.read_body_elms_for("Ceres",dc.DF_BODYS)
df = calc_eph_by_enckes(CERES, eph)
print (df[df.columns[0:8]])
def test_comet():
eph = EphemrisInput(from_date="1985.11.15.0",
to_date = "1986.04.05.0",
step_dd_hh_hhh = "10 00.0",
equinox_name = "J2000")
HALLEY_J2000 = dc.read_comet_elms_for("1P/Halley", dc.DF_COMETS)
df = calc_eph_by_enckes(HALLEY_J2000, eph, 'comet')
print (df[df.columns[0:8]])
def test_all_comets():
"""
eph = EphemrisInput(from_date="2020.05.25.0",
to_date = "2020.06.15.0",
step_dd_hh_hhh = "02 00.0",
equinox_name = "J2000")
"""
df_ = dc.DF_COMETS
df_ = df_.query("0.999 < e < 1.001")
for idx, name in enumerate(df_['Name']):
logger.warning(f"{idx+1} Calculating for {name} ")
body = dc.read_comet_elms_for(name,dc.DF_COMETS)
eph = EphemrisInput.from_mjds( body.epoch_mjd-25, body.epoch_mjd+25, "02 00.0", "J2000" )
print (f"{idx+1} Calculating for {name} ")
print (calc_eph_by_enckes(body, eph, 'comet'))
def test_several_comets():
names = [#"C/1988 L1 (Shoemaker-Holt-Rodriquez)", # Parabolic
"C/-146 P1"]
#"C/1848 P1 (Petersen)"]
for name in names :
body = dc.read_comet_elms_for(name,dc.DF_COMETS)
eph = EphemrisInput.from_mjds( body.epoch_mjd-50, body.epoch_mjd+50, "02 00.0", "J2000" )
print (f"Calculating for {name} ")
df = calc_eph_by_enckes(body, eph, 'comet')
print (df[df.columns[0:8]])
#print (calc_eph_by_enke(body, eph, 'comet'))
@ut.measure
def test_speed():
t = 49400.0
y = np.array([0., 0., 0., 0., 0., 0.])
r0 = np.array([-13.94097381, 11.4769406 , -5.72123976])
v0 = np.array([-0.00211453, 0.0030026 , -0.00107914])
t0 = 49400.0
my_dfdt(t, y, r0, v0, t0)
if __name__ == "__main__" :
#test_all()
#test_2()
#test_all_comets()
#test_comet()
#test_several_comets()
#test_body()
test_comet()
#test_speed()
| StarcoderdataPython |
3201119 | <reponame>kushbanga/phylib
# -*- coding: utf-8 -*-
"""Simple event system."""

from __future__ import print_function
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from contextlib import contextmanager
import logging
import string
import re
from functools import partial
logger = logging.getLogger(__name__)
#------------------------------------------------------------------------------
# Event system
#------------------------------------------------------------------------------
class EventEmitter(object):
"""Singleton class that emits events and accepts registered callbacks.
Example
-------
```python
class MyClass(EventEmitter):
def f(self):
            self.emit('my_event', self, 1, key=2)
o = MyClass()
# The following function will be called when `o.f()` is called.
@o.connect
    def on_my_event(sender, arg, key=None):
print(arg, key)
```
"""
def __init__(self):
self.reset()
self.is_silent = False
def set_silent(self, silent):
"""Set whether to silence the events."""
self.is_silent = silent
def reset(self):
"""Remove all registered callbacks."""
self._callbacks = []
def _get_on_name(self, func):
"""Return `eventname` when the function name is `on_<eventname>()`."""
r = re.match("^on_(.+)$", func.__name__)
if r:
event = r.group(1)
else:
raise ValueError("The function name should be "
"`on_<eventname>`().")
return event
@contextmanager
def silent(self):
"""Prevent all callbacks to be called if events are raised
in the context manager.
"""
self.is_silent = not(self.is_silent)
yield
self.is_silent = not(self.is_silent)
def connect(self, func=None, event=None, sender=None, **kwargs):
"""Register a callback function to a given event.
To register a callback function to the `spam` event, where `obj` is
an instance of a class deriving from `EventEmitter`:
```python
@obj.connect(sender=sender)
def on_spam(sender, arg1, arg2):
pass
```
This is called when `obj.emit('spam', sender, arg1, arg2)` is called.
Several callback functions can be registered for a given event.
The registration order is conserved and may matter in applications.
"""
if func is None:
return partial(self.connect, event=event, sender=sender, **kwargs)
# Get the event name from the function.
if event is None:
event = self._get_on_name(func)
# We register the callback function.
self._callbacks.append((event, sender, func, kwargs))
return func
def unconnect(self, *items):
"""Unconnect specified callback functions or senders."""
self._callbacks = [
(event, sender, f, kwargs)
for (event, sender, f, kwargs) in self._callbacks
if f not in items and sender not in items and
getattr(f, '__self__', None) not in items]
def emit(self, event, sender, *args, **kwargs):
"""Call all callback functions registered with an event.
Any positional and keyword arguments can be passed here, and they will
be forwarded to the callback functions.
Return the list of callback return results.
"""
if self.is_silent:
return
sender_name = sender.__class__.__name__
logger.log(
5, "Emit %s.%s(%s, %s)", sender_name, event,
', '.join(map(str, args)), ', '.join('%s=%s' % (k, v) for k, v in kwargs.items()))
# Call the last callback if this is a single event.
single = kwargs.pop('single', None)
res = []
# Put `last=True` callbacks at the end.
callbacks = [c for c in self._callbacks if not c[-1].get('last', None)]
callbacks += [c for c in self._callbacks if c[-1].get('last', None)]
for e, s, f, k in callbacks:
if e == event and (s is None or s == sender):
f_name = getattr(f, '__qualname__', getattr(f, '__name__', str(f)))
s_name = s.__class__.__name__
logger.log(5, "Callback %s (%s).", f_name, s_name)
res.append(f(sender, *args, **kwargs))
if single:
return res[-1]
return res
#------------------------------------------------------------------------------
# Progress reporter
#------------------------------------------------------------------------------
class PartialFormatter(string.Formatter):
"""Prevent KeyError when a format parameter is absent."""
def get_field(self, field_name, args, kwargs):
try:
return super(PartialFormatter, self).get_field(field_name,
args,
kwargs)
except (KeyError, AttributeError):
return None, field_name
def format_field(self, value, spec):
"""Format a field."""
if value is None:
return '?'
try:
return super(PartialFormatter, self).format_field(value, spec)
except ValueError:
return '?'
def _default_on_progress(sender, message, value, value_max, end='\r', **kwargs):
if value_max == 0: # pragma: no cover
return
if value <= value_max:
progress = 100 * value / float(value_max)
fmt = PartialFormatter()
kwargs['value'] = value
kwargs['value_max'] = value_max
print(fmt.format(message, progress=progress, **kwargs), end=end)
def _default_on_complete(message, end='\n', **kwargs):
# Override the initializing message and clear the terminal
# line.
fmt = PartialFormatter()
print(fmt.format(message + '\033[K', **kwargs), end=end)
class ProgressReporter(object):
"""A class that reports progress done.
Example
-------
```python
pr = ProgressReporter()
pr.set_progress_message("Progress: {progress}%...")
pr.set_complete_message("Completed!")
pr.value_max = 10
for i in range(10):
pr.value += 1 # or pr.increment()
```
You can also add custom keyword arguments in `pr.increment()`: these
will be replaced in the message string.
Emits
-----
* `progress(value, value_max)`
* `complete()`
"""
def __init__(self):
super(ProgressReporter, self).__init__()
self._value = 0
self._value_max = 0
self._has_completed = False
def set_progress_message(self, message, line_break=False):
"""Set a progress message.
The string needs to contain `{progress}`.
"""
end = '\r' if not line_break else None
@connect(sender=self)
def on_progress(sender, value, value_max, **kwargs):
kwargs['end'] = None if value == value_max else end
_default_on_progress(sender, message, value, value_max, **kwargs)
def set_complete_message(self, message):
"""Set a complete message."""
@connect(sender=self)
def on_complete(sender, **kwargs):
_default_on_complete(message, **kwargs)
def _set_value(self, value, **kwargs):
if value < self._value_max:
self._has_completed = False
self._value = value
emit('progress', self, self._value, self._value_max, **kwargs)
if not self._has_completed and self._value >= self._value_max:
emit('complete', self, **kwargs)
self._has_completed = True
def increment(self, **kwargs):
"""Equivalent to `self.value += 1`.
Custom keywoard arguments can also be passed to be processed in the
progress message format string.
"""
self._set_value(self._value + 1, **kwargs)
def reset(self, value_max=None):
"""Reset the value to 0 and the value max to a given value."""
self._value = 0
if value_max is not None:
self._value_max = value_max
@property
def value(self):
"""Current value (integer)."""
return self._value
@value.setter
def value(self, value):
self._set_value(value)
@property
def value_max(self):
"""Maximum value (integer)."""
return self._value_max
@value_max.setter
def value_max(self, value_max):
if value_max > self._value_max:
self._has_completed = False
self._value_max = value_max
def is_complete(self):
"""Return whether the task has completed."""
return self._value >= self._value_max
def set_complete(self, **kwargs):
"""Set the task as complete."""
self._set_value(self.value_max, **kwargs)
@property
def progress(self):
"""Return the current progress as a float value in `[0, 1]`."""
return self._value / float(self._value_max)
#------------------------------------------------------------------------------
# Global event system
#------------------------------------------------------------------------------
_EVENT = EventEmitter()
emit = _EVENT.emit
connect = _EVENT.connect
unconnect = _EVENT.unconnect
silent = _EVENT.silent
set_silent = _EVENT.set_silent
reset = _EVENT.reset
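
# Hedged usage sketch (not part of the original module): the module-level
# singleton exercised with a throwaway sender; all names here are illustrative.
if __name__ == '__main__':  # pragma: no cover
    class _Sender(object):
        pass

    _sender = _Sender()

    @connect(sender=_sender)
    def on_ping(sender, value):
        print('got ping with value', value)

    emit('ping', _sender, 123)  # prints: got ping with value 123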
| StarcoderdataPython |
1793854 | <filename>LevelSpectroscopy/main.py
execfile('interall.py')
execfile('../util.py/energy.py')
import numpy as np
L = 8
Delta = 0.5
deltas = np.linspace(0.0, 1.0, 21)
common_params = {
'model' : 'Spin',
'lattice' : 'chain lattice',
'method' : 'Lanczos',
'L' : L,
'2S' : 2,
'2Sz' : 0}
with open('res.dat', 'w') as output:
for delta in deltas:
generate_defs(common_params)
interall(L, delta, Delta, False)
subprocess.call(['../HPhi.sh', '-e', 'namelist.def'])
enes = energies()
ge = enes[0]
interall(L, delta, Delta, True)
subprocess.call(['../HPhi.sh', '-e', 'namelist.def'])
enes = energies()
output.write('{} {} {} {}\n'.format(delta, ge, enes[0], enes[1]))
| StarcoderdataPython |
3370383 | if __name__ == '__main__':
w = input()
# Setting all flags to False.
    p = False
    q = False
    r = False
    s = False
    t = False
# Looping through all the characters from the given input.
for i in w:
        if not(p) and i.isalnum(): # Checking if the character is alphanumeric.
            p = True
        if not(q) and i.isalpha(): # Checking if the character is alphabetic.
            q = True
        if not(r) and i.isdigit(): # Checking if the character is a digit.
            r = True
        if not(s) and i.islower():
            s = True
        if not(t) and i.isupper():
            t = True
# Printing the changed flags.
print(p)
print(q)
print(r)
print(s)
    print(t)

| StarcoderdataPython |
3242735 | test = {
'name': 'Problem 6',
'points': 2,
'suites': [
{
'cases': [
{
'answer': 'Grouping the restaurants into k clusters by location.',
'choices': [
'Grouping the restaurants into k clusters by location.',
'Finding the mean rating of restaurants for k categories.',
'Predicting the ratings for k restaurants.'
],
'hidden': False,
'locked': False,
'question': 'What are we using the k-means algorithm to achieve?'
},
{
'answer': 'Randomly initialize k centroids.',
'choices': [
'Randomly initialize k centroids.',
r"""
Create a cluster for each centroid consisting of all elements closest to
that centroid.
""",
'Find the centroid (average position) of each cluster.'
],
'hidden': False,
'locked': False,
'question': 'What is the first step of the k-means algorithm?'
},
{
'answer': 'Create a cluster for each centroid consisting of all elements closest to that centroid.',
'choices': [
'Randomly reassign centroids.',
'Group restaurants by latitude.',
r"""
Create a cluster for each centroid consisting of all elements closest to
that centroid.
""",
'Find the centroid (average position) of each cluster.'
],
'hidden': False,
'locked': False,
'question': r"""
After we randomly initialize k centroids, what is the first step
of the iterative portion of the k-means algorithm?
"""
},
{
'answer': 'Find the centroid (average position) of each cluster.',
'choices': [
'Randomly reassign centroids.',
'Group restaurants by latitude.',
r"""
Create a cluster for each centroid consisting of all elements closest to
that centroid.
""",
'Find the centroid (average position) of each cluster.'
],
'hidden': False,
'locked': False,
'question': r"""
What is the second step of the iterative portion of the k-means
algorithm?
"""
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> restaurants1 = [
... make_restaurant('A', [-3, -4], [], 3, [make_review('A', 2)]),
... make_restaurant('B', [1, -1], [], 1, [make_review('B', 1)]),
... make_restaurant('C', [2, -4], [], 1, [make_review('C', 5)]),
... ]
>>> centroids = k_means(restaurants1, 1)
>>> centroids # should be 2-element lists of decimals
[[0.0, -3.0]]
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> restaurants2 = [
... make_restaurant('D', [2, 3], [], 2, [make_review('D', 2)]),
... make_restaurant('E', [0, 3], [], 3, [make_review('E', 1)]),
... ]
>>> centroids = k_means(restaurants2, 1)
>>> centroids # should be 2-element lists of decimals
[[1.0, 3.0]]
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> restaurants1 = [
... make_restaurant('A', [-3, -4], [], 3, [make_review('A', 2)]),
... make_restaurant('B', [1, -1], [], 1, [make_review('B', 1)]),
... make_restaurant('C', [2, -4], [], 1, [make_review('C', 5)]),
... ]
>>> centroids = k_means(restaurants1, 2)
>>> centroids # should be 2-element lists of decimals
[[-3.0, -4.0], [1.5, -2.5]]
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> cluster1 = [
... make_restaurant('A', [-3, -4], [], 3, [make_review('A', 2)]),
... ]
>>> cluster2 = [
... make_restaurant('B', [1, -1], [], 1, [make_review('B', 1)]),
... make_restaurant('C', [2, -4], [], 1, [make_review('C', 5)]),
... make_restaurant('D', [2, 3], [], 2, [make_review('D', 2)]),
... make_restaurant('E', [0, 3], [], 3, [make_review('E', 1)]),
... make_restaurant('F', [-1, 0], [], 3, [make_review('F', 1)]),
... make_restaurant('G', [4, 2], [], 3, [make_review('E', 1)]),
... ]
>>> restaurants = cluster1 + cluster2
>>> centroids = k_means(restaurants, 2)
>>> [[round(x, 5), round(y, 5)] for x, y in centroids]
[[-3.0, -4.0], [1.33333, 0.5]]
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> old_sample = recommend.sample
>>> test.swap_implementations(recommend)
>>> recommend.sample = test.sample # deterministic sampling
>>> make_review, make_restaurant = recommend.make_review, recommend.make_restaurant
>>> k_means = recommend.k_means
""",
'teardown': r"""
>>> recommend.sample = old_sample
>>> test.restore_implementations(recommend)
""",
'type': 'doctest'
}
]
}
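
# Hedged sketch (not part of the autograder data above): the k-means loop the
# concept questions describe, on plain [x, y] location lists. `centroid` and
# `distance` are illustrative helpers, not the project's own functions.
def k_means_sketch(locations, k, max_updates=100):
    import random
    def distance(p, q):
        return ((p[0] - q[0]) ** 2 + (p[1] - q[1]) ** 2) ** 0.5
    def centroid(cluster):
        n = len(cluster)
        return [sum(p[0] for p in cluster) / n, sum(p[1] for p in cluster) / n]
    # Step 1: randomly initialize k centroids from the input locations.
    centroids = random.sample(locations, k)
    for _ in range(max_updates):
        # Step 2: group each location with its closest centroid.
        clusters = [[] for _ in range(k)]
        for p in locations:
            best = min(range(k), key=lambda i: distance(p, centroids[i]))
            clusters[best].append(p)
        # Step 3: move each centroid to the mean of its (non-empty) cluster.
        new_centroids = [centroid(c) if c else centroids[i]
                         for i, c in enumerate(clusters)]
        if new_centroids == centroids:
            break
        centroids = new_centroids
    return centroids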
| StarcoderdataPython |
1725835 | """
Tests for the utilities module.
"""
from __future__ import (absolute_import, division, print_function)
import numpy as np
from gridded.pyugrid import util
class DummyArrayLike(object):
"""
    Class that will look like an array to `asarraylike`, even
    though it won't actually work as an array!
Just for tests. All it does is add a few expected attributes
This will need to be updated when the function is changed.
"""
must_have = ['dtype', 'shape', 'ndim', '__len__', '__getitem__', '__getattribute__']
# pretty kludgy way to do this..
def __new__(cls):
obj = object.__new__(cls)
for attr in cls.must_have:
setattr(obj, attr, None)
return obj
def test_dummy_array_like():
dum = DummyArrayLike()
print(dum)
print(dum.dtype)
for attr in DummyArrayLike.must_have:
assert hasattr(dum, attr)
def test_asarraylike_list():
"""
Passing in a list should return a np.ndarray.
"""
lst = [1, 2, 3, 4]
result = util.asarraylike(lst)
assert isinstance(result, np.ndarray)
assert np.array_equal(result, lst)
def test_asarraylike_array():
"""
    Passing in an ndarray should return the same object.
"""
arr = np.array([1, 2, 3, 4])
result = util.asarraylike(arr)
assert result is arr
def test_asarraylike_dummy():
dum = DummyArrayLike()
result = util.asarraylike(dum)
assert result is dum
| StarcoderdataPython |
3341653 | """
The MIT License (MIT)
Copyright (c) 2014 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from itertools import chain
import hashlib
import pysam
import pyfaidx
import random
import sys
from .constants import ENDCHAR, GAPCHAR, DELCHAR
from .constants import BAM_CMATCH, BAM_CINS, BAM_CDEL, BAM_CREF_SKIP,\
BAM_CSOFT_CLIP, BAM_CHARD_CLIP, BAM_CPAD, BAM_CEQUAL, BAM_CDIFF
def CheckBam(bamfile):
try:
br = pysam.Samfile(bamfile, "rb")
return True
except: return False
def GetSamplesFromBamFiles(bamfiles):
""" Return dictionary of sample -> list of bam files """
samplesToBam = {}
for bam in bamfiles:
try:
br = pysam.Samfile(bam, "rb")
except:
sys.stderr.write("ERROR: Could not open %s. Is this a valid bam file?\n"%bam)
continue
for r in br.header.get("RG", []):
ident = r["ID"]
sample = r.get("SM", ident)
if bam not in samplesToBam.get(sample, []):
samplesToBam[sample] = samplesToBam.get(sample, []) + [bam]
return samplesToBam
def GetDefaultLocation(bamfiles):
"""
Return default location to jump to if no location given.
Look at the first read we see and go there.
If no reads aligned, return 'error'
Args:
bamfiles (list): A list with paths to bamfiles
Returns:
position (string): A string with chromosome and position
"""
default_chrom = None
default_pos = None
aligned = False
position = 'error'
for bam in bamfiles:
try:
br = pysam.Samfile(bam, "rb")
except:
sys.stderr.write("ERROR: Could not open %s. Is this a valid bam file?\n"%bam)
continue
        # Peek at the first hundred reads
read_count = 0
while not (aligned or read_count > 100):
try:
aligned_read = br.next()
except StopIteration:
                    break
if not aligned_read.is_unmapped:
default_chrom = br.getrname(aligned_read.tid)
default_pos = str(aligned_read.pos)
aligned = True
position = ':'.join([default_chrom, default_pos])
break
else:
read_count += 1
return position
def HashSample(sample):
"""
Return sample hash
"""
return hashlib.sha256(sample.encode()).hexdigest()
def ParseCigar(cigar, nucs):
"""
Return list of strings, each item corresponding to a single reference position
"""
rep = []
currentpos = 0
wasinsert = False
for c in cigar:
if c[0] in [BAM_CMATCH, BAM_CEQUAL, BAM_CDIFF]: # match (M, X, =)
for i in range(c[1]):
if wasinsert:
rep[-1] = rep[-1] + nucs[currentpos]
else: rep.append(nucs[currentpos])
wasinsert = False
currentpos += 1
elif c[0] == BAM_CINS: # put insertion in next base position (I)
if wasinsert:
rep[-1] = rep[-1] + nucs[currentpos:currentpos+c[1]]
else:
rep.append(nucs[currentpos:currentpos+c[1]])
currentpos = currentpos+c[1]
wasinsert = True
elif c[0] in [BAM_CDEL, BAM_CREF_SKIP]: # deletion (D) or skipped region from the reference (N)
for i in range(c[1]):
if wasinsert:
rep[-1] = rep[-1] + DELCHAR
else: rep.append(DELCHAR)
wasinsert = False
elif c[0] in [BAM_CSOFT_CLIP, BAM_CHARD_CLIP]: # hard clipping or soft clipping
pass # do nothing
        elif c[0] == BAM_CPAD: # padding (silent deletion from padded reference) (P)
if wasinsert:
rep[-1] = rep[-1] + DELCHAR*c[1]
else: rep.append(DELCHAR*c[1])
wasinsert = True
else:
sys.stderr.write("ERROR: Invalid CIGAR operation (%s) in read %s \n"%(c[0], read.qname))
return rep
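
def _parse_cigar_example():
    """Hedged worked example (not part of the original module).

    For a 2M 1I 2M read with query "ACGTT", the inserted 'G' is prepended to
    the next reference base, giving one string per reference position.
    """
    rep = ParseCigar(((BAM_CMATCH, 2), (BAM_CINS, 1), (BAM_CMATCH, 2)), "ACGTT")
    assert rep == ['A', 'C', 'GT', 'T']
    return rep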
def AddInsertionLocations(all_locs, new_locs):
for item in new_locs:
pos = item[0]
size = item[1]
all_locs[pos] = max(all_locs.get(pos, 0), size)
return all_locs
class AlignmentGrid(object):
"""
Class for storing a grid of alignments
"""
def __init__(self, _bamreaders, _read_groups, _ref, _chrom, _pos, _samples=[], _settings={}):
self.bamreaders = _bamreaders
self.read_groups = _read_groups
self.ref = _ref
# Keep track of shortened chromosome names fasta entry has longer chrom string
# e.g. "1 dna:chromosome" -> "1"
if self.ref:
self.refkeys = dict([(key.split()[0], key) for key in self.ref.keys()])
else: self.refkeys = {}
self.chrom = _chrom
self.startpos = _pos
self.settings = _settings
self.pos = self.startpos-int(self.settings["LOADCHAR"]*0.5)
if self.pos < 0: self.pos = 0
self.usesamples = _samples
        self.samples = list(set(
            chain.from_iterable(rg.values() for rg in _read_groups)))
for item in _samples:
if item not in self.samples: sys.stderr.write("WARNING: %s not in BAM\n"%item)
if len(_samples) > 0:
self.samples = [item for item in _samples if item in self.samples]
self.grid_by_sample = dict([(sample, {}) for sample in self.samples])
self.alnkeys_by_sample = dict([(sample, []) for sample in self.samples])
self.LoadGrid()
def GetSamples(self):
"""
Return list of samples
"""
return self.samples
def GetSampleHashes(self):
"""
Return list of sample hashes
"""
        return list(map(HashSample, self.samples))
def LoadGrid(self):
"""
Load grid of alignments with buffer around start pos
"""
# Get reference
if self.ref is None or self.refkeys.get(self.chrom,"") not in self.ref.keys():
reference = ["N"]*self.settings["LOADCHAR"]
else:
refchrom = self.refkeys[self.chrom]
chromlen = len(self.ref[refchrom])
if chromlen <= self.pos:
return
elif chromlen <= self.pos+self.settings["LOADCHAR"]:
reference = self.ref[refchrom][self.pos:]
else: reference = self.ref[refchrom][self.pos:self.pos+self.settings["LOADCHAR"]]
reference = [reference[i] for i in range(len(reference))]
griddict = {"position": range(self.pos, self.pos+len(reference)), "reference": reference}
# Get reads
region=str("%s:%s-%s"%(self.chrom, max(1, int(self.pos)), int(self.pos+self.settings["LOADCHAR"])))
aligned_reads = []
for bi, br in enumerate(self.bamreaders):
try:
reads = list(br.fetch(region=region))
pileup = br.pileup(region=region)
maxcov = 0
for pcol in br.pileup(region=region):
if pcol.n > maxcov: maxcov = pcol.n
if maxcov > self.settings["DOWNSAMPLE"]:
reads = [item for item in reads if random.random() < self.settings["DOWNSAMPLE"]/float(maxcov)]
aligned_reads.extend((bi, read) for read in reads)
except: pass
readindex = 0
read_properties = []
insertion_locations = {}
maxreadlength = 0
for bamindex, read in aligned_reads:
# get reference position
position = read.pos
# get nucleotides
nucs = read.query
# get CIGAR
cigar = read.cigar
# get strand
strand = not read.is_reverse
if not strand: nucs = nucs.lower()
# get sample
rg = self.read_groups[bamindex].get(
dict(read.tags).get("RG",""),"")
if rg not in self.usesamples: continue
# get representation
if cigar is None:
sys.stderr.write("WARNING: read %s has no CIGAR string. It will not be displayed.\n"%read.qname)
continue
read_properties.append({"pos": position,"sample":rg})
rep = ParseCigar(cigar, nucs)
readlen = len(rep)
if readlen > maxreadlength: maxreadlength = readlen
# Fix boundaries
ins_locs = [(i, len(rep[i])) for i in range(len(rep)) if len(rep[i])>1]
if position < self.pos:
rep = rep[self.pos-position:]
ins_locs = [(item[0] - (self.pos-position), item[1]) for item in ins_locs]
else:
rep = [ENDCHAR]*(position-self.pos)+rep
ins_locs = [(item[0]+(position-self.pos), item[1]) for item in ins_locs]
if len(rep) > len(reference):
rep = rep[0:len(reference)]
ins_locs = set([item for item in ins_locs if item[0] >= 0 and item[0] < len(reference)])
insertion_locations = AddInsertionLocations(insertion_locations, ins_locs)
rep = rep + [ENDCHAR]*(len(reference)-len(rep))
griddict["aln%s"%readindex] = rep
readindex += 1
# Fix insertions
alnkeys = [item for item in griddict.keys() if item != "position"]
for i in insertion_locations:
maxchars = insertion_locations[i]
for ak in alnkeys:
if i != 0: prev = griddict[ak][i-1]
else: prev = ENDCHAR
val = griddict[ak][i]
if len(val) < maxchars:
if ENDCHAR in val or prev[-1] == ENDCHAR: c = ENDCHAR
else: c = GAPCHAR
griddict[ak][i] = c*(maxchars-len(val))+val
# Split by sample
for sample in self.samples:
# if self.settings.get("SORT","bypos") == "bypos": # plan on adding other sort methods later
# Get keys in sorted order
alnkeys = [(read_properties[i]["pos"], "aln%s"%i) for i in range(readindex) if read_properties[i]["sample"] == sample]
alnkeys.sort()
alnkeys = [item[1] for item in alnkeys]
# Get columns we need for the grid
sample_dict = dict([(x, griddict[x]) for x in alnkeys+["position","reference"]])
# Read stacking
sample_dict_collapsed = self.CollapseGridByPosition(sample_dict, alnkeys, maxreadlength=maxreadlength)
self.alnkeys_by_sample[sample] = [item for item in alnkeys if item in sample_dict_collapsed.keys()]
self.grid_by_sample[sample] = sample_dict_collapsed
def MergeRows(self, row1, row2, start, end):
""" merge row2 into row1. row2 spans start-end """
return row1[0:start] + row2[start:]
def CollapseGridByPosition(self, griddict, alncols, maxreadlength=10000):
"""
If more than one read can fit on the same line, put it there
"""
cols_to_delete = set()
col_to_ends = {"dummy":float("inf")}
minend = col_to_ends["dummy"]
prevstart = 0
for col in alncols:
track = griddict[col]
start = prevstart
while start<len(track) and (track[start][0] == ENDCHAR or track[start][0] == GAPCHAR):
start = start + 1
if start >= len(track):
start = 0
end = 0
else:
x = [i for i in range(start, min(start+maxreadlength, len(track))) if track[i][0] != ENDCHAR and track[i][0] != GAPCHAR]
end = x[-1]
if start > minend:
# Find the first column we can add it to
for c in alncols:
if col_to_ends.get(c, float("inf")) < start:
mincol = c
break
# Reset that column with merged alignments
griddict[mincol] = self.MergeRows(griddict[mincol], griddict[col], start, end)
# Set that column for deletion and clear it in case we use it later
griddict[col][start:end+1] = [ENDCHAR*len(griddict[col][i]) for i in range(start, end+1)]
cols_to_delete.add(col)
# Reset end
t = griddict[mincol]
col_to_ends[mincol] = end
minend = min(col_to_ends.values())
col_to_ends[col] = 0
# Make sure we're not deleting mincol
cols_to_delete.discard(mincol)
else: col_to_ends[col] = end
if end < minend: minend = end
prevstart = start
for col in cols_to_delete: del griddict[col]
return griddict
def GetReferenceTrack(self, _pos):
"""
Return string for the reference track
"""
if len(self.grid_by_sample.keys()) == 0: return "N"*self.settings["LOADCHAR"]
refseries = self.grid_by_sample.values()[0]["reference"]
reference = ""
for i in range(len(refseries)):
reference = reference + refseries[i]
return reference.upper()
def GetPositions(self, _pos):
positions = []
        if not self.grid_by_sample: return range(self.pos, self.pos+self.settings["LOADCHAR"])
        refseries = list(self.grid_by_sample.values())[0]["reference"]
for i in range(len(refseries)):
positions.extend([self.pos+i]*len(refseries[i]))
return positions
def GetAlignmentTrack(self, _pos):
"""
Return list of strings for the alignment track
"""
alignments_by_sample = {}
for sample in self.grid_by_sample:
grid = self.grid_by_sample[sample]
alignments = []
for col in self.alnkeys_by_sample[sample]:
alignments.append("".join(grid[col]))
alignments_by_sample[HashSample(sample)] = alignments
return alignments_by_sample
def __str__(self):
return "[AlignmentGrid: %s:%s]"%(self.chrom, self.pos)
class BamView(object):
"""
Class for storing view of Bam Alignments
"""
def __init__(self, _bamfiles, _reffile):
self.bamfiles = _bamfiles
self.bamreaders = []
for bam in self.bamfiles:
try:
br = pysam.Samfile(bam, "rb")
self.bamreaders.append(br)
except:
sys.stderr.write("ERROR: could not open %s. Is this a valid bam file?\n"%bam)
if _reffile != "":
try:
self.reference = pyfaidx.Fasta(_reffile, as_raw=True)
except:
self.reference = None
else: self.reference = None
self.alignment_grid = None
self.read_groups = self.LoadRGDictionary()
def GetSamples(self):
"""
Get list of samples
"""
return self.alignment_grid.GetSamples()
def GetSampleHashes(self):
"""
Get list of sample hashes
"""
return self.alignment_grid.GetSampleHashes()
def LoadRGDictionary(self):
return [
dict([(r["ID"], r.get("SM", r["ID"])) for r in br.header.get("RG", [])])
for br in self.bamreaders]
def GetPositions(self, start_pos):
"""
Get vector of positions for columns
"""
return self.alignment_grid.GetPositions(start_pos)
def GetIndex(self, coord):
"""
Get index into positions vector for a given coordinate
Return -1 if coord not in range
"""
positions = self.GetPositions(0)
if coord < positions[0] or coord > positions[-1]: return -1
return positions.index(coord)
def LoadAlignmentGrid(self, _chrom, _pos, _samples=[], _settings={}):
"""
Load an alignment grid for a view at a specific chr:pos
"""
self.alignment_grid = AlignmentGrid(self.bamreaders, self.read_groups, self.reference, \
_chrom, _pos, _samples=_samples, _settings=_settings)
def GetReferenceTrack(self, start_pos):
"""
Return string for the reference track
"""
return self.alignment_grid.GetReferenceTrack(start_pos)
def GetAlignmentTrack(self, start_pos):
"""
Return list of strings for the alignment tracks
"""
return self.alignment_grid.GetAlignmentTrack(start_pos)
def __str__(self):
return "[BamView: %s]"%self.bamfile
# File: NOL_model/diff_renderer.py
import tensorflow as tf
import dirt
import dirt.matrices as matrices
import dirt.lighting as lighting
import sys,os
import numpy as np
from tensorflow.keras.layers import Layer
def build_projection(cam, w=640, h=480, x0=0, y0=0, nc=0.1, fc=10.0):
q = -(fc + nc) / float(fc - nc)
qn = -2 * (fc * nc) / float(fc - nc)
proj = np.array([
[2 * cam[0, 0] / w, -2 * cam[0, 1] / w, (-2 * cam[0, 2] + w + 2 * x0) / w, 0],
[0, -2 * cam[1, 1] / h, (-2 * cam[1, 2] + h + 2 * y0) / h, 0],
[0, 0, q, qn], # This row is standard glPerspective and sets near and far planes
[0, 0, -1, 0]
])
proj[1, :] *= -1.0
return proj.T
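# Minimal usage sketch -- the intrinsics below are illustrative values, not
# from any particular camera:
#   K = np.array([[572.4,   0.0, 325.3],
#                 [  0.0, 573.6, 242.0],
#                 [  0.0,   0.0,   1.0]])
#   P = build_projection(K, w=640, h=480)
# P is 4x4 and already transposed, since vertices are multiplied below as
# row vectors (tf.matmul(vertices, projection_matrix)).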
class neural_rendering_crop_resize(Layer):
def __init__(self,img_h,img_w,cam_K,target_h=-1,target_w=-1,near=0.1,far=10.0,ch_dim=3,**kwargs):
self.img_h=img_h
self.img_w=img_w
if(target_h==-1):
self.target_h=img_h
self.target_w=img_w
else:
self.target_h=target_h
self.target_w=target_w
self.cam_K = cam_K
self.near = near
self.far = far
#self.right = ((self.img_w-1.)/2.)*near/self.focal
self.projection_matrix = tf.constant(build_projection(cam_K,w=self.img_w,h=self.img_h),tf.float32)
self.ch_dim=ch_dim
super(neural_rendering_crop_resize,self).__init__(**kwargs)
def build(self,input_shape):
super(neural_rendering_crop_resize,self).build(input_shape)
def call(self,x):
#N: num. of vertices, F: num of faces
vertices = x[0] #(batchxNx3) #same for each batch
uv_map = x[1] #(batchxNx2) #different for each batch
faces = tf.cast(x[2],tf.int32) #batchxFx3, same for each batch
texture = x[3] #batchxWxHxCH different for each batch
poses=x[4] #batch x n_target_poses x 4x4, same for each batch
bboxes=x[5] #batch x n_target_poses x 4 -> should be normalized by the full res
#ignore batch dimension of poses
vertices_mult = tf.tile(vertices,[tf.shape(poses)[1],1,1])
vert_uvs_mult = tf.tile(uv_map,[tf.shape(poses)[1],1,1])
faces_multi = tf.tile(faces,[tf.shape(poses)[1],1,1])
texture_multi = tf.tile(texture,[tf.shape(poses)[1],1,1,1])
poses_t=tf.transpose(poses,[1,0,2,3]) #posexbatchx4x4
poses_t=tf.reshape(poses_t,[-1,4,4])
bboxes_t = tf.transpose(bboxes,[1,0,2])
bboxes_t=tf.reshape(bboxes_t,[-1,4]) #(posexbatch)x4
# Transform vertices from camera to clip space
vertices_objects, vertices_cameras,vertices_clips,vertices_normals,view_matrices=\
tf.map_fn(self.transform_vertices,(vertices_mult,poses_t,faces_multi),dtype=(tf.float32,tf.float32,tf.float32,tf.float32,tf.float32))
gbuffer_temp = dirt.rasterise_batch(
background=tf.zeros([tf.shape(vertices_mult)[0],self.img_h, self.img_w, 3]),
vertices=vertices_clips,
vertex_colors=tf.concat([
tf.ones_like(vertices_objects[:,:, :1]), #1 mask
vert_uvs_mult
], axis=2),
faces=faces_multi,
height=self.img_h,
width=self.img_w,
channels=3
)
rendered_feature_raw = tf.map_fn(self.sample_texture,(texture_multi, gbuffer_temp[:,:,:,1:3] ),dtype=tf.float32)
        # pixels where both uv values are zero were not covered by any textured face
uv_projection = gbuffer_temp[:,:,:,1:3]
        mask_old = gbuffer_temp[:,:,:,:1] # coverage mask, regardless of whether each pixel was seen in the input images
        if not (self.target_h == self.img_h and self.target_w == self.img_w):
#for the same pose -> same crop and resize area
mask_old = tf.image.crop_and_resize(mask_old,bboxes_t,
crop_size=(self.target_h,self.target_w),
box_indices=tf.range(0,tf.shape(rendered_feature_raw)[0]))
mask_old = tf.cast(tf.greater(mask_old,0.5),tf.float32)
mask_rend = tf.cast(tf.greater(tf.reduce_sum(gbuffer_temp[:,:,:,1:3],axis=3,keepdims=True),0),tf.float32)
mask_crop = tf.image.crop_and_resize(mask_rend,bboxes_t,
crop_size=(self.target_h,self.target_w),
box_indices=tf.range(0,tf.shape(rendered_feature_raw)[0]))
mask_new = tf.cast(tf.greater(mask_crop,0.5),tf.float32)
rendered_feature = tf.image.crop_and_resize(rendered_feature_raw ,bboxes_t,
crop_size=(self.target_h,self.target_w),
box_indices=tf.range(0,tf.shape(rendered_feature_raw)[0]))
uv_projection = tf.image.crop_and_resize(uv_projection ,bboxes_t,
crop_size=(self.target_h,self.target_w),
box_indices=tf.range(0,tf.shape(rendered_feature_raw)[0]))
else:
mask_new = tf.cast(tf.greater(tf.reduce_sum(gbuffer_temp[:,:,:,1:3],axis=3,keepdims=True),0),tf.float32)
rendered_feature = mask_new*rendered_feature_raw #remove backgrounds
concated_out = tf.concat([mask_new,rendered_feature,mask_old,uv_projection],axis=3) # P X B x H x W x CH
final_out = tf.reshape(concated_out, [tf.shape(poses)[1],-1,self.target_h,self.target_w,self.ch_dim+4])
#(batch*n_poses) x H x W x (ch+1) -> (n_poses x batch x H x W x (ch+1))
#pack each image in a pose
return final_out
def transform_vertices(self,inputs):
vertices = inputs[0]
pose = inputs[1]
faces= inputs[2]
cube_vertices_object = tf.concat([
vertices,
tf.ones_like(vertices[:, -1:])
], axis=1)
cube_normals_world = lighting.vertex_normals_pre_split(cube_vertices_object, faces)
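        # transform_gl = diag(1, -1, -1) flips the y and z axes, converting the
        # computer-vision camera convention (x right, y down, z forward) into
        # OpenGL's (x right, y up, z backward).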
transform_gl = tf.constant([[1,0,0],[0,-1,0],[0,0,-1]],tf.float32)
tensor_rot = tf.matmul(transform_gl,pose[:3,:3])
rot_list = tf.unstack(tf.reshape(tensor_rot,[-1]))
pose_list = tf.unstack(tf.reshape(pose,[-1]))
pose_list[0:3]=rot_list[0:3]
pose_list[4:7]=rot_list[3:6]
pose_list[8:11]=rot_list[6:9]
pose_list[7]=-pose_list[7]
pose_list[11]=-pose_list[11]
cam_pose = tf.stack(pose_list)
cam_pose = tf.reshape(cam_pose,(4,4))
view_matrix = tf.transpose(cam_pose)
cube_vertices_camera = tf.matmul(cube_vertices_object, view_matrix)
cube_vertices_clip = tf.matmul(cube_vertices_camera, self.projection_matrix)
return cube_vertices_object,cube_vertices_camera,cube_vertices_clip,cube_normals_world,view_matrix
def uvs_to_pixel_indices(self,uvs, texture_shape):
# Note that this assumes u = 0, v = 0 is at the top-left of the image -- different to OpenGL!
uvs = uvs[..., ::-1] # change x, y coordinates to y, x indices
#batchxhxwx2 vs [2]
texture_shape = tf.cast(texture_shape, tf.float32) #[h,w]
return tf.clip_by_value(uvs, 0., 1.) * texture_shape
def sample_texture(self,inputs):
texture=inputs[0]
uv_val_ori=inputs[1] #wxhx2
indices = self.uvs_to_pixel_indices(uv_val_ori, tf.shape(texture)[:2])
floor_indices = tf.floor(indices)
frac_indices = indices - floor_indices
floor_indices = tf.cast(floor_indices, tf.int32)
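        # Bilinear interpolation: gather the four integer-grid neighbours of each
        # sample point and blend them with weights (1-fx)(1-fy), fx(1-fy),
        # (1-fx)fy and fx*fy, where (fy, fx) = frac_indices.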
neighbours = tf.gather_nd(
texture,
tf.stack([
floor_indices,
floor_indices + [0, 1],
floor_indices + [1, 0],
floor_indices + [1, 1]
]),
)
top_left, top_right, bottom_left, bottom_right = tf.unstack(neighbours)
return \
top_left * (1. - frac_indices[..., 1:]) * (1. - frac_indices[..., :1]) + \
top_right * frac_indices[..., 1:] * (1. - frac_indices[..., :1]) + \
bottom_left * (1. - frac_indices[..., 1:]) * frac_indices[..., :1] + \
bottom_right * frac_indices[..., 1:] * frac_indices[..., :1]
def compute_output_shape(self,input_shape):
return (tuple([input_shape[4][1],input_shape[0][0],self.target_h,self.target_w,self.ch_dim+4]))
class neural_rendering_gbuffer(Layer):
def __init__(self,img_h,img_w,cam_K,near=0.1,far=10.0,ch_dim=3,**kwargs):
self.img_h=img_h
self.img_w=img_w
self.cam_K = cam_K
self.near = near
self.far = far
#self.right = ((self.img_w-1.)/2.)*near/self.focal
self.projection_matrix = tf.constant(build_projection(cam_K,w=self.img_w,h=self.img_h),tf.float32)
super(neural_rendering_gbuffer,self).__init__(**kwargs)
def build(self,input_shape):
super(neural_rendering_gbuffer,self).build(input_shape)
def call(self,x):
#N: num. of vertices, F: num of faces
vertices = x[0] #(1xNx3) #same for each batch
faces = tf.cast(x[1],tf.int32) #1xFx3, same for each batch
poses=x[2] #1 x 4x4, same for each batch
# Transform vertices from camera to clip space
vert_obj,vert_3d,vert_clip,normals= self.transform_vertices(vertices[0],poses[0],faces[0])
gbuffer_temp = dirt.rasterise(
background=tf.zeros([self.img_h, self.img_w, 11]),
vertices=vert_clip,
vertex_colors=tf.concat([
tf.ones_like(vert_obj[:, :1]), #1 mask
vert_3d,
normals,
vert_obj
], axis=1),
faces=faces[0],
height=self.img_h,
width=self.img_w,
channels=11
)
return tf.expand_dims(gbuffer_temp,axis=0)
def transform_vertices(self,vertices,pose,faces):
#vertices = inputs[0]
#pose = inputs[1]
#faces= inputs[2]
cube_vertices_object = tf.concat([
vertices,
tf.ones_like(vertices[:, -1:])
], axis=1)
cube_normals_world = lighting.vertex_normals_pre_split(cube_vertices_object, faces)
transform_gl = tf.constant([[1,0,0],[0,-1,0],[0,0,-1]],tf.float32)
tensor_rot = tf.matmul(transform_gl,pose[:3,:3])
rot_list = tf.unstack(tf.reshape(tensor_rot,[-1]))
pose_list = tf.unstack(tf.reshape(pose,[-1]))
pose_list[0:3]=rot_list[0:3]
pose_list[4:7]=rot_list[3:6]
pose_list[8:11]=rot_list[6:9]
pose_list[7]=-pose_list[7]
pose_list[11]=-pose_list[11]
cam_pose = tf.stack(pose_list)
cam_pose = tf.reshape(cam_pose,(4,4))
view_matrix = tf.transpose(cam_pose)
cube_vertices_camera = tf.matmul(cube_vertices_object, view_matrix)
cube_vertices_3d = tf.transpose(tf.matmul(pose[:3,:3], tf.transpose(cube_vertices_object[:,:3])))+tf.transpose(pose[:3,3:4]) #3xN
cube_vertices_clip = tf.matmul(cube_vertices_camera, self.projection_matrix)
return cube_vertices_object,cube_vertices_3d,cube_vertices_clip,cube_normals_world
def compute_output_shape(self,input_shape):
        return (tuple([input_shape[0][0],self.img_h,self.img_w,7]))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
To prompt if login requires
"""
from PyInquirer import prompt
def login(crawler):
answer = prompt([
{
'type': 'confirm',
'name': 'login',
'message': 'Do you want to log in?',
'default': False
},
])
if answer['login']:
answer = prompt([
{
'type': 'input',
'name': 'email',
'message': 'Email:',
'validate': lambda val: True if len(val)
            else 'Email address should not be empty'
},
{
'type': 'password',
'name': 'password',
'message': 'Password:',
'validate': lambda val: True if len(val)
            else 'Password should not be empty'
},
])
crawler.login(answer['email'], answer['password'])
# end if
# end def
from collections import defaultdict
class Graph:
def __init__(self, vertices):
self.V = vertices
self.graph = []
def addEdge(self, u, v, w):
self.graph.append([u, v, w])
def find(self, parent, i):
if parent[i] == i:
return i
return self.find(parent, parent[i])
def union(self, parent, rank, x, y):
xroot = self.find(parent, x)
yroot = self.find(parent, y)
if rank[xroot] < rank[yroot]:
parent[xroot] = yroot
elif rank[xroot] > rank[yroot]:
parent[yroot] = xroot
else:
parent[yroot] = xroot
rank[xroot] += 1
def KruskalMST(self):
result = []
i = 0
e = 0
self.graph = sorted(self.graph,
key=lambda item: item[2])
parent = []
rank = []
for node in range(self.V):
parent.append(node)
rank.append(0)
while e < self.V - 1:
u, v, w = self.graph[i]
i = i + 1
x = self.find(parent, u)
y = self.find(parent, v)
if x != y:
e = e + 1
result.append([u, v, w])
self.union(parent, rank, x, y)
minimumCost = 0
print ("Edges in the constructed MST")
for u, v, weight in result:
minimumCost += weight
print("%d -- %d == %d" % (u, v, weight))
print("Minimum Spanning Tree" , minimumCost)
g = Graph(4)
g.addEdge(0, 1, 10)
g.addEdge(0, 2, 6)
g.addEdge(0, 3, 5)
g.addEdge(1, 3, 15)
g.addEdge(2, 3, 4)
# Function call
g.KruskalMST()
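# Expected output:
#   Edges in the constructed MST
#   2 -- 3 == 4
#   0 -- 3 == 5
#   0 -- 1 == 10
#   Minimum Spanning Tree 19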
from __future__ import with_statement
from difflib import SequenceMatcher
import os
from unittest import TestCase
import sys
import django
from django import template
from django.conf import settings
from django.template.loader import render_to_string
from django.template.engine import Engine
import pep8
from sekizai import context_processors
from sekizai.context import SekizaiContext
from sekizai.helpers import get_namespaces
from sekizai.helpers import get_varname
from sekizai.helpers import validate_template
from sekizai.helpers import Watcher
from sekizai.templatetags.sekizai_tags import import_processor
from sekizai.templatetags.sekizai_tags import validate_context
try:
unicode_compat = unicode
except NameError:
unicode_compat = str
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
def null_processor(context, data, namespace):
return ''
def namespace_processor(context, data, namespace):
return namespace
class SettingsOverride(object):
"""
Overrides Django settings within a context and resets them to their initial
values on exit.
Example:
with SettingsOverride(DEBUG=True):
# do something
"""
class NULL:
pass
def __init__(self, **overrides):
self.overrides = overrides
def __enter__(self):
self.old = {}
for key, value in self.overrides.items():
self.old[key] = getattr(settings, key, self.NULL)
setattr(settings, key, value)
def __exit__(self, type, value, traceback):
for key, value in self.old.items():
if value is self.NULL:
delattr(settings, key)
else:
setattr(settings, key, value)
class CaptureStdout(object):
"""
Overrides sys.stdout with a StringIO stream.
"""
def __init__(self):
self.old = None
def __enter__(self):
self.old = sys.stdout
new = sys.stdout = StringIO()
return new
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout = self.old
class Match(tuple): # pragma: no cover
@property
def a(self):
return self[0]
@property
def b(self):
return self[1]
@property
def size(self):
return self[2]
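# On modern Pythons, difflib's get_matching_blocks() already returns Match
# namedtuples; on very old versions it returned plain tuples, which the
# wrapper below upgrades so .a/.b/.size work either way.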
def _backwards_compat_match(thing): # pragma: no cover
if isinstance(thing, tuple):
return Match(thing)
return thing
class BitDiffResult(object):
def __init__(self, status, message):
self.status = status
self.message = message
class BitDiff(object):
"""
Visual aid for failing tests
"""
def __init__(self, expected):
self.expected = [repr(unicode_compat(bit)) for bit in expected]
def test(self, result):
result = [repr(unicode_compat(bit)) for bit in result]
if self.expected == result:
return BitDiffResult(True, "success")
else: # pragma: no cover
longest = max(
[len(x) for x in self.expected] +
[len(x) for x in result] +
[len('Expected')]
)
sm = SequenceMatcher()
sm.set_seqs(self.expected, result)
matches = sm.get_matching_blocks()
lasta = 0
lastb = 0
data = []
for match in [_backwards_compat_match(match) for match in matches]:
unmatcheda = self.expected[lasta:match.a]
unmatchedb = result[lastb:match.b]
unmatchedlen = max([len(unmatcheda), len(unmatchedb)])
unmatcheda += ['' for x in range(unmatchedlen)]
unmatchedb += ['' for x in range(unmatchedlen)]
for i in range(unmatchedlen):
data.append((False, unmatcheda[i], unmatchedb[i]))
for i in range(match.size):
data.append((
True, self.expected[match.a + i], result[match.b + i]
))
lasta = match.a + match.size
lastb = match.b + match.size
padlen = (longest - len('Expected'))
padding = ' ' * padlen
line1 = '-' * padlen
line2 = '-' * (longest - len('Result'))
msg = '\nExpected%s | | Result' % padding
msg += '\n--------%s-|---|-------%s' % (line1, line2)
for success, a, b in data:
pad = ' ' * (longest - len(a))
if success:
msg += '\n%s%s | | %s' % (a, pad, b)
else:
msg += '\n%s%s | ! | %s' % (a, pad, b)
return BitDiffResult(False, msg)
def update_template_debug(debug=True):
"""
Helper method for updating the template debug option based on
the django version. Use the results of this function as the context.
:return: SettingsOverride object
"""
# Create our overridden template settings with debug turned off.
templates_override = settings.TEMPLATES
templates_override[0]['OPTIONS'].update({'debug': debug})
# Engine gets created based on template settings initial value so
# changing the settings after the fact won't update, so do it
# manually. Necessary when testing validate_context
# with render method and want debug off.
Engine.get_default().debug = debug
return SettingsOverride(TEMPLATES=templates_override)
class SekizaiTestCase(TestCase):
def _render(self, tpl, ctx=None, sekizai_context=True):
ctx = dict(ctx) if ctx else {}
if sekizai_context:
ctx.update(context_processors.sekizai())
return render_to_string(tpl, ctx)
def _get_bits(self, tpl, ctx=None, sekizai_context=True):
ctx = ctx or {}
rendered = self._render(tpl, ctx, sekizai_context)
bits = [
bit for bit in [bit.strip('\n')
for bit in rendered.split('\n')] if bit
]
return bits, rendered
def _test(self, tpl, res, ctx=None, sekizai_context=True):
"""
Helper method to render template and compare it's bits
"""
ctx = ctx or {}
bits, rendered = self._get_bits(tpl, ctx, sekizai_context)
differ = BitDiff(res)
result = differ.test(bits)
self.assertTrue(result.status, result.message)
return rendered
def test_pep8(self):
sekizai_dir = os.path.dirname(os.path.abspath(__file__))
pep8style = pep8.StyleGuide()
with CaptureStdout() as stdout:
result = pep8style.check_files([sekizai_dir])
errors = stdout.getvalue()
self.assertEqual(
result.total_errors, 0,
"Code not PEP8 compliant:\n{0}".format(errors)
)
def test_basic_dual_block(self):
"""
Basic dual block testing
"""
bits = [
'my css file', 'some content', 'more content', 'final content',
'my js file'
]
self._test('basic.html', bits)
def test_named_endaddtoblock(self):
"""
Testing with named endaddblock
"""
bits = ["mycontent"]
self._test('named_end.html', bits)
def test_eat_content_before_render_block(self):
"""
Testing that content gets eaten if no render_blocks is available
"""
bits = ["mycontent"]
self._test("eat.html", bits)
def test_sekizai_context_required(self):
"""
Test that the template tags properly fail if not used with either
SekizaiContext or the context processor.
"""
with self.assertRaises(template.TemplateSyntaxError):
self._render('basic.html', {}, sekizai_context=False)
def test_complex_template_inheritance(self):
"""
Test that (complex) template inheritances work properly
"""
bits = [
"head start",
"some css file",
"head end",
"include start",
"inc add js",
"include end",
"block main start",
"extinc",
"block main end",
"body pre-end",
"inc js file",
"body end"
]
self._test("inherit/extend.html", bits)
"""
Test that blocks (and block.super) work properly with sekizai
"""
bits = [
"head start",
"visible css file",
"some css file",
"head end",
"include start",
"inc add js",
"include end",
"block main start",
"block main base contents",
"more contents",
"block main end",
"body pre-end",
"inc js file",
"body end"
]
self._test("inherit/super_blocks.html", bits)
def test_namespace_isolation(self):
"""
Tests that namespace isolation works
"""
bits = ["the same file", "the same file"]
self._test('namespaces.html', bits)
def test_variable_namespaces(self):
"""
Tests variables and filtered variables as block names.
"""
bits = ["file one", "file two"]
self._test('variables.html', bits, {'blockname': 'one'})
def test_invalid_addtoblock(self):
"""
Tests that template syntax errors are raised properly in templates
rendered by sekizai tags
"""
self.assertRaises(
template.TemplateSyntaxError,
self._render, 'errors/failadd.html'
)
def test_invalid_renderblock(self):
self.assertRaises(
template.TemplateSyntaxError,
self._render, 'errors/failrender.html'
)
def test_invalid_include(self):
self.assertRaises(
template.TemplateSyntaxError,
self._render, 'errors/failinc.html'
)
def test_invalid_basetemplate(self):
self.assertRaises(
template.TemplateSyntaxError,
self._render, 'errors/failbase.html'
)
def test_invalid_basetemplate_two(self):
self.assertRaises(
template.TemplateSyntaxError,
self._render, 'errors/failbase2.html'
)
def test_with_data(self):
"""
Tests the with_data/add_data tags.
"""
bits = ["1", "2"]
self._test('with_data.html', bits)
def test_easy_inheritance(self):
self.assertEqual('content', self._render("easy_inherit.html").strip())
def test_validate_context(self):
sekizai_ctx = SekizaiContext()
django_ctx = template.Context()
self.assertRaises(
template.TemplateSyntaxError,
validate_context, django_ctx
)
self.assertEqual(validate_context(sekizai_ctx), True)
with update_template_debug(debug=False):
self.assertEqual(validate_context(django_ctx), False)
self.assertEqual(validate_context(sekizai_ctx), True)
bits = ['some content', 'more content', 'final content']
self._test('basic.html', bits, sekizai_context=False)
def test_post_processor_null(self):
bits = ['header', 'footer']
self._test('processors/null.html', bits)
def test_post_processor_namespace(self):
bits = ['header', 'footer', 'js']
self._test('processors/namespace.html', bits)
def test_import_processor_failfast(self):
self.assertRaises(TypeError, import_processor, 'invalidpath')
def test_unique(self):
bits = ['unique data']
self._test('unique.html', bits)
def test_strip(self):
tpl = template.Template("""
{% load sekizai_tags %}
{% addtoblock 'a' strip %} test{% endaddtoblock %}
{% addtoblock 'a' strip %}test {% endaddtoblock %}
{% render_block 'a' %}""")
context = SekizaiContext()
output = tpl.render(context)
self.assertEqual(output.count('test'), 1, output)
def test_addtoblock_processor_null(self):
bits = ['header', 'footer']
self._test('processors/addtoblock_null.html', bits)
def test_addtoblock_processor_namespace(self):
bits = ['header', 'footer', 'js']
self._test('processors/addtoblock_namespace.html', bits)
class HelperTests(TestCase):
def test_validate_template_js_css(self):
self.assertTrue(validate_template('basic.html', ['js', 'css']))
def test_validate_template_js(self):
self.assertTrue(validate_template('basic.html', ['js']))
def test_validate_template_css(self):
self.assertTrue(validate_template('basic.html', ['css']))
def test_validate_template_empty(self):
self.assertTrue(validate_template('basic.html', []))
def test_validate_template_notfound(self):
self.assertFalse(validate_template('basic.html', ['notfound']))
def test_get_namespaces_easy_inherit(self):
self.assertEqual(get_namespaces('easy_inherit.html'), ['css'])
def test_get_namespaces_chain_inherit(self):
self.assertEqual(get_namespaces('inherit/chain.html'), ['css', 'js'])
def test_get_namespaces_space_chain_inherit(self):
self.assertEqual(
get_namespaces('inherit/spacechain.html'),
['css', 'js']
)
def test_get_namespaces_var_inherit(self):
self.assertEqual(get_namespaces('inherit/varchain.html'), [])
def test_get_namespaces_sub_var_inherit(self):
self.assertEqual(get_namespaces('inherit/subvarchain.html'), [])
def test_get_namespaces_null_ext(self):
self.assertEqual(get_namespaces('inherit/nullext.html'), [])
def test_deactivate_validate_template(self):
with SettingsOverride(SEKIZAI_IGNORE_VALIDATION=True):
self.assertTrue(validate_template('basic.html', ['js', 'css']))
self.assertTrue(validate_template('basic.html', ['js']))
self.assertTrue(validate_template('basic.html', ['css']))
self.assertTrue(validate_template('basic.html', []))
self.assertTrue(validate_template('basic.html', ['notfound']))
def test_watcher_add_namespace(self):
context = SekizaiContext()
watcher = Watcher(context)
varname = get_varname()
context[varname]['key'].append('value')
changes = watcher.get_changes()
self.assertEqual(changes, {'key': ['value']})
def test_watcher_add_data(self):
context = SekizaiContext()
varname = get_varname()
context[varname]['key'].append('value')
watcher = Watcher(context)
context[varname]['key'].append('value2')
changes = watcher.get_changes()
self.assertEqual(changes, {'key': ['value2']})
import os
import sys
import time
import uuid
from datetime import datetime
from numpy import mean
from numpy.core import long
from ios_device.servers.DTXSever import InstrumentRPCParseError
from ios_device.servers.Instrument import InstrumentServer
from ios_device.util.utils import kperf_data
sys.path.append(os.getcwd())
NANO_SECOND = 1e9 # ns
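# 1/24 s: per-frame budget of 24 fps content, used below as the jank threshold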
MOVIE_FRAME_COST = 1 / 24
def graphics_display(rpc):
def dropped_message(res):
print("[DROP]", res.parsed, res.raw.channel_code)
last_frame = None
last_1_frame_cost, last_2_frame_cost, last_3_frame_cost = 0, 0, 0
jank_count = 0
big_jank_count = 0
jank_time_count = 0
mach_time_factor = 125 / 3
frame_count = 0
time_count = 0
last_time = datetime.now().timestamp()
count_time = datetime.now().timestamp()
_list = []
def on_graphics_message(res):
nonlocal frame_count, last_frame, last_1_frame_cost, last_2_frame_cost, last_3_frame_cost, time_count, mach_time_factor, last_time, \
jank_count, big_jank_count, jank_time_count, _list, count_time
if type(res.plist) is InstrumentRPCParseError:
for args in kperf_data(res.raw.get_selector()):
_time, code = args[0], args[7]
if code == 830472984:
if not last_frame:
last_frame = long(_time)
else:
this_frame_cost = (long(_time) - last_frame) * mach_time_factor
if all([last_3_frame_cost != 0, last_2_frame_cost != 0, last_1_frame_cost != 0]):
if this_frame_cost > mean([last_3_frame_cost, last_2_frame_cost, last_1_frame_cost]) * 2 \
and this_frame_cost > MOVIE_FRAME_COST * NANO_SECOND * 2:
jank_count += 1
jank_time_count += this_frame_cost
if this_frame_cost > mean(
[last_3_frame_cost, last_2_frame_cost, last_1_frame_cost]) * 3 \
and this_frame_cost > MOVIE_FRAME_COST * NANO_SECOND * 3:
big_jank_count += 1
last_3_frame_cost, last_2_frame_cost, last_1_frame_cost = last_2_frame_cost, last_1_frame_cost, this_frame_cost
time_count += this_frame_cost
last_frame = long(_time)
frame_count += 1
else:
time_count = (datetime.now().timestamp() - count_time) * NANO_SECOND
if time_count > NANO_SECOND:
print(
f"{datetime.now().timestamp() - last_time} FPS: {frame_count / time_count * NANO_SECOND} jank: {jank_count} big_jank: {big_jank_count} stutter: {jank_time_count / time_count}")
jank_count = 0
big_jank_count = 0
jank_time_count = 0
frame_count = 0
time_count = 0
count_time = datetime.now().timestamp()
# else:
# last_time = datetime.now().timestamp()
rpc.register_unhandled_callback(dropped_message)
rpc.register_channel_callback("com.apple.instruments.server.services.coreprofilesessiontap", on_graphics_message)
    # get the mach time ratio
machTimeInfo = rpc.call("com.apple.instruments.server.services.deviceinfo", "machTimeInfo").parsed
mach_time_factor = machTimeInfo[1] / machTimeInfo[2]
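    # machTimeInfo is presumed to return (mach_absolute_time, numer, denom) as in
    # mach_timebase_info; numer/denom converts raw mach ticks to nanoseconds.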
print("set", rpc.call("com.apple.instruments.server.services.coreprofilesessiontap", "setConfig:",
{'rp': 10,
'tc': [{'kdf2': {630784000, 833617920, 830472456},
'tk': 3,
'uuid': str(uuid.uuid4()).upper()}],
'ur': 500}).parsed)
print("start",
rpc.call("com.apple.instruments.server.services.coreprofilesessiontap", "start").parsed)
try:
while 1:
time.sleep(10)
    except KeyboardInterrupt:
pass
print("stop", rpc.call("com.apple.instruments.server.services.coreprofilesessiontap", "stop").parsed)
rpc.stop()
if __name__ == '__main__':
rpc = InstrumentServer().init()
graphics_display(rpc)
rpc.stop()
import numpy as np
COLORS = np.array([[1, 0, 1], [0, 0, 1], [0, 1, 1], [0, 1, 0], [1, 1, 0], [1, 0, 0]])
IMG_EXTENSIONS = (".jpg", ".png")
#!/usr/bin/env python3
import argparse
import json
import uuid
import requests
import sys
from string import Formatter
from errors import UserError, RequestError
from request_utils import Requests
from environment_utils import Environments
class Cli(object):
def __init__(
self,
requests_filename='requests.json',
environments_filename='envs.json',
print_all_responses=False
):
self.requests = Requests(filename=requests_filename)
self.environments = Environments(filename=environments_filename)
self.print_all_responses = print_all_responses
def make_call(self, request_name, env_name):
request = self.requests.find(request_name)
environment = self.environments.find(env_name)
parsed_req = self._parse_request(request, env_name)
url = f"{environment['base_url']}{parsed_req['endpoint']}"
headers = {'content-type': 'application/json'}
if 'headers' in environment:
headers = {**headers, **environment['headers']}
if parsed_req['type'] == 'POST':
response = requests.post(url, data=json.dumps(parsed_req['body']), headers=headers)
elif parsed_req['type'] == 'PUT':
response = requests.put(url, data=json.dumps(parsed_req['body']), headers=headers)
elif parsed_req['type'] == 'GET':
response = requests.get(url, params=parsed_req['body'], headers=headers)
else:
raise UserError(f'Unknown HTTP method {parsed_req["type"]}')
response_json = response.json()
if response.status_code != 200:
raise RequestError(
f'{response.status_code} returned when calling {request_name} with response '
f'{response_json}. Expected status code 200.'
)
if self.print_all_responses:
print(f'Response for call to {request_name}:')
print(response_json)
return response_json
def _parse_request(self, request, base_url):
if isinstance(request, str):
parameters = [
parameter for _, parameter, _, _ in Formatter().parse(request) if parameter
]
values = {}
for parameter in parameters:
new_request = parameter.split('[')[0]
values[new_request] = self._populate_parameter(new_request, base_url)
return request.format(**values)
if isinstance(request, list):
parsed = []
for value in request:
parsed.append(self._parse_request(value, base_url))
return parsed
parsed = {}
for attribute, value in request.items():
parsed[attribute] = self._parse_request(value, base_url)
return parsed
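    # Example: a request body like {"user": "{create_user[id]}"} (where
    # "create_user" is a hypothetical request name) triggers a call to that
    # request and substitutes response["id"] via str.format; the special
    # parameter "{uuid}" is filled with a fresh UUID instead. Note that the
    # base_url parameter actually carries the environment name.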
def _populate_parameter(self, parameter_name, base_url):
if parameter_name == 'uuid':
return str(uuid.uuid4())
else:
return self.make_call(parameter_name, base_url)
def setup_argparse():
parser = argparse.ArgumentParser(description='Create and retrieve objects in a rest API.')
parser.add_argument(
'request',
metavar='request_name',
help='the name of a request to make',
nargs='?'
)
parser.add_argument(
'-l',
'--list',
help='list all objects and exit',
action='store_true'
)
parser.add_argument(
'-rf',
'--requests_file',
help='the requests file location',
)
parser.add_argument(
'-ef',
'--environments_file',
help='the environments file location',
)
parser.add_argument(
'-o',
'--output_all_requests',
help='prints all the requests being made',
action='store_true'
)
parser.add_argument(
'-e',
'--env',
'--environment',
metavar='env_name',
help='the name of environment to use',
)
return parser
if __name__ == '__main__':
parser = setup_argparse()
args = parser.parse_args()
cli = None
try:
cli = Cli(
requests_filename=args.requests_file or 'requests.json',
environments_filename=args.environments_file or 'envs.json',
print_all_responses=args.output_all_requests
)
except UserError as e:
print(str(e))
sys.exit(1)
if args.list:
cli.requests.print_request_list()
elif args.request:
try:
print(cli.make_call(args.request, args.env or 'default'))
except (UserError, RequestError) as e:
print(str(e))
sys.exit(1)
else:
print('Type -h for help')
import sys
n = int(input())
for i in range(n):
x = int(input())
#if x / 4 gives rest 0, then the polygon is beautiful
if (x%4 == 0):
print('YES')
else:
print('NO')
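# e.g. x = 4 or x = 12 print YES; x = 6 prints NO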
# Repo: zacespinosa/homicidal_chauffeur
import random as random
import numpy as np
from dynamics import Simulator, Pursuer, Evader
def test_evader():
num_d_states = 25
num_phi_states = 20
num_phi_d_states = 20
num_actions = 10
num_states = num_d_states*num_phi_states*num_phi_d_states
num_epochs = 1000
p = Pursuer()
e = Evader(num_d_states, num_phi_states, num_phi_d_states, num_actions, np.array([10,10]), learning='Q-learning', load_q=True)
s = Simulator(p, e, num_d_states, num_phi_states, num_phi_d_states, num_actions, verbose=True)
while s.restarts < 1:
# execute optimal pursuer strategy while training evader
a_p = p.optimal_strategy(e.pos, p.pos)
# a_e = e.optimal_strategy(e.pos, p.pos, p.R_p)
# execute Q Learning policy for evader
state = e.s
a_e = e.qLearningPolicy(state)
p_info, e_info = s.simulate(a_p, a_e, discrete_p_action=False, discrete_e_action=True)
if s.end_game: s.restart_game()
new_state = e_info[0]
r_e = e_info[1]
print("Evader captured: ", s.num_captures, "/", s.restarts, " times.")
test_evader()
# File: Demo/Code/main.py
# -*- coding: UTF-8 -*-
import sys
from PyQt5 import QtWidgets, QtCore, QtGui
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import (QWidget, QPushButton, QApplication)
import math
from run import deep_rnn_annotate
# coordinates of the selected object
record = [0, 0, 0, 0]
# selected mode
select = ''
# instance number
cur_ins_id = 0
# screen shot
class WScreenShot(QWidget):
win = ''
    # custom path where the screenshot is saved
save_path = "../images/save.jpg"
@classmethod
def run(cls, x, y): # screenshot
cls.win = cls(x, y)
cls.win.show()
def __init__(self, x, y, parent=None):
super(WScreenShot, self).__init__(parent)
self.bias = QPoint(x, y)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
self.setStyleSheet('''background-color:black; ''')
self.setWindowOpacity(0.6)
desktopRect = QDesktopWidget().screenGeometry()
self.setGeometry(x, y, 1024, 512)
self.setCursor(Qt.CrossCursor)
self.blackMask = QBitmap(desktopRect.size())
self.blackMask.fill(Qt.black)
self.mask = self.blackMask.copy()
self.isDrawing = False
self.startPoint = QPoint()
self.endPoint = QPoint()
    # custom paint event
def paintEvent(self, event):
if self.isDrawing:
self.mask = self.blackMask.copy()
pp = QPainter(self.mask)
pen = QPen()
pen.setStyle(Qt.NoPen)
pp.setPen(pen)
brush = QBrush(Qt.white)
pp.setBrush(brush)
pp.drawRect(QRect(self.startPoint, self.endPoint))
self.setMask(QBitmap(self.mask))
def mousePressEvent(self, event):
if event.button() == Qt.LeftButton:
self.startPoint = event.pos()
self.endPoint = self.startPoint
self.isDrawing = True
def mouseMoveEvent(self, event):
if self.isDrawing:
self.endPoint = event.pos()
self.update()
def mouseReleaseEvent(self, event):
if event.button() == Qt.LeftButton:
self.endPoint = event.pos()
# record the coordinates of the selected object
record[0] = (self.startPoint.x())
record[1] = (self.startPoint.y())
record[2] = (self.endPoint.x())
record[3] = (self.endPoint.y())
screenshot = QApplication.primaryScreen().grabWindow(QApplication.desktop().winId())
rect = QRect(self.startPoint + self.bias, self.endPoint + self.bias)
outputRegion = screenshot.copy(rect)
# save the selected object
outputRegion.save(self.save_path, format='JPG', quality=100)
self.close()
class MyLabel(QLabel):
def __init__(self, parent=None):
QLabel.__init__(self, parent)
self.points_list = []
self.threshold = 5
self.flag = False
self.choose_index = -1
self.choose_polygon = -1
def addPolygon(self, points):
self.points_list.append(points)
def mousePressEvent(self, event):
x = event.x()
y = event.y()
min_distance = 100000000
min_poly = -1
min_index = -1
for i in range(len(self.points_list)):
for j in range(len(self.points_list[i])):
distance = math.sqrt((x - self.points_list[i][j].x()) ** 2 + (y - self.points_list[i][j].y()) ** 2)
if distance < min_distance:
min_distance = distance
min_poly = i
min_index = j
if event.buttons() == Qt.LeftButton:
if min_poly != -1 and min_index != -1 and min_distance < self.threshold:
self.flag = True
self.choose_polygon = min_poly
self.choose_index = min_index
elif event.buttons() == Qt.RightButton:
print("....")
if min_index != -1 and min_distance < self.threshold:
del self.points_list[min_poly][min_index]
self.update()
def mouseReleaseEvent(self, event):
self.flag = False
self.choose_polygon = -1
self.choose_index = -1
def mouseMoveEvent(self, event):
if self.flag:
x = event.x()
y = event.y()
self.points_list[self.choose_polygon][self.choose_index].setX(x)
self.points_list[self.choose_polygon][self.choose_index].setY(y)
self.update()
def paintEvent(self, event):
super().paintEvent(event)
painter = QPainter(self)
for i in range(len(self.points_list)):
painter.setPen(QPen(Qt.red, 3, Qt.SolidLine))
painter.drawPolygon(QPolygon(self.points_list[i]))
painter.setPen(QPen(Qt.black, 8, Qt.SolidLine))
painter.drawPoints(QPolygon(self.points_list[i]))
class MyWindow(QMainWindow):
def __init__(self):
super(MyWindow, self).__init__()
self.resize(1250, 600)
menubar = self.menuBar()
menubar.addMenu('&Menu')
menubar.addMenu('&Mode')
menubar.addMenu('&Instruction')
self.setWindowTitle("Deep RNN Annotator")
self.setWindowIcon(QIcon('icon.png'))
self.setFixedSize(self.width(), self.height())
self.label = MyLabel(self)
self.label.setText(" Waiting to load image...")
self.label.setFixedSize(1024, 512)
self.label.move(10, 50)
self.label.setStyleSheet("QLabel{background:white;}"
"QLabel{color:rgb(0,0,0);font-size:40px;font-weight:bold;font-family:宋体;}"
)
font = QtGui.QFont()
font.setFamily('微软雅黑')
font.setBold(True)
font.setPointSize(12)
font.setWeight(60)
self.combo = QComboBox(self)
self.combo.addItem('Local Mode')
self.combo.addItem('Server Mode')
self.combo.setFont(font)
self.combo.setGeometry(QtCore.QRect(1060, 60, 150, 40))
btn = QPushButton(self)
btn.setText("Select Image")
# btn.move(1060, 140)
btn.clicked.connect(self.openimage)
btn.setFont(font)
btn.setGeometry(QtCore.QRect(1060, 140, 150, 40))
btn2 = QPushButton(self)
btn2.setText("Choose Object")
# btn2.move(1060, 380)
btn2.clicked.connect(self.screenshot)
btn2.setFont(font)
btn2.setGeometry(QtCore.QRect(1060, 220, 150, 40))
btn3 = QPushButton(self)
btn3.setText("Annotate")
# btn3.move(1060, 460)
btn3.clicked.connect(self.labelling)
btn3.setFont(font)
btn3.setGeometry(QtCore.QRect(1060, 300, 150, 40))
self.pic_x = 0
self.pic_y = 0
def openimage(self):
imgName, imgType = QFileDialog.getOpenFileName(self, "Open Image", "", "*.jpg;;*.png;;All Files(*)")
jpg = QtGui.QPixmap(imgName).scaled(self.label.width(), self.label.height())
self.label.points_list = []
self.label.setPixmap(jpg)
def screenshot(self):
self.x = self.label.x() + self.geometry().x()
self.y = self.label.y() + self.geometry().y()
a = WScreenShot(self.x, self.y)
a.run(self.x, self.y)
def getPoints(self, lt):
points = []
temp = 0
print(len(lt))
for i in range(len(lt)):
if i % 2:
points.append(QPoint(temp, lt[i]))
else:
temp = lt[i]
return points
def labelling(self):
global cur_ins_id
global record
global select
cur_ins_id += 1
select = self.combo.currentText()
ret = deep_rnn_annotate()
for i in range(0, len(ret), 2):
ret[i] = ret[i] + record[0]
ret[i+1] = ret[i+1] + record[1]
points = self.getPoints(ret)
self.label.addPolygon(points)
self.repaint()
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
my = MyWindow()
my.show()
sys.exit(app.exec_())
# reduce is no longer a built-in function, so we have to import it from functools
# it takes 2 parameters: a function and an iterable
# only use reduce when you really need it; a plain for loop is usually clearer
# To understand reduce:
# imagine a collection of data:
#   data = a1, a2, a3, a4, a5, ..., an
# and a function that takes two parameters:
#   def function(x, y):
#       return x * y
# reduce works as follows:
# STEP 1 -> res1 = f(a1, a2)    # applies the function to the first 2 elements and stores the result
# STEP 2 -> res2 = f(res1, a3)  # applies the function to the previous result and the next element
# STEP 3 -> res3 = f(res2, a4)  # keeps combining the previous result with the next element
# STEP N -> resN = f(resN-1, an)
# Another way to picture it:
#   f(f(f(f(f(a1, a2), a3), a4), a5), a6)
from functools import reduce
dados=[2,4,3,8,5,4,7,9,6,1]
#usando REDUCE
aux = lambda x,y: x*y
prod=reduce(aux,dados)
print(prod)
#Usando for
prod1=1
for n in dados:
prod1=prod1*n
print(prod1)
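# both approaches print 1451520 (the product of all elements of dados)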
# File: tests/test_engine.py
import re
import pytest
from hiku import query as q
from hiku.graph import Graph, Node, Field, Link, Option, Root
from hiku.types import Record, Sequence, Integer, Optional, TypeRef
from hiku.utils import listify
from hiku.engine import Engine, pass_context, Context
from hiku.builder import build, Q
from hiku.executors.sync import SyncExecutor
from .base import check_result, ANY, Mock
@listify
def id_field(fields, ids):
for i in ids:
yield [i for _ in fields]
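# Each row: (option definition, options passed in the query,
#            options the field/link function is expected to receive).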
OPTION_BEHAVIOUR = [
(Option('op', None), {'op': 1812}, {'op': 1812}),
(Option('op', None, default=None), {}, {'op': None}),
(Option('op', None, default=None), {'op': 2340}, {'op': 2340}),
(Option('op', None, default=3914), {}, {'op': 3914}),
(Option('op', None, default=4254), {'op': None}, {'op': None}),
(Option('op', None, default=1527), {'op': 8361}, {'op': 8361}),
]
def execute(graph, query_, ctx=None):
engine = Engine(SyncExecutor())
return engine.execute(graph, query_, ctx=ctx)
def test_root_fields():
f1 = Mock(return_value=['boiardo'])
f2 = Mock(return_value=['isolde'])
graph = Graph([
Root([
Field('a', None, f1),
Field('b', None, f2),
]),
])
result = execute(graph, build([Q.a, Q.b]))
check_result(result, {'a': 'boiardo', 'b': 'isolde'})
f1.assert_called_once_with([q.Field('a')])
f2.assert_called_once_with([q.Field('b')])
def test_node_fields():
f1 = Mock(return_value=[1])
f2 = Mock(return_value=[['harkis']])
f3 = Mock(return_value=[['slits']])
graph = Graph([
Node('a', [
Field('b', None, f2),
Field('c', None, f3),
]),
Root([
Link('d', Sequence[TypeRef['a']], f1, requires=None),
]),
])
result = execute(graph, build([Q.d[Q.b, Q.c]]))
check_result(result, {'d': [{'b': 'harkis', 'c': 'slits'}]})
f1.assert_called_once_with()
f2.assert_called_once_with([q.Field('b')], [1])
f3.assert_called_once_with([q.Field('c')], [1])
def test_node_complex_fields():
f1 = Mock(return_value=[1])
f2 = Mock(return_value=[[{'f': 'marshes'}]])
f3 = Mock(return_value=[[{'g': 'colline'}]])
f4 = Mock(return_value=[[[{'h': 'magi'}]]])
graph = Graph([
Node('a', [
Field('b', Optional[Record[{'f': Integer}]], f2),
Field('c', Record[{'g': Integer}], f3),
Field('d', Sequence[Record[{'h': Integer}]], f4),
]),
Root([
Link('e', Sequence[TypeRef['a']], f1, requires=None),
]),
])
check_result(
execute(graph, build([Q.e[Q.b[Q.f], Q.c[Q.g], Q.d[Q.h]]])),
{'e': [{'b': {'f': 'marshes'},
'c': {'g': 'colline'},
'd': [{'h': 'magi'}]}]},
)
f1.assert_called_once_with()
f2.assert_called_once_with(
[q.Link('b', q.Node([q.Field('f')]))], [1],
)
f3.assert_called_once_with(
[q.Link('c', q.Node([q.Field('g')]))], [1],
)
f4.assert_called_once_with(
[q.Link('d', q.Node([q.Field('h')]))], [1],
)
def test_links():
fb = Mock(return_value=[1])
fc = Mock(return_value=[2])
fi = Mock(return_value=[3])
fd = Mock(return_value=[['boners']])
fe = Mock(return_value=[['julio']])
graph = Graph([
Node('a', [
Field('d', None, fd),
Field('e', None, fe),
]),
Root([
Field('i', None, fi),
Link('b', Sequence[TypeRef['a']], fb, requires=None),
Link('c', Sequence[TypeRef['a']], fc, requires='i'),
]),
])
result = execute(graph, build([Q.b[Q.d], Q.c[Q.e]]))
check_result(result, {'b': [{'d': 'boners'}],
'c': [{'e': 'julio'}]})
fi.assert_called_once_with([q.Field('i')])
fb.assert_called_once_with()
fc.assert_called_once_with(3)
fd.assert_called_once_with([q.Field('d')], [1])
fe.assert_called_once_with([q.Field('e')], [2])
@pytest.mark.parametrize('option, args, result', OPTION_BEHAVIOUR)
def test_field_option_valid(option, args, result):
f = Mock(return_value=['baking'])
graph = Graph([
Root([
Field('auslese', None, f, options=[option]),
]),
])
check_result(execute(graph, build([Q.auslese(**args)])),
{'auslese': 'baking'})
f.assert_called_once_with([q.Field('auslese', options=result)])
def test_field_option_unknown():
test_field_option_valid(
Option('inked', None), {'inked': 2340, 'unknown': 8775}, {'inked': 2340}
)
def test_field_option_missing():
graph = Graph([
Root([
Field('poofy', None, Mock(), options=[Option('mohism', None)]),
]),
])
with pytest.raises(TypeError) as err:
execute(graph, build([Q.poofy]))
err.match(r'^Required option "mohism" for Field\(\'poofy\', '
r'(.*) was not provided$')
@pytest.mark.parametrize('option, args, result', OPTION_BEHAVIOUR)
def test_link_option_valid(option, args, result):
f1 = Mock(return_value=[1])
f2 = Mock(return_value=[['aunder']])
graph = Graph([
Node('a', [
Field('c', None, f2),
]),
Root([
Link('b', Sequence[TypeRef['a']], f1, requires=None,
options=[option]),
]),
])
check_result(execute(graph, build([Q.b(**args)[Q.c]])),
{'b': [{'c': 'aunder'}]})
f1.assert_called_once_with(result)
f2.assert_called_once_with([q.Field('c')], [1])
def test_link_option_unknown():
test_link_option_valid(
Option('oleic', None), {'oleic': 2340, 'unknown': 8775}, {'oleic': 2340}
)
def test_link_option_missing():
graph = Graph([
Node('slices', [
Field('papeete', None, Mock()),
]),
Root([
Link('eclairs', Sequence[TypeRef['slices']], Mock(), requires=None,
options=[Option('nocks', None)]),
]),
])
with pytest.raises(TypeError) as err:
execute(graph, build([Q.eclairs[Q.papeete]]))
err.match(r'^Required option "nocks" for Link\(\'eclairs\', '
r'(.*) was not provided$')
def test_pass_context_field():
f = pass_context(Mock(return_value=['boiardo']))
graph = Graph([
Root([
Field('a', None, f),
]),
])
check_result(execute(graph, build([Q.a]), {'vetch': 'shadier'}),
{'a': 'boiardo'})
f.assert_called_once_with(ANY, [q.Field('a')])
ctx = f.call_args[0][0]
assert isinstance(ctx, Context)
assert ctx['vetch'] == 'shadier'
with pytest.raises(KeyError) as err:
_ = ctx['invalid'] # noqa
err.match('is not specified in the query context')
def test_pass_context_link():
f1 = pass_context(Mock(return_value=[1]))
f2 = Mock(return_value=[['boners']])
graph = Graph([
Node('a', [
Field('b', None, f2),
]),
Root([
Link('c', Sequence[TypeRef['a']], f1, requires=None),
]),
])
result = execute(graph, build([Q.c[Q.b]]), {'fibs': 'dossil'})
check_result(result, {'c': [{'b': 'boners'}]})
f1.assert_called_once_with(ANY)
f2.assert_called_once_with([q.Field('b')], [1])
ctx = f1.call_args[0][0]
assert isinstance(ctx, Context)
assert ctx['fibs'] == 'dossil'
with pytest.raises(KeyError) as err:
_ = ctx['invalid'] # noqa
err.match('is not specified in the query context')
def test_node_link_without_requirements():
f1 = Mock(return_value=[1])
f2 = Mock(return_value=[2])
f3 = Mock(return_value=[['arnhild']])
graph = Graph([
Node('a', [
Field('c', None, f3),
]),
Node('b', [
Link('d', Sequence[TypeRef['a']], f2, requires=None),
]),
Root([
Link('e', Sequence[TypeRef['b']], f1, requires=None),
]),
])
result = execute(graph, build([Q.e[Q.d[Q.c]]]))
check_result(result, {'e': [{'d': [{'c': 'arnhild'}]}]})
f1.assert_called_once_with()
f2.assert_called_once_with()
f3.assert_called_once_with([q.Field('c')], [2])
@pytest.mark.parametrize('value', [1, [], [1, 2]])
def test_root_field_func_result_validation(value):
with pytest.raises(TypeError) as err:
execute(
Graph([
Root([
Field('a', None, Mock(return_value=value)),
]),
]),
build([Q.a]),
)
err.match(re.escape(
"Can't store field values, node: '__root__', fields: ['a'], "
"expected: list (len: 1), returned: {value!r}"
.format(value=value)
))
@pytest.mark.parametrize('value', [1, [], [1, 2], [[], []], [[1], []],
[[], [2]]])
def test_node_field_func_result_validation(value):
with pytest.raises(TypeError) as err:
execute(
Graph([
Node('a', [
Field('b', None, Mock(return_value=value)),
]),
Root([
Link('c', Sequence[TypeRef['a']], Mock(return_value=[1, 2]),
requires=None),
]),
]),
build([Q.c[Q.b]]),
)
err.match(re.escape(
"Can't store field values, node: 'a', fields: ['b'], "
"expected: list (len: 2) of lists (len: 1), returned: {value!r}"
.format(value=value)
))
def test_root_link_many_func_result_validation():
with pytest.raises(TypeError) as err:
execute(
Graph([
Node('a', [
Field('b', None, Mock(return_value=[[3], [4]])),
]),
Root([
Link('c', Sequence[TypeRef['a']], Mock(return_value=123),
requires=None),
]),
]),
build([Q.c[Q.b]]),
)
err.match(re.escape(
"Can't store link values, node: '__root__', link: 'c', "
"expected: list, returned: 123"
))
@pytest.mark.parametrize('value', [1, [], [1, 2, 3]])
def test_node_link_one_func_result_validation(value):
with pytest.raises(TypeError) as err:
execute(
Graph([
Node('a', [
Field('b', None, Mock(return_value=[[1], [2]]))
]),
Node('c', [
Field('d', None, Mock(return_value=[[3], [4]])),
Link('e', TypeRef['a'], Mock(return_value=value),
requires='d'),
]),
Root([
Link('f', Sequence[TypeRef['c']], Mock(return_value=[1, 2]),
requires=None),
]),
]),
build([Q.f[Q.e[Q.b]]]),
)
err.match(re.escape(
"Can't store link values, node: 'c', link: 'e', expected: "
"list (len: 2), returned: {!r}".format(value)
))
@pytest.mark.parametrize('value', [1, [], [1, []], [[], 2], [[], [], []]])
def test_node_link_many_func_result_validation(value):
with pytest.raises(TypeError) as err:
execute(
Graph([
Node('a', [
Field('b', None, Mock(return_value=[[1], [2]]))
]),
Node('c', [
Field('d', None, Mock(return_value=[[3], [4]])),
Link('e', Sequence[TypeRef['a']], Mock(return_value=value),
requires='d'),
]),
Root([
Link('f', Sequence[TypeRef['c']], Mock(return_value=[1, 2]),
requires=None),
]),
]),
build([Q.f[Q.e[Q.b]]]),
)
err.match(re.escape(
"Can't store link values, node: 'c', link: 'e', expected: "
"list (len: 2) of lists, returned: {!r}".format(value)
))
def test_root_field_alias():
data = {'a': 42}
def root_fields(fields):
return [data[f.name] for f in fields]
graph = Graph([
Root([
Field('a', None, root_fields),
]),
])
result = execute(graph, q.Node([
q.Field('a', alias='a1'),
q.Field('a', alias='a2'),
]))
check_result(result, {'a1': 42, 'a2': 42})
def test_node_field_alias():
data = {'x1': {'a': 42}}
@listify
def x_fields(fields, ids):
for i in ids:
yield [data[i][f.name] for f in fields]
graph = Graph([
Node('X', [
Field('a', None, x_fields),
]),
Root([
Link('x', TypeRef['X'], lambda: 'x1', requires=None),
]),
])
result = execute(graph, q.Node([
q.Link('x', q.Node([
q.Field('a', alias='a1'),
q.Field('a', alias='a2'),
])),
]))
check_result(result, {'x': {'a1': 42, 'a2': 42}})
def test_root_link_alias():
data = {
'xN': {'a': 1, 'b': 2},
}
@listify
def x_fields(fields, ids):
for i in ids:
yield [data[i][f.name] for f in fields]
graph = Graph([
Node('X', [
Field('a', None, x_fields),
Field('b', None, x_fields),
]),
Root([
Link('x', TypeRef['X'], lambda: 'xN', requires=None),
]),
])
result = execute(graph, q.Node([
q.Link('x', q.Node([q.Field('a')]), alias='x1'),
q.Link('x', q.Node([q.Field('b')]), alias='x2'),
]))
check_result(result, {
'x1': {'a': 1},
'x2': {'b': 2},
})
def test_node_link_alias():
data = {
'yN': {'a': 1, 'b': 2},
}
x2y = {'xN': 'yN'}
@listify
def y_fields(fields, ids):
for i in ids:
yield [data[i][f.name] for f in fields]
graph = Graph([
Node('Y', [
Field('a', None, y_fields),
Field('b', None, y_fields),
]),
Node('X', [
Field('id', None, id_field),
Link('y', TypeRef['Y'],
lambda ids: [x2y[i] for i in ids],
requires='id'),
]),
Root([
Link('x', TypeRef['X'], lambda: 'xN', requires=None),
]),
])
result = execute(graph, q.Node([
q.Link('x', q.Node([
q.Link('y', q.Node([q.Field('a')]), alias='y1'),
q.Link('y', q.Node([q.Field('b')]), alias='y2'),
])),
]))
check_result(result, {
'x': {
'y1': {'a': 1},
'y2': {'b': 2},
}
})
def test_conflicting_fields():
x_data = {'xN': {'a': 42}}
@listify
def x_fields(fields, ids):
for i in ids:
yield ['{}-{}'.format(x_data[i][f.name], f.options['k'])
for f in fields]
graph = Graph([
Node('X', [
Field('a', None, x_fields, options=[Option('k', Integer)]),
]),
Root([
Link('x1', TypeRef['X'], lambda: 'xN', requires=None),
Link('x2', TypeRef['X'], lambda: 'xN', requires=None),
]),
])
result = execute(graph, q.Node([
q.Link('x1', q.Node([q.Field('a', options={'k': 1})])),
q.Link('x2', q.Node([q.Field('a', options={'k': 2})])),
]))
check_result(result, {
'x1': {'a': '42-1'},
'x2': {'a': '42-2'},
})
def test_conflicting_links():
data = {
'yA': {'a': 1, 'b': 2},
'yB': {'a': 3, 'b': 4},
'yC': {'a': 5, 'b': 6},
}
x2y = {'xN': ['yA', 'yB', 'yC']}
@listify
def y_fields(fields, ids):
for i in ids:
yield [data[i][f.name] for f in fields]
@listify
def x_to_y_link(ids, options):
for i in ids:
yield [y for y in x2y[i] if y not in options['exclude']]
graph = Graph([
Node('Y', [
Field('a', None, y_fields),
Field('b', None, y_fields),
]),
Node('X', [
Field('id', None, id_field),
Link('y', Sequence[TypeRef['Y']], x_to_y_link, requires='id',
options=[Option('exclude', None)]),
]),
Root([
Link('x1', TypeRef['X'], lambda: 'xN', requires=None),
Link('x2', TypeRef['X'], lambda: 'xN', requires=None),
]),
])
result = execute(graph, q.Node([
q.Link('x1', q.Node([
q.Link('y', q.Node([q.Field('a')]),
options={'exclude': ['yA']}),
])),
q.Link('x2', q.Node([
q.Link('y', q.Node([q.Field('b')]),
options={'exclude': ['yC']}),
])),
]))
check_result(result, {
'x1': {'y': [{'a': 3}, {'a': 5}]},
'x2': {'y': [{'b': 2}, {'b': 4}]},
})
def test_process_ordered_node():
ordering = []
def f1(fields):
names = tuple(f.name for f in fields)
ordering.append(names)
return names
def f2(fields):
return f1(fields)
def f3():
ordering.append('x1')
return 'x1'
@listify
def f4(fields, ids):
for i in ids:
yield ['{}-e'.format(i) for _ in fields]
graph = Graph([
Node('X', [
Field('e', None, f4),
]),
Root([
Field('a', None, f1),
Field('b', None, f1),
Field('c', None, f2),
Field('d', None, f2),
Link('x', TypeRef['X'], f3, requires=None),
]),
])
query = q.Node([
q.Field('d'),
q.Field('b'),
q.Field('a'),
q.Link('x', q.Node([
q.Field('e'),
])),
q.Field('c'),
], ordered=True)
engine = Engine(SyncExecutor())
result = engine.execute(graph, query)
check_result(result, {
'a': 'a',
'b': 'b',
'c': 'c',
'd': 'd',
'x': {
'e': 'x1-e',
},
})
assert ordering == [('d',), ('b', 'a'), 'x1', ('c',)]
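
# The tests above rely on module-level helpers (`execute`, `check_result`,
# `id_field`) defined earlier in this test file.  A minimal sketch of their
# contracts, assuming hiku's public API -- these underscore-named versions are
# an illustration, not the file's actual definitions:
def _execute_sketch(graph, query_, ctx=None):
    engine = Engine(SyncExecutor())
    return engine.execute(graph, query_, ctx=ctx)

def _id_field_sketch(fields, ids):
    # echo each id once per requested field, matching the shape the
    # field-result validation tests above expect
    return [[i for _ in fields] for i in ids]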

# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from azure.cli.core.commands import CliCommandType
from ._client_factory import (cf_maintenance_configurations, cf_maintenance_updates, cf_configuration_assignments, cf_apply_updates)
def load_command_table(self, _):
maintenance_configurations_mgmt_util = CliCommandType(
operations_tmpl='azext_maintenance.vendored_sdks.operations.maintenance_configurations_operations#MaintenanceConfigurationsOperations.{}',
client_factory=cf_maintenance_configurations
)
maintenance_updates_mgmt_util = CliCommandType(
operations_tmpl='azext_maintenance.vendored_sdks.operations.updates_operations#UpdatesOperations.{}',
client_factory=cf_maintenance_updates
)
configuration_assignments_mgmt_util = CliCommandType(
operations_tmpl='azext_maintenance.vendored_sdks.operations.configuration_assignments_operations#ConfigurationAssignmentsOperations.{}',
client_factory=cf_configuration_assignments
)
apply_updates_mgmt_util = CliCommandType(
operations_tmpl='azext_maintenance.vendored_sdks.operations.apply_updates_operations#ApplyUpdatesOperations.{}',
client_factory=cf_apply_updates
)
with self.command_group('maintenance configuration', maintenance_configurations_mgmt_util, client_factory=cf_maintenance_configurations) as g:
g.custom_command('create', 'cli_configuration_create')
g.command('delete', 'delete')
g.custom_command('update', 'cli_configuration_create')
g.show_command('show', 'get')
g.command('list', 'list')
with self.command_group('maintenance update', maintenance_updates_mgmt_util, client_factory=cf_maintenance_updates) as g:
g.custom_command('list', 'cli_update_list')
with self.command_group('maintenance assignment', configuration_assignments_mgmt_util, client_factory=cf_configuration_assignments) as g:
g.custom_command('create', 'cli_assignment_create')
g.custom_command('delete', 'cli_assignment_delete')
g.custom_command('list', 'cli_assignment_list')
with self.command_group('maintenance applyupdate', apply_updates_mgmt_util, client_factory=cf_apply_updates) as g:
g.custom_command('create', 'cli_applyupdate_create')
g.custom_command('get', 'cli_applyupdate_get', deprecate_info=g.deprecate(redirect='maintenance applyupdate show'))
g.custom_command('show', 'cli_applyupdate_get')
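
# For reference, the registrations above resolve to the following command
# surface (argument definitions live in _params.py, which is not shown here):
#   az maintenance configuration {create, update, delete, show, list}
#   az maintenance update list
#   az maintenance assignment {create, delete, list}
#   az maintenance applyupdate {create, show, get (deprecated in favour of show)}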

# BLOGSTER by <NAME>
# a.k.a. "The Black Unicorn" a.k.a. "<NAME>".
# Licensed under the MIT license.
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blogster.settings')
application = get_wsgi_application()
import requests
from pprint import pprint
import json
# input your information
user = {'userid': '', 'password': ''}
r = requests.post("http://127.0.0.1:8000/post/get_tasks",
params=user) # POST user data
print(json.dumps(r.json(), ensure_ascii=False))

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import random
sys.path.append('.')
from twisted.internet import reactor
from . import driver
from . import multiplexer
from . import record_layer
from . import updater
from . import conf
EVENT_LOOP_FREQUENCY_S = 0.01
AUTOUPDATE_DELAY = 5
class Server(object):
factory = None
def __init__(self, format_name):
self.multiplexer_outgoing_ = multiplexer.BufferOutgoing()
self.multiplexer_incoming_ = multiplexer.BufferIncoming()
self.multiplexer_incoming_.addCallback(self.process_cell)
self.factory_instances = {}
        # check_for_update() already performs the update when autoupdate is
        # configured; the old code also ignored its None return and called
        # do_update() without the callback argument it requires.
        self.check_for_update()
self.set_driver(format_name)
self.reload_ = False
def set_driver(self, format_name):
self.format_name_ = format_name
self.driver_ = driver.ServerDriver("server")
self.driver_.set_multiplexer_incoming(self.multiplexer_incoming_)
self.driver_.set_multiplexer_outgoing(self.multiplexer_outgoing_)
self.driver_.setFormat(self.format_name_)
def execute(self, reactor):
if not self.driver_.isRunning():
if self.reload_:
self.set_driver(self.format_name_)
self.reload_ = False
self.driver_.execute(reactor)
reactor.callLater(EVENT_LOOP_FREQUENCY_S, self.execute, reactor)
def process_cell(self, cell_obj):
cell_type = cell_obj.get_cell_type()
stream_id = cell_obj.get_stream_id()
if cell_type == record_layer.END_OF_STREAM:
self.factory_instances[stream_id].connectionLost()
del self.factory_instances[stream_id]
elif cell_type == record_layer.NORMAL:
if not self.factory_instances.get(stream_id):
stream = multiplexer.MarionetteStream(
self.multiplexer_incoming_, self.multiplexer_outgoing_,
stream_id)
self.factory_instances[stream_id] = self.factory()
self.factory_instances[stream_id].connectionMade(stream)
payload = cell_obj.get_payload()
if payload:
self.factory_instances[stream_id].dataReceived(payload)
    # call this function if you want to reload formats from disk
    # at the next possible time
def reload_driver(self):
self.reload_ = True
def check_for_update(self):
# uncomment the following line to check for updates every N seconds
# instead of just on startup
# reactor.callLater(N, self.check_for_update, reactor)
if conf.get("general.autoupdate"):
self.do_update(self.reload_driver)
def do_update(self, callback):
# could be replaced with code that updates from a different
# source (e.g., local computations)
update_server = conf.get("general.update_server")
        # use a distinct local name; `updater` would shadow the module
        # imported at the top of this file
        format_updater = updater.FormatUpdater(update_server, use_marionette=False, callback=callback)
        return format_updater.do_update()
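
# A minimal sketch of how a Server is wired up, assuming a factory class with
# the connectionMade/dataReceived/connectionLost interface that process_cell()
# drives above.  EchoFactory, the push() call and the format name are
# illustrative assumptions, not part of this module:
#
#     class EchoFactory(object):
#         def connectionMade(self, stream):
#             self.stream = stream
#         def dataReceived(self, data):
#             self.stream.push(data)   # assumes MarionetteStream exposes push()
#         def connectionLost(self):
#             pass
#
#     server = Server("http_simple_blocking")
#     server.factory = EchoFactory
#     server.execute(reactor)
#     reactor.run()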

# penin/core/mdns/__init__.py
"""Support for discovering local devices and services."""
from netdisco.discovery import NetworkDiscovery
def discover_devices():
"""Discover local devices and services."""
data = {}
netdis = NetworkDiscovery()
netdis.scan()
for device_type in netdis.discover():
data[device_type] = netdis.get_info(device_type)
    netdis.stop()  # must run before returning; it was unreachable after `return data`
    return data
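
# Example usage (requires netdisco and a network to scan; the sweep can take
# a few seconds):
if __name__ == "__main__":
    from pprint import pprint
    pprint(discover_devices())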

'''import pygame
pygame.init()
pygame.mixer.music.load('musica.mp3')
pygame.mixer.music.play()
pygame.event.wait()'''
from pygame import mixer
mixer.init()
mixer.music.load('musica.mp3')
mixer.music.play()
import time
time.sleep(360)  # block long enough for the track to finish playing
import os
from joblib import Parallel, delayed
from os.path import join
from pathlib import Path
import random
import shutil
import scraper
import remove_applause
import re
import subprocess
import num2words
from pydub import AudioSegment  # the bare `import pydub` was unused
from tqdm import tqdm
import logging
logger = logging.getLogger()
logging.basicConfig(level="INFO", format="%(levelname)s: %(filename)s: %(message)s")
random.seed(42)
AUDIO_EXTENSION = 'mp3'
os.chdir(os.path.dirname(__file__))
corpus_path = '../data/TedSrt'
txt_norm = 'tedsrt-norm.txt'
txt_vocab = 'tedsrt-vocab.txt'
processed_data_path = join(corpus_path, 'processed_data') # store all data in this folder before splitting
txt_norm_path = join(corpus_path, txt_norm)
txt_vocab_path = join(corpus_path, txt_vocab)
src_path = 'data/' # raw data source
ratio=[.6, .2, .2] # train test split
# filter audio and srt
duration_diff = 7 # seconds
repeated_occurrence = 50
def clean_text(text):
'''
Text processing to clean text before saving as label
to lowercase, convert years to words, convert digits to words, remove symbols
'''
text = text.lower().strip('\n')
text = re.sub(r'[^\w\s]', ' ', text)
text = ' '.join([num2words.num2words(i, to='year') if (i.isdigit() & (len(i) == 4)) else i for i in text.split()]) # year to words
text = ' '.join([num2words.num2words(i) if i.isdigit() else i for i in text.split()]) # num to words
text = re.sub(' +', ' ', text) # remove redundant spaces
text = text.replace('-', ' ')
return text
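
# A worked example of clean_text, given the num2words conversions above:
#   clean_text("In 1984, I had 3 cats!\n")
#   -> 'in nineteen eighty four i had three cats'
# Four-digit numbers go through num2words(to='year'), remaining digits through
# plain num2words, and the hyphen in 'eighty-four' is replaced last.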
def normalize_text(text):
'''
Text processing to normalize text for language model training
should transform word numeral to numeric, normalize date formats, lemmatization; most not implemented currently
'''
text = text.lower().strip('\n')
text = re.sub(r'[^\w\s]', ' ', text)
text = re.sub(' +', ' ', text) # remove redundant spaces
text = text.replace('-', ' ')
return text
def to_ms(string):
'''
Convert string '00:00:00,000' to milliseconds
to be used for audio slicing
'''
string = string.replace(',','')
hour, minute, second = string.split(':')
second = int(second)
second += int(hour) * 3600 * 1000
second += int(minute) * 60 * 1000
second = second
return second
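
# Quick sanity checks of the arithmetic above:
#   to_ms('00:01:02,500') == 62500      # 1 min + 2.5 s, in milliseconds
#   to_ms('01:00:00,000') == 3600000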
def txt_to_trans(txt_file, file_name):
'''
Convert txt file to transcript format ready to be read into Dataset
lines formatted as 'filename-idx text_label'
return lines and time_slices
'''
file = open(txt_file, 'r')
lines = file.readlines()
file.close()
transcript = [] # label for audio
txt_src = [] # label for language model
time_slices = []
for i in range(len(lines)):
idx = re.search('^[\d]+$', lines[i].strip('\ufeff'))
if idx:
idx = idx[0]
time_frame = re.findall('[0-9]{2}:[0-9]{2}:[0-9]{2},[0-9]{3}', lines[i+1])
if time_frame:
start, end = to_ms(time_frame[0]), to_ms(time_frame[1])
time_slices.append((idx, (start, end)))
audio_label = lines[i+2]
audio_label = clean_text(audio_label)
new_line = f"{file_name}-{idx} {audio_label}"
transcript.append(new_line)
lm_label = normalize_text(audio_label)
txt_src.append(lm_label)
return transcript, time_slices, txt_src
def save_txt(txt, output_path):
'''
Save transcript to output_path
'''
if not os.path.exists(os.path.dirname(output_path)):
os.makedirs(os.path.dirname(output_path))
with open(output_path, 'a+') as f:
for line in txt:
f.write(f"{line}\n")
f.close()
def check_ms_accuracy(time_slices):
    '''
    Check whether the srt timing looks auto-generated: if too many cue start
    times share the same millisecond remainder (820), the timing is considered
    inaccurate and the caller drops the talk.
    '''
    # the count is identical on every pass, so compute it once rather than
    # recomputing it inside a loop over time_slices as before
    occurrence_820 = sum(1 for elem in time_slices if elem[1][0] % 1000 == 820)
    if occurrence_820 > repeated_occurrence:
        return occurrence_820
    return 0
def check_intro_timing(audio_duration, audio_srt_duration):
    '''
    Check whether the srt matches the audio: if the audio runs much longer
    than the last subtitle cue (e.g. the water-drop intro was not accounted
    for), the talk is dropped.
    '''
    # a distinct local name is required here: the original shadowed the
    # module-level `duration_diff` threshold, so the comparison
    # `duration_diff > duration_diff` was always false
    diff = audio_duration - audio_srt_duration
    if diff > duration_diff:
        return diff
    return 0
def build_dataset(src_path=src_path, sample=100):
'''
Build dataset from raw data scraper/data/
split audio into slices
convert srt to trans.txt ready for training
'''
# logger.disabled = True
shutil.rmtree(corpus_path, ignore_errors=True)
    folders = os.listdir(src_path)[:sample]  # honour the `sample` argument instead of a hardcoded 100
for idx, curr_folder in enumerate(tqdm(folders, desc="Building dataset from scraped data")):
# logging.info('\n')
file_name = str(idx) #save the transcript as num, can be changed to folder name
output_path = join(processed_data_path, file_name)
txt_output_path = join(output_path, file_name + '.trans.txt')
# logging.info(f"{idx}. Creating transcript for {curr_folder}...")
try:
txt_path = list(Path(join(src_path, curr_folder)).rglob('*.txt'))[0]
audio_path = list(Path(join(src_path, curr_folder)).rglob('*.' + AUDIO_EXTENSION))[0]
except:
tqdm.write(f'Data not complete {idx} {curr_folder}')
continue
transcript, time_slices, txt_src = txt_to_trans(txt_path, file_name)
# logging.info(f"{idx}. Slicing audio for {curr_folder}...")
audio_file = AudioSegment.from_file(audio_path, AUDIO_EXTENSION)
# check timing accuracy by milliseconds
ms_not_accurate = check_ms_accuracy(time_slices)
if ms_not_accurate:
# logging.warning(f"{idx}. Srt not accurate with {ms_not_accurate} 820s. Deleting entry {curr_folder}")
shutil.rmtree(output_path, ignore_errors=True)
continue
# check timing of audio intro
audio_duration = audio_file.duration_seconds
audio_srt_duration = time_slices[-1][-1][-1] / 1000
intro_not_matched = check_intro_timing(audio_duration, audio_srt_duration)
if intro_not_matched:
# logging.warning(f"{idx}. Srt not matching with time slices. Deleting entry {curr_folder}")
shutil.rmtree(output_path, ignore_errors=True)
continue
# writing output
save_txt(transcript, txt_output_path)
save_txt(txt_src, txt_norm_path)
        for slice_idx, time_slice in time_slices:
            audio_slice = audio_file[time_slice[0]:time_slice[1]]
            audio_output_path = join(output_path, f"{file_name}-{slice_idx}.{AUDIO_EXTENSION}")
            audio_slice.export(audio_output_path, format=AUDIO_EXTENSION)
        # renamed from `idx` so the loop variable no longer shadows the
        # enumerate index, and the success message reports the folder index
        tqdm.write(f'Successfully created {idx} {curr_folder}')
def move_folders(folders, source, destination):
'''
Move a list of folders from source to destination
'''
logger.info(f'{destination.split("/")[-1]}: {folders}')
for folder in folders:
shutil.move(join(source, folder), join(destination, folder))
def split_dataset(ratio=[.6, .2, .2]):
'''
Split dataset to train dev test according to ratio
'''
folders = os.listdir(processed_data_path)
folders.sort()
random.shuffle(folders)
split_1 = int(ratio[0] * len(folders))
split_2 = int((ratio[0] + ratio[1]) * len(folders))
move_folders(folders[:split_1], processed_data_path, join(corpus_path, 'train'))
move_folders(folders[split_1:split_2], processed_data_path, join(corpus_path, 'dev'))
move_folders(folders[split_2:], processed_data_path, join(corpus_path, 'test'))
shutil.rmtree(processed_data_path, ignore_errors=True)
def main():
print('Scraping data...')
scraper.main(number_of_talks=500, starting_video_id=2000) # 1000-1500
print('\nStart preprocessing...')
build_dataset(src_path, sample=100)
split_dataset(ratio=ratio)
print("\nRemoving laughter and applause audio files...")
remove_applause.main(corpus_path)
if __name__ == "__main__":
    main()

# prdonahue/overholt
# -*- coding: utf-8 -*-
"""
overholt.api.users
~~~~~~~~~~~~~~~~~~
User endpoints
"""
from flask import Blueprint
from flask_login import current_user
from ..services import users
from . import route
bp = Blueprint('users', __name__, url_prefix='/users')
@route(bp, '/')
def whoami():
"""Returns the user instance of the currently authenticated user."""
return current_user._get_current_object()
@route(bp, '/<user_id>')
def show(user_id):
"""Returns a user instance."""
return users.get_or_404(user_id)
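
# With the blueprint registered under the /users prefix, the views above map to:
#   GET /users/            -> whoami()  (requires an authenticated session)
#   GET /users/<user_id>   -> show()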

# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
setup(
name='bifs',
version='0.2.0',
description='Implementation of Bayesian Imaging in Fourier Space (BIFS)',
long_description=readme,
author='<NAME>, <NAME>, <NAME>',
author_email='<EMAIL>,<EMAIL>',
url='https://github.com/bifs',
license=license,
packages=['bifs'],
install_requires=['imageio',
'jsonpickle',
'matplotlib',
'nibabel',
'numpy',
'PyQt5',
'scipy']
)
"""
TMVA reader runs with additional information
"""
from __future__ import division, print_function, absolute_import
import sys
import array
import pandas
from root_numpy.tmva import evaluate_reader
from . import tmva
import six
from six.moves import cPickle as pickle
__author__ = '<NAME>'
def tmva_process(info, data):
"""
Create TMVA classification factory, train, test and evaluate all methods
:param rep.estimators.tmva._AdditionalInformationPredict info: additional information
:param pandas.DataFrame data: test data
"""
import ROOT
reader = ROOT.TMVA.Reader()
for feature in data.columns:
reader.AddVariable(feature, array.array('f', [0.]))
model_type, sigmoid_function = info.model_type
reader.BookMVA(info.method_name, info.xml_file)
signal_efficiency = None
if model_type == 'classification' and sigmoid_function is not None and 'sig_eff' in sigmoid_function:
signal_efficiency = float(sigmoid_function.strip().split('=')[1])
assert 0.0 <= signal_efficiency <= 1., 'signal efficiency must be in [0, 1], not {}'.format(
signal_efficiency)
if signal_efficiency is not None:
predictions = evaluate_reader(reader, info.method_name, data, aux=signal_efficiency)
else:
predictions = evaluate_reader(reader, info.method_name, data)
return predictions
def main():
# Python 2 dumps in text mode. Python 3 in binary.
if six.PY2:
stdin = sys.stdin
else:
stdin = sys.stdin.buffer
# Reading the configuration from stdin
info = pickle.load(stdin)
data = pickle.load(stdin)
assert isinstance(info, tmva._AdditionalInformationPredict)
assert isinstance(data, pandas.DataFrame)
predictions = tmva_process(info, data)
with open(info.result_filename, 'wb') as predictions_file:
pickle.dump(predictions, predictions_file)
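
# This script is meant to run as a subprocess: the parent pickles the
# _AdditionalInformationPredict config and the DataFrame to stdin and reads
# the predictions back from info.result_filename.  An entry point along these
# lines is presumably how it is launched:
if __name__ == '__main__':
    main()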

#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from math import ceil
from typing import Optional, Tuple, Type, cast
from ax.core.parameter import ChoiceParameter, ParameterType, RangeParameter
from ax.core.search_space import SearchSpace
from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
from ax.modelbridge.registry import Cont_X_trans, Models, Y_trans
from ax.modelbridge.transforms.base import Transform
from ax.modelbridge.transforms.winsorize import Winsorize
from ax.utils.common.logger import get_logger
logger: logging.Logger = get_logger(__name__)
def _make_sobol_step(
num_arms: int = -1,
min_arms_observed: Optional[int] = None,
enforce_num_arms: bool = True,
recommended_max_parallelism: Optional[int] = None,
seed: Optional[int] = None,
) -> GenerationStep:
"""Shortcut for creating a Sobol generation step."""
return GenerationStep(
model=Models.SOBOL,
num_arms=num_arms,
# NOTE: ceil(-1 / 2) = 0, so this is safe to do when num arms is -1.
min_arms_observed=min_arms_observed or ceil(num_arms / 2),
enforce_num_arms=enforce_num_arms,
recommended_max_parallelism=recommended_max_parallelism,
model_kwargs={"deduplicate": True, "seed": seed},
)
def _make_botorch_step(
num_arms: int = -1,
min_arms_observed: Optional[int] = None,
enforce_num_arms: bool = True,
recommended_max_parallelism: Optional[int] = None,
winsorize: bool = False,
winsorization_limits: Optional[Tuple[Optional[float], Optional[float]]] = None,
) -> GenerationStep:
"""Shortcut for creating a BayesOpt generation step."""
if (winsorize and winsorization_limits is None) or (
winsorization_limits is not None and not winsorize
):
raise ValueError( # pragma: no cover
"To apply winsorization, specify `winsorize=True` and provide the "
"winsorization limits."
)
model_kwargs = None
if winsorize:
assert winsorization_limits is not None
model_kwargs = {
"transforms": [cast(Type[Transform], Winsorize)] + Cont_X_trans + Y_trans,
"transform_configs": {
"Winsorize": {
"winsorization_lower": winsorization_limits[0],
"winsorization_upper": winsorization_limits[1],
}
},
}
return GenerationStep(
model=Models.GPEI,
num_arms=num_arms,
# NOTE: ceil(-1 / 2) = 0, so this is safe to do when num arms is -1.
min_arms_observed=min_arms_observed or ceil(num_arms / 2),
enforce_num_arms=enforce_num_arms,
recommended_max_parallelism=recommended_max_parallelism,
model_kwargs=model_kwargs,
)
def _should_use_gp(search_space: SearchSpace, num_trials: Optional[int] = None) -> bool:
"""We should use only Sobol and not GPEI if:
1. there are less continuous parameters in the search space than the sum of
options for the choice parameters,
2. the number of total iterations in the optimization is known in advance and
there are less distinct points in the search space than the known intended
number of total iterations.
"""
num_continuous_parameters, num_discrete_choices, num_possible_points = 0, 0, 1
all_range_parameters_are_int = True
for parameter in search_space.parameters.values():
if isinstance(parameter, ChoiceParameter):
num_discrete_choices += len(parameter.values)
num_possible_points *= len(parameter.values)
if isinstance(parameter, RangeParameter):
num_continuous_parameters += 1
if parameter.parameter_type != ParameterType.INT:
all_range_parameters_are_int = False
else:
num_possible_points *= int(parameter.upper - parameter.lower)
if ( # If number of trials is known and it enough to try all possible points,
num_trials is not None # we should use Sobol and not BO.
and all_range_parameters_are_int
and num_possible_points <= num_trials
):
return False
return num_continuous_parameters >= num_discrete_choices
def choose_generation_strategy(
search_space: SearchSpace,
arms_per_trial: int = 1,
enforce_sequential_optimization: bool = True,
random_seed: Optional[int] = None,
winsorize_botorch_model: bool = False,
winsorization_limits: Optional[Tuple[Optional[float], Optional[float]]] = None,
no_bayesian_optimization: bool = False,
num_trials: Optional[int] = None,
) -> GenerationStrategy:
"""Select an appropriate generation strategy based on the properties of
the search space.
Args:
search_space: SearchSpace, based on the properties of which to select the
generation strategy.
arms_per_trial: If a trial is batched, how many arms will be in each batch.
Defaults to 1, which corresponds to a regular, non-batched, `Trial`.
enforce_sequential_optimization: Whether to enforce that the generation
strategy needs to be updated with `min_arms_observed` observations for
a given generation step before proceeding to the next one.
random_seed: Fixed random seed for the Sobol generator.
winsorize_botorch_model: Whether to apply the winsorization transform
prior to applying other transforms for fitting the BoTorch model.
winsorization_limits: Bounds for winsorization, if winsorizing, expressed
as percentile. Usually only the upper winsorization trim is used when
minimizing, and only the lower when maximizing.
no_bayesian_optimization: If True, Bayesian optimization generation
strategy will not be suggested and quasi-random strategy will be used.
num_trials: Total number of trials in the optimization, if
known in advance.
"""
# If there are more discrete choices than continuous parameters, Sobol
# will do better than GP+EI.
if not no_bayesian_optimization and _should_use_gp(
search_space=search_space, num_trials=num_trials
):
# Ensure that number of arms per model is divisible by batch size.
sobol_arms = max(5, len(search_space.parameters))
if arms_per_trial != 1: # pragma: no cover
# If using batches, ensure that initialization sample is divisible by
# the batch size.
sobol_arms = ceil(sobol_arms / arms_per_trial) * arms_per_trial
gs = GenerationStrategy(
steps=[
_make_sobol_step(
num_arms=sobol_arms,
enforce_num_arms=enforce_sequential_optimization,
seed=random_seed,
),
_make_botorch_step(
recommended_max_parallelism=3,
winsorize=winsorize_botorch_model,
winsorization_limits=winsorization_limits,
),
]
)
logger.info(
f"Using Bayesian Optimization generation strategy: {gs}. Iterations "
f"after {sobol_arms} will take longer to generate due to model-fitting."
)
return gs
logger.info(f"Using Sobol generation strategy.")
return GenerationStrategy(steps=[_make_sobol_step(seed=random_seed)])
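
# A minimal usage sketch; the parameter names and bounds below are purely
# illustrative, not part of this module:
if __name__ == "__main__":
    example_space = SearchSpace(parameters=[
        RangeParameter(name="lr", parameter_type=ParameterType.FLOAT,
                       lower=1e-5, upper=1e-1, log_scale=True),
        RangeParameter(name="batch_size", parameter_type=ParameterType.INT,
                       lower=16, upper=256),
    ])
    print(choose_generation_strategy(example_space, num_trials=30))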

# Sprite classes for platform game
import pygame
import random
from settings import *
vec = pygame.math.Vector2
class Spritesheet1:
# Utility class for loading and parsing spritesheets
def __init__(self, filename):
self.spritesheet1 = pygame.image.load(filename).convert()
def get_image(self, x, y, width, height):
# Grab an image out of a spritesheet
image = pygame.Surface((width, height))
image.blit(self.spritesheet1, (0, 0), (x, y, width, height))
# We divide width and height by 2 cuz the spritesheet is too big for us
image = pygame.transform.scale(image, (width // 2, height // 2))
return image
def Get_image_res(image, resize_ratio):
width, height = image.get_size()
image = pygame.transform.scale(image, (width // resize_ratio, height // resize_ratio))
return image
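
# Example: pulling a single platform frame out of the sheet.  The file name is
# an assumption (whichever spritesheet this game ships with); the coordinates
# match the 'normal' platform used below.  Note that get_image() halves the
# requested size and convert() requires the display to be initialised first:
#
#     sheet = Spritesheet1('graphics/spritesheet_jumper.png')
#     plat_img = sheet.get_image(0, 288, 380, 94)   # -> 190x47 surface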
class Button(pygame.sprite.Sprite):
def __init__(self, game, x, y):
self.groups = game.menu
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = self.game.spritesheet1.get_image(0, 96, 380, 94)
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.game.screen.blit(self.image, self.rect)
def draw_txt(self, text, size, color):
font = pygame.font.Font('fonts/AmaticSC-Bold.ttf', size)
text_surface = font.render(text, True, color)
text_rect = text_surface.get_rect()
text_rect.center = self.rect.center
self.game.screen.blit(text_surface, text_rect)
    def update(self):
        # transform.scale returns a new Surface rather than scaling in place,
        # so the result must be assigned back
        self.image = pygame.transform.scale(self.image, (380 * 2, 94 * 2))
        self.rect = self.image.get_rect(center=self.rect.center)
class Player(pygame.sprite.Sprite):
def __init__(self, game):
self._layer = PLAYER_LAYER
self.groups = game.all_sprites
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# Move properties
self.walking = False
self.jumping = False
# Pow properties
self.has_bubble = False
self.has_jetpack = False
self.has_wings = False
# Jet properties
self.acceleration = False
self.still = False
self.losing_wings = False
# Anmation properties
self.current_frame = 0
self.last_update = 0
self.load_images()
self.image = self.standing_frames[1]
self.rect = self.image.get_rect()
self.rect.center = (40, HEIGHT - 100)
self.pos = vec(40, HEIGHT - 100)
self.vel = vec(0, 0)
self.acc = vec(0, 0)
self.friction = PLAYER_FRICTION
def load_images(self):
# Standing frames for 2 cases:default and invincible
self.standing_frames = (self.game.spritesheet1.get_image(614, 1063, 120, 191),
self.game.spritesheet1.get_image(690, 406, 120, 201))
self.standing_frames_inv = (Get_image_res(pygame.image.load('graphics/bunny1_inv_stand.png'), 2),
Get_image_res(pygame.image.load('graphics/bunny1_inv_ready.png'), 2))
# Clearing the black square around the frames
for frame in self.standing_frames:
frame.set_colorkey(BLACK)
for frame in self.standing_frames_inv:
pygame.transform.scale(frame, (211 // 2, 215 // 2))
# Walking frames for 2 cases
self.walking_frames_R = (self.game.spritesheet1.get_image(678, 860, 120, 201),
self.game.spritesheet1.get_image(692, 1458, 120, 207))
self.walking_frames_inv_R = (Get_image_res(pygame.image.load('graphics/bunny1_inv_walk1.png'), 2),
Get_image_res(pygame.image.load('graphics/bunny1_inv_walk2.png'), 2))
self.walking_frames_L = []
self.walking_frames_inv_L = []
# Applying the L frames in both cases
for frame in self.walking_frames_R:
frame.set_colorkey(BLACK)
# 1 - horisontal , 2 - vertical
self.walking_frames_L.append(pygame.transform.flip(frame, True, False))
for frame in self.walking_frames_inv_R:
pygame.transform.scale(frame, (211 // 2, 215 // 2))
# 1 - horisontal , 2 - vertical
self.walking_frames_inv_L.append(pygame.transform.flip(frame, True, False))
# Player/jetpack images
self.jet_start_frames = (Get_image_res(pygame.image.load('graphics/player_jet_start1.png'), 2),
Get_image_res(pygame.image.load('graphics/player_jet_start2.png'), 2))
for image in self.jet_start_frames:
image.set_colorkey(BLACK)
self.jet_go_frames = (Get_image_res(pygame.image.load('graphics/player_jet1.png'), 2),
Get_image_res(pygame.image.load('graphics/player_jet2.png'), 2))
for image in self.jet_go_frames:
image.set_colorkey(BLACK)
# Player with wings images
self.has_wings_frames = (Get_image_res(pygame.image.load('graphics/player_fly1.png'), 2),
Get_image_res(pygame.image.load('graphics/player_fly2.png'), 2),
Get_image_res(pygame.image.load('graphics/player_fly3.png'), 2),
Get_image_res(pygame.image.load('graphics/player_fly4.png'), 2),
Get_image_res(pygame.image.load('graphics/player_fly5.png'), 2))
# Jump frames
self.jumping_frame = self.game.spritesheet1.get_image(382, 763, 150, 181)
self.jumping_frame.set_colorkey(BLACK)
self.jumping_frame_inv = Get_image_res(pygame.image.load('graphics/bunny1_inv_jump.png'), 2)
def jump(self):
# Jump only if standing on a platform and without a pow
self.rect.x += 2
hits = pygame.sprite.spritecollide(self, self.game.platforms, False)
self.rect.x -= 2
if hits and not self.jumping and not self.has_jetpack and not self.has_wings and not self.has_bubble:
self.jumping = True
self.vel.y = -PLAYER_JUMP_V
self.game.jump_sound.play()
def jump_cut(self):
# The code that cuts the jump
if self.jumping and not self.has_jetpack and not self.has_wings and not self.has_bubble:
if self.vel.y < -10:
self.vel.y = -10
def update(self):
self.animation()
# Applying gravity
if self.has_bubble or self.has_jetpack or self.has_wings:
self.acc = vec(0, 0)
else:
self.acc = vec(0, PLAYER_GRAV)
# Applying movement
keys = pygame.key.get_pressed()
# With wings
if self.has_wings:
if keys[pygame.K_a]:
self.acc.x = -PLAYER_FLY_ACC
if keys[pygame.K_d]:
self.acc.x = PLAYER_FLY_ACC
# Without wings
else:
if keys[pygame.K_a]:
self.acc.x = -PLAYER_ACC
if keys[pygame.K_d]:
self.acc.x = PLAYER_ACC
# Apply friction
self.acc.x += self.vel.x * self.friction
# Equations of motion
self.vel += self.acc
if abs(self.vel.x) < 0.5: # If the vel < 0.5 we stop
self.vel.x = 0
self.pos += self.vel + 0.5 * self.acc
# Wrap around the sides of the screen
if self.pos.x > WIDTH + self.rect.width / 2:
self.pos.x = 0 - self.rect.width / 2
if self.pos.x < 0 - self.rect.width / 2:
self.pos.x = WIDTH + self.rect.width / 2
# If player has wings we move him down the screen
if self.has_wings:
self.pos.y += 3
if self.pos.y >= SCR_CHANGE_H_FLY + 10:
self.pos.y = SCR_CHANGE_H_FLY + 10
# If player is about to lose wings we move him up the screen so he does not fall immediately after wing loss
if self.losing_wings:
self.pos.y -= 4
if self.pos.y <= SCR_CHANGE_H:
self.pos.y = SCR_CHANGE_H
self.rect.midbottom = self.pos
def animation(self):
time_passed = pygame.time.get_ticks()
# We define when the player is considered to be moving
if self.vel.x != 0:
self.walking = True
else:
self.walking = False
# Show walking animation
if self.walking and not self.has_jetpack and not self.has_wings:
if time_passed - self.last_update > 140:
self.last_update = time_passed
if self.has_bubble:
self.current_frame = (self.current_frame + 1) % len(self.walking_frames_inv_L)
else:
self.current_frame = (self.current_frame + 1) % len(self.walking_frames_L)
rect_bottom = self.rect.bottom
if self.vel.x > 0:
if self.has_bubble:
self.image = self.walking_frames_inv_R[self.current_frame]
else:
self.image = self.walking_frames_R[self.current_frame]
else:
if self.has_bubble:
self.image = self.walking_frames_inv_L[self.current_frame]
else:
self.image = self.walking_frames_L[self.current_frame]
self.rect = self.image.get_rect()
self.rect.bottom = rect_bottom
# Show jumping animation
if self.jumping and not self.walking and not self.has_jetpack and not self.has_wings:
rect_bottom = self.rect.bottom
if self.has_bubble:
self.image = self.jumping_frame_inv
else:
self.image = self.jumping_frame
self.rect = self.image.get_rect()
self.rect.bottom = rect_bottom
# Show standing animation
if not self.jumping and not self.walking and not self.has_jetpack and not self.has_wings:
if time_passed - self.last_update > 350:
self.last_update = time_passed
if self.has_bubble:
self.current_frame = (self.current_frame + 1) % len(self.standing_frames_inv)
else:
self.current_frame = (self.current_frame + 1) % len(self.standing_frames)
rect_bottom = self.rect.bottom
if self.has_bubble:
self.image = self.standing_frames_inv[self.current_frame]
else:
self.image = self.standing_frames[self.current_frame]
self.rect = self.image.get_rect()
self.rect.bottom = rect_bottom
# Show jetpack animation
if self.has_jetpack and not self.has_wings:
if time_passed - self.last_update > 50:
self.last_update = time_passed
if self.acceleration:
self.current_frame = (self.current_frame + 1) % len(self.jet_start_frames)
self.image = self.jet_start_frames[self.current_frame]
if self.still:
self.current_frame = (self.current_frame + 1) % len(self.jet_go_frames)
self.image = self.jet_go_frames[self.current_frame]
# Show wing animation
if self.has_wings:
if time_passed - self.last_update > 90:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.has_wings_frames)
rect_center = self.rect.centerx
self.image = self.has_wings_frames[self.current_frame]
self.rect = self.image.get_rect()
self.rect.centerx = rect_center
self.mask = pygame.mask.from_surface(self.image)
class Platform(pygame.sprite.Sprite):
def __init__(self, game, x, y):
self._layer = PLATFORM_LAYER
self.groups = game.all_sprites, game.platforms
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.type = None
# Move properties
self.on_move = False
self.on_move_x = False
self.on_move_y = False
# Mob properties
self.has_spikey = False
self.has_cloud = False
self.has_pow = False
self.has_coin = False
self.has_wingman = False
self.has_mob = False
# Respawn property
self.respawn = False
# Plat speed properties
self.vel_x = 1
self.vel_y = 1
self.count_vel_y = 0
if self.has_spikey or self.has_cloud or self.has_wingman:
self.has_mob = True
# Applying the chances of spawning a moving plat
if random.randrange(100) < MOVING_PLAT_SPAWN_RATIO and self.game.score > 200:
self.on_move = True
# Defining the move type
if self.on_move:
if random.randrange(100) < 90:
self.on_move_x = True
else:
self.on_move_y = True
# Platform images
snowy_images = (self.game.spritesheet1.get_image(0, 768, 380, 94),
self.game.spritesheet1.get_image(213, 1764, 201, 100))
icy_images = (Get_image_res(pygame.image.load('graphics/ice_plat_l.png'), 2),
Get_image_res(pygame.image.load('graphics/ice_plat_s.png'), 2))
normal_images = (self.game.spritesheet1.get_image(0, 288, 380, 94),
self.game.spritesheet1.get_image(213, 1662, 201, 100))
stone_images = (self.game.spritesheet1.get_image(0, 96, 380, 94),
self.game.spritesheet1.get_image(382, 408, 200, 100))
wood_images = (self.game.spritesheet1.get_image(0, 960, 380, 94),
self.game.spritesheet1.get_image(218, 1558, 200, 100))
pink_images = (self.game.spritesheet1.get_image(0, 576, 380, 94),
self.game.spritesheet1.get_image(218, 1456, 201, 100))
sand_images = (self.game.spritesheet1.get_image(0, 672, 380, 94),
self.game.spritesheet1.get_image(208, 1879, 201, 100))
# Platform choices
if PLAT_STONE_START > self.game.score >= PLAT_NM_START:
if random.randrange(100) < 90:
self.type = 'normal'
else:
self.type = 'sand'
# self.type = choice(['wooden', 'snowy'])
if PLAT_PINK_START > self.game.score >= PLAT_STONE_START:
if random.randrange(100) < 90:
self.type = 'stone'
else:
self.type = 'sand'
# self.type = choice(['stone', 'snowy'])
if PLAT_SNOW_START > self.game.score >= PLAT_PINK_START:
if random.randrange(100) < 90:
self.type = 'pink'
else:
self.type = 'icy'
# self.type = choice(['pink', 'snowy'])
if self.game.score >= PLAT_SNOW_START:
self.type = random.choice(['icy', 'snowy'])
# Platform images attachment
if self.type == 'normal':
self.image = random.choice(normal_images)
elif self.type == 'wooden':
self.image = random.choice(wood_images)
elif self.type == 'sand':
self.image = random.choice(sand_images)
elif self.type == 'stone':
self.image = random.choice(stone_images)
elif self.type == 'pink':
self.image = random.choice(pink_images)
elif self.type == 'snowy':
self.image = random.choice(snowy_images)
elif self.type == 'icy':
self.image = random.choice(icy_images)
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
# Applying the sprites spawning on platform if wing pow is not initiated
if not self.game.player.has_wings:
if random.randrange(100) < POW_SPAWN_RATIO and not game.player.has_bubble and not game.player.has_jetpack \
and len(self.game.powerups) == 0 and self.game.score != 0 and not self.on_move_y:
Powerup(self.game, self)
self.has_pow = True
if random.randrange(100) < COIN_SPAWN_RATIO:
Coin(self.game, self)
self.has_coin = True
        # There shouldn't be too many mobs on screen at once
if len(self.game.mobs) < 3:
if random.randrange(100) < SPIKEY_SPAWN_RATIO and self.image == normal_images[0] and not self.on_move \
and not self.has_mob and PLAT_SNOW_START > self.game.score > SPIKEY_SPAWN_SCORE:
Spikey(self.game, self)
self.has_spikey = True
self.has_mob = True
if random.randrange(100) < CLOUD_SPAWN_RATIO and not self.on_move and not self.has_mob and \
self.game.score > PLAT_STONE_START:
Cloud(self.game, self)
self.has_cloud = True
self.has_mob = True
if random.randrange(100) < WM_SPAWN_RATIO and (self.image == pink_images[0] or self.image == snowy_images[0]) and not self.on_move \
and not self.has_mob and self.game.score > PLAT_PINK_START:
Wingman(self.game, self)
self.has_wingman = True
self.has_mob = True
def update(self, *args):
# Moving left/right
if self.on_move_x:
self.rect.x += self.vel_x
if self.rect.right > WIDTH - 15:
self.vel_x = -1
if self.rect.left < 15:
self.vel_x = 1
# Moving up/down
if self.on_move_y:
self.rect.y += self.vel_y
self.count_vel_y += self.vel_y
if self.count_vel_y > 130:
self.vel_y = -1
if self.count_vel_y < 0:
self.vel_y = 1
class Powerup(pygame.sprite.Sprite):
def __init__(self, game, plat):
self._layer = POW_LAYER
self.groups = game.all_sprites, game.powerups
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.plat = plat
# We define the type as boost and then we change it if needed
self.type = 'boost'
self.spawn_score = 0
self.spawn_ratio = random.randrange(100)
if 20 < self.spawn_ratio < 50:
self.type = 'bubble'
elif 7 < self.spawn_ratio < 20:
self.type = 'wings'
elif 0 < self.spawn_ratio < 7:
self.type = 'jetpack'
if self.type == 'boost':
self.image = self.game.spritesheet1.get_image(820, 1805, 71, 70)
elif self.type == 'bubble':
self.image = self.game.spritesheet1.get_image(826, 134, 71, 70)
elif self.type == 'jetpack':
self.image = self.game.spritesheet1.get_image(852, 1089, 65, 77)
elif self.type == 'wings':
self.image = self.game.spritesheet1.get_image(826, 1292, 71, 70)
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
# Position of the pow
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 2
# Jumping var
self.jumpCount = 1.2
def update(self):
self.rect.centerx = self.plat.rect.centerx
# Jetpack does not jump
if self.type != 'jetpack':
# Checking if the powerup is out of the screen or on it
if self.rect.y >= 0:
if self.jumpCount >= -2:
self.jumpCount -= 0.1
self.rect.y -= (self.jumpCount * abs(self.jumpCount)) * 0.5
else:
self.jumpCount = 1.2
self.rect.bottom = self.plat.rect.top - 2
# Else if the powerup is above the screen we change the signs
else:
if self.jumpCount >= 2:
self.jumpCount -= 0.1
self.rect.y -= (self.jumpCount * abs(self.jumpCount)) * 0.5
else:
self.jumpCount = 1.2
self.rect.bottom = self.plat.rect.top - 2
# Jetpack always is still
else:
self.rect.bottom = self.plat.rect.top
# Killing the sprite
if not self.game.platforms.has(self.plat):
self.kill()
self.plat.has_pow = False
class Coin(pygame.sprite.Sprite):
def __init__(self, game, plat):
self._layer = POW_LAYER
self.groups = game.all_sprites, game.coins
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.plat = plat
# Animation properties
self.last_update = 0
self.current_frame = 0
self.load_images()
self.image = self.gold_images[0]
self.rect = self.image.get_rect()
# Position
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 5
# Images depending on the score
if PLAT_STONE_START > self.game.score >= 0:
self.type = 'bronze'
elif PLAT_PINK_START > self.game.score > PLAT_STONE_START:
self.type = 'silver'
else:
self.type = 'gold'
def load_images(self):
self.gold_images = (self.game.spritesheet1.get_image(698, 1931, 84, 84),
self.game.spritesheet1.get_image(829, 0, 66, 84),
self.game.spritesheet1.get_image(897, 1574, 50, 84),
self.game.spritesheet1.get_image(645, 651, 15, 84),
pygame.transform.flip(self.game.spritesheet1.get_image(897, 1574, 50, 84), True, False),
pygame.transform.flip(self.game.spritesheet1.get_image(829, 0, 66, 84), True, False))
for image in self.gold_images:
image.set_colorkey(BLACK)
self.silver_images = (self.game.spritesheet1.get_image(584, 406, 84, 84),
self.game.spritesheet1.get_image(852, 1003, 66, 84),
self.game.spritesheet1.get_image(899, 1219, 50, 84),
self.game.spritesheet1.get_image(662, 651, 14, 84),
pygame.transform.flip(self.game.spritesheet1.get_image(899, 1219, 50, 84), True, False),
pygame.transform.flip(self.game.spritesheet1.get_image(852, 1003, 66, 84), True, False))
for image in self.silver_images:
image.set_colorkey(BLACK)
self.bronze_images = (self.game.spritesheet1.get_image(707, 296, 84, 84),
self.game.spritesheet1.get_image(826, 206, 66, 84),
self.game.spritesheet1.get_image(899, 116, 50, 84),
self.game.spritesheet1.get_image(670, 406, 14, 84),
pygame.transform.flip(self.game.spritesheet1.get_image(899, 116, 50, 84), True, False),
pygame.transform.flip(self.game.spritesheet1.get_image(826, 206, 66, 84), True, False))
for image in self.bronze_images:
image.set_colorkey(BLACK)
def update(self):
time_passed = pygame.time.get_ticks()
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 5
if time_passed - self.last_update > 100:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.gold_images)
if self.type == 'bronze':
self.image = self.bronze_images[self.current_frame]
elif self.type == 'silver':
self.image = self.silver_images[self.current_frame]
else:
self.image = self.gold_images[self.current_frame]
self.rect = self.image.get_rect()
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 5
# We kill the sprite when the plat is killed
if not self.game.platforms.has(self.plat):
self.kill()
self.plat.has_coin = False
class Flyman(pygame.sprite.Sprite):
def __init__(self, game):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.mobs
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# Images and animation
self.image_up = self.game.spritesheet1.get_image(566, 510, 122, 139)
self.image_up.set_colorkey(BLACK)
self.image_down = self.game.spritesheet1.get_image(568, 1534, 122, 135)
self.image_down.set_colorkey(BLACK)
self.image = self.image_up
self.rect = self.image.get_rect()
# Position
self.rect.centerx = random.choice([-100, WIDTH + 100])
self.rect.y = HEIGHT / 3
# Move properties
self.velx = random.randrange(1, 4)
self.vely = 0
self.dy = 0.5
def update(self):
# We apply movement
self.rect.x += self.velx
self.vely += self.dy
self.rect.y += self.vely
# We apply up and down movement
if self.vely > 3 or self.vely < -3:
self.dy *= -1
rect_center = self.rect.center
# We apply animation
if self.dy < 0:
self.image = self.image_up
else:
self.image = self.image_down
self.rect = self.image.get_rect()
self.mask = pygame.mask.from_surface(self.image)
self.rect.center = rect_center
        # The sprite bounces between the horizontal bounds until it drops off the bottom of the screen
if self.rect.left > WIDTH + 100 or self.rect.right < -100:
self.velx *= -1
# Killing the sprite
if self.rect.centery > HEIGHT + 100:
self.game.has_flyman = False
self.kill()
class CloudBG(pygame.sprite.Sprite):
def __init__(self, game):
self._layer = CLOUD_LAYER
self.groups = game.all_sprites, game.clouds
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = random.choice(self.game.cloud_images)
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
scale = random.randrange(50, 100) / 100
        self.image = pygame.transform.scale(self.image, (int(self.rect.width * scale), int(self.rect.height * scale)))
        self.rect = self.image.get_rect()  # refresh the rect to the scaled size before positioning
        self.rect.x = random.randrange(WIDTH - self.rect.width)
        self.rect.y = random.randrange(-500, -50)
def update(self):
if self.rect.top > HEIGHT * 2:
self.kill()
class Spikey(pygame.sprite.Sprite):
def __init__(self, game, plat):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.mobs
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.plat = plat
self.load_images()
self.current_frame = 0
self.last_update = 0
self.image = self.images_R[0]
self.rect = self.image.get_rect()
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 1
self.acc_x = SPIKEY_ACC
self.facing_left = False
self.facing_right = True
def load_images(self):
self.images_R = (self.game.spritesheet1.get_image(704, 1256, 120, 159),
self.game.spritesheet1.get_image(812, 296, 90, 155))
for image in self.images_R:
image.set_colorkey(BLACK)
self.images_L = (pygame.transform.flip(self.game.spritesheet1.get_image(704, 1256, 120, 159), True, False),
pygame.transform.flip(self.game.spritesheet1.get_image(812, 296, 90, 155), True, False))
for image in self.images_L:
image.set_colorkey(BLACK)
def update(self):
self.animation()
if self.game.platforms.has(self.plat):
self.rect.bottom = self.plat.rect.top - 1
# Applying constant movement
if self.facing_left or self.facing_right:
self.rect.x += self.acc_x
# Moving from right to left
if self.rect.right > self.plat.rect.right:
self.facing_right = False
self.facing_left = True
self.acc_x = -SPIKEY_ACC
# Moving from left to right
if self.rect.left < self.plat.rect.left:
self.facing_right = True
self.facing_left = False
self.acc_x = SPIKEY_ACC
# Killing the sprite when it disappears off the screen
if self.rect.top > HEIGHT:
self.kill()
self.plat.has_spikey = False
self.plat.has_mob = False
def animation(self):
time_passed = pygame.time.get_ticks()
if time_passed - self.last_update > SPIKEY_FRAME_TIME:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.images_R)
rect_bottom = self.rect.bottom
centerx = self.rect.centerx
if self.facing_right:
self.image = self.images_R[self.current_frame]
self.rect.x += self.acc_x
if self.facing_left:
self.image = self.images_L[self.current_frame]
self.rect.x += self.acc_x
self.rect = self.image.get_rect()
self.rect.centerx = centerx
self.rect.bottom = rect_bottom
class Cloud(pygame.sprite.Sprite):
def __init__(self, game, plat):
self._layer = 4
self.groups = game.all_sprites, game.passive_mobs
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.plat = plat
# Defining the images
self.images = (self.game.spritesheet1.get_image(0, 1152, 260, 134),
Get_image_res(pygame.image.load('graphics/Cloud1.png'), 2),
Get_image_res(pygame.image.load('graphics/Cloud2.png'), 2),
Get_image_res(pygame.image.load('graphics/Cloud3.png'), 2),
Get_image_res(pygame.image.load('graphics/Cloud4.png'), 2))
self.image = self.images[0]
self.rect = self.image.get_rect()
self.rect.centerx = self.plat.rect.centerx
self.rect.bottom = self.plat.rect.top - 60
self.last_update = 0
self.last_struck = False
self.current_frame = 0
# The first image is from the spritesheet so we set the colorkey to black
if self.image == self.images[0]:
self.image.set_colorkey(BLACK)
def update(self, *args):
self.rect.centerx = self.plat.rect.centerx
if self.game.platforms.has(self.plat):
self.rect.bottom = self.plat.rect.top - 60
# Setting the animation
time_passed = pygame.time.get_ticks()
if time_passed - self.last_update > 500:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.images)
self.image = self.images[self.current_frame]
# Spawning the lightining at the peak image
if self.image == self.images[4] and len(self.game.lightinings) < 4:
self.game.lightining_sound.play()
Lightining(self.game, self)
# Killing the sprite when it dissapears off the screen
if self.rect.top > HEIGHT:
self.kill()
self.plat.has_cloud = False
self.plat.has_mob = False
# Spawns ony with a cloud
class Lightining(pygame.sprite.Sprite):
def __init__(self, game, cloud):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.mobs, game.lightinings
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.cloud = cloud
self.image = self.game.spritesheet1.get_image(895, 453, 55, 114)
# Rare gold lightining
        if random.uniform(0, 100) < 1.5:  # randrange() rejects floats; uniform() keeps the intended 1.5% chance
self.image = self.game.spritesheet1.get_image(897, 0, 55, 114)
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.top = self.cloud.rect.bottom - 2
self.rect.centerx = self.cloud.rect.centerx - 5
def update(self, *args):
# Kill if the peak image is gone
if self.cloud.image != self.cloud.images[4] or self.rect.top > HEIGHT:
self.kill()
class Wingman(pygame.sprite.Sprite):
def __init__(self, game, plat):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.flying_mobs
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.plat = plat
self.images = (self.game.spritesheet1.get_image(382, 635, 174, 126),
self.game.spritesheet1.get_image(0, 1879, 206, 107),
self.game.spritesheet1.get_image(0, 1559, 216, 101),
self.game.spritesheet1.get_image(0, 1456, 216, 101),
self.game.spritesheet1.get_image(382, 510, 182, 123),
self.game.spritesheet1.get_image(0, 1456, 216, 101),
self.game.spritesheet1.get_image(0, 1559, 216, 101),
self.game.spritesheet1.get_image(0, 1879, 206, 107))
for image in self.images:
image.set_colorkey(BLACK)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.rect.centerx = self.plat.rect.centerx
self.rect.centery = self.plat.rect.centery
# Move properties
self.acc_y = WM_ACC_UP
self.vel_y = 0
self.current_frame = 0
self.last_update = 0
self.facing_up = True
self.facing_down = False
def update(self, *args):
self.animation()
self.rect.centerx = self.plat.rect.centerx
# We apply constant movement
if self.facing_up or self.facing_down:
self.rect.top += self.acc_y
self.acc_y += self.vel_y
# We apply the borders and change the animation properties
# Going up
if self.rect.y > self.plat.rect.y + 80:
self.acc_y = WM_ACC_UP
self.vel_y = 0
self.facing_up = True
self.facing_down = False
self.current_frame = 0
# We slow down the falling sprite to make it look more natural
if self.plat.rect.y + 80 > self.rect.y > self.plat.rect.y + 40 and self.facing_down:
self.vel_y = -WM_VEL
# We fall and we speed up as we do it
if self.rect.y < self.plat.rect.y - 120:
self.acc_y = WM_ACC_DOWN
self.vel_y = WM_VEL
if self.acc_y >= 4:
self.acc_y = 4
self.facing_down = True
self.facing_up = False
# Killing the sprite when it is out of the screen
if not self.game.platforms.has(self.plat):
if self.rect.y > HEIGHT:
self.kill()
self.plat.has_wingman = False
self.plat.has_mob = False
def animation(self):
# Animation up and down
time_passed = pygame.time.get_ticks()
if self.facing_up:
if time_passed - self.last_update > WM_FRAME_TIME:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.images)
centery = self.rect.centery
self.image = self.images[self.current_frame]
self.rect = self.image.get_rect()
self.rect.centery = centery
else:
self.image = pygame.transform.flip(self.images[4], False, True)
class Sun(pygame.sprite.Sprite):
def __init__(self, game):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.flying_mobs
pygame.sprite.Sprite.__init__(self, self.groups)
self.game = game
# Applying sun types according to the score
self.type = 'sun'
if 1500 > self.game.score > SUN_SPAWN_SCORE:
self.type = 'moon'
if self.type == 'sun':
self.images = (self.game.spritesheet1.get_image(534, 913, 142, 148),
self.game.spritesheet1.get_image(421, 1390, 148, 142))
elif self.type == 'moon':
self.images = (self.game.spritesheet1.get_image(534, 763, 142, 148),
self.game.spritesheet1.get_image(464, 1122, 148, 141))
for image in self.images:
image.set_colorkey(BLACK)
self.image = self.images[0]
self.rect = self.image.get_rect()
# Applying y and x pos
if self.game.player.has_wings:
self.rect.centerx = random.randrange(70, WIDTH - 70)
else:
self.rect.centerx = random.choice([-100, WIDTH + 100])
self.rect.y = random.choice([-100, -75])
        # vel_y makes the sun scroll more slowly than the platforms when the
        # screen shifts down, keeping it on screen longer and the game challenging.
        # vel_y is not applied inside this class; the game applies it when it
        # moves the flying mobs.
self.vel_y = -PLAYER_JUMP_V // 3.5
self.vel_x = SUN_VEL
self.current_frame = 0
self.last_update = 0
def update(self, *args):
# Apply animation
self.animation()
if self.game.player.has_wings:
self.vel_y = 0
else:
# Apply constant movement
self.rect.x += self.vel_x
# Changing the direction
if self.rect.right > WIDTH - 5:
self.vel_x = -SUN_VEL
if self.rect.left < 5:
self.vel_x = SUN_VEL
# Killing the sprite if it is off the screen
if self.rect.y > HEIGHT:
self.kill()
self.game.has_sun = False
def animation(self):
time_passed = pygame.time.get_ticks()
if time_passed - self.last_update > SUN_FRAME_CHANGE:
self.last_update = time_passed
self.current_frame = (self.current_frame + 1) % len(self.images)
self.image = self.images[self.current_frame]

import os
from pprint import pprint
from typing import List, Tuple
from logger import log_settings
from local_db import LocalDb, local_db_name
from dir_tree import CreateTree
app_log = log_settings()
local_path = "k:\\data\\paper_dtdt\\some_other\\"
class ParseFiles:
def __init__(self, tb_item):
self._root = local_path
self._name = tb_item.name
self._surname = tb_item.surname
self._files: str = ""
def __repr__(self):
return "ParseFiles_" + self.name + "_" + self.surname
@property
def name(self):
return self._name.lower()
@property
def surname(self):
return self._surname.lower()
@property
def root(self):
return self._root
@root.setter
def root(self, value):
self._root = value
@property
def full_path(self):
return os.path.join(self.root, self._files)
def walk_files(self):
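# Added note: this walk deliberately rebinds self.root to the directory
# currently being visited, so that full_path (read inside find_equals right
# after each yield) resolves against the directory containing that file.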
for self.root, dirs, file_list in os.walk(local_path):
for files in file_list:
yield files
@property
def get_title(self):
return self._files.split(".mp4")[0].lower()
def find_equals(self):
app_log.info(f"parsing for {repr(self)}")
for self._files in self.walk_files():
if self.name in self.get_title and self.surname in self.get_title:
yield self._files, self.full_path
if __name__ == "__main__":
app_log.info("Main app starts.")
datab = LocalDb(db_name=local_db_name)
datab.open_session()
inst = CreateTree()
for table_item in datab.select_all:
for movie in ParseFiles(tb_item=table_item).find_equals():
inst.main(tb_item=table_item,
filename=movie[0],
path_from=movie[1])
inst.copy_pool()
inst.remove_set()
datab.close_session()
datab.close_engine()
app_log.info("Main app ends.")
| StarcoderdataPython |
4834155 | # Generated by Django 2.2.3 on 2019-07-15 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movieapi', '0002_auto_20190715_1735'),
]
operations = [
migrations.AddField(
model_name='movie',
name='director',
field=models.CharField(default='', max_length=50),
),
migrations.AddField(
model_name='movie',
name='writer',
field=models.CharField(default='', max_length=50),
),
]
| StarcoderdataPython |
50031 | from hivemind import app
from flask import flash, redirect, render_template, request, url_for
from mcipc.query import Client as QClient
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
with QClient("diseased.horse", 25565) as q:
stats = q.full_stats
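# Hedged note: the dict-style indexing below assumes this mcipc version
# exposes full_stats as a mapping; newer mcipc releases return a stats
# object, in which case attribute access (e.g. stats.num_players) is needed.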
return render_template(
"index.html",
status="Unknown",
online_players=stats["num_players"],
total_players=stats["max_players"],
motd=stats["hostname"],
players=["OO"],
)
else:
return str(request.form)
| StarcoderdataPython |
1798441 | from django.shortcuts import render
from InputOutputFiles import Speech_to_Text as listen
from InputOutputFiles import Text_to_Speech as speak
import base
def index(request):
return render(request, 'search/search.html', {'query':"", 'output':""})
def listenSearchQuery(request):
speak.say("Speak your query")
query = listen.listenInput()
output = base.getAPIOutput(query, "Search")
speak.say(output)
if output == "Project doesnt exist":
to_do = 0
in_progress = 0
done = 0
else:
to_do = base.getToDoIssueNum()
in_progress = base.getInProgressIssueNum()
done = base.getDoneIssueNum()
return render(request, 'search/search.html', {'query' : query, 'output':output, 'to_do':to_do, 'in_progress':in_progress, 'done':done})
| StarcoderdataPython |
115756 | # Source repo: jovanbrakus/cherrypy-example
__author__ = '<NAME> <<EMAIL>>'
__contact__ = '<EMAIL>'
__date__ = '31 May 2012'
| StarcoderdataPython |
1758562 | # convert_scores_to_average.py
import glob
import sys
import os
pattern = sys.argv[1]
def file_to_scores(f):
with open(f, 'r') as new_f:
j = new_f.read().split('\n')
rouge_1_f = float(j[2].split(':')[1].split()[0]) * 100
rouge_2_f = float(j[7].split(':')[1].split()[0]) * 100
rouge_l_f = float(j[12].split(':')[1].split()[0]) * 100
return rouge_1_f, rouge_2_f, rouge_l_f
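# Added note: file_to_scores assumes the fixed pyrouge ROUGE_results.txt
# layout, where the ROUGE-1/2/L Average_F lines sit at indices 2, 7 and 12.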
rouge_1 = 0
rouge_2 = 0
rouge_l = 0
k = len(glob.glob(pattern + '*'))
for i in glob.glob(pattern + '*'):
rouge_path = os.path.join(glob.glob(os.path.join(i,'decode_val_*'))[0], 'ROUGE_results.txt')
r1, r2, rl = file_to_scores(rouge_path)
rouge_1 += r1
rouge_2 += r2
rouge_l += rl
rouge_1 = rouge_1/k
rouge_2 = rouge_2/k
rouge_l = rouge_l/k
with open(pattern, 'w') as f:
f.write("%s\t%f\t%f\t%f"%(pattern,rouge_1,rouge_2,rouge_l))
print("%s\t%f\t%f\t%f"%(pattern,rouge_1,rouge_2,rouge_l))
| StarcoderdataPython |
39293 | # Title: Notification Processor
# Tags: plyer, python
# Shows a desktop notification with a title and message of your choice.
# plyer is a cross-platform library; its documentation has more information.
from plyer import notification
def notifyme(title, message):
notification.notify(
title=title,
message=message,
app_icon='Write your icon address here',
timeout=5
)
notifyme("Title of notification box", "Message in notification")
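# Per plyer's notification API, app_icon should be a filesystem path to an
# icon file (a .ico on Windows), and timeout is given in seconds.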
| StarcoderdataPython |
4817067 | # HackerRank/Beautiful_Triplets.py
def beautifulTriplets(d, arr):
c = 0
for i in arr:
if i + d in arr and i + d*2 in arr:
c += 1
return c
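# A hedged alternative, not part of the original submission: `in arr` scans the
# list on every check, making the loop O(n^2) overall. A set gives O(1)
# membership tests; the name beautiful_triplets_fast is hypothetical. Behaviour
# matches the original for the distinct, increasing values the problem gives.
def beautiful_triplets_fast(d, arr):
    seen = set(arr)  # one O(n) pass instead of repeated list scans
    return sum(1 for i in arr if i + d in seen and i + 2 * d in seen)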
if __name__ == '__main__':
first_multiple_input = input().rstrip().split()
n, d = int(first_multiple_input[0]), int(first_multiple_input[1])
arr = list(map(int, input().rstrip().split()))
print(beautifulTriplets(d, arr))
| StarcoderdataPython |
3393422 | import os
os.environ['AIRFLOW__CORE__UNIT_TEST_MODE'] = 'True'
| StarcoderdataPython |
1708926 | import sys
import os
from datetime import datetime, timedelta
import numpy as np
import xarray as xr
path = str(sys.argv[1])
name = str(sys.argv[2])
level = str(sys.argv[3])
member = int(sys.argv[4])
path_wrfref = os.getenv("PATH_WRFREF")
f = xr.open_dataset(path).squeeze()
initialization = datetime.strptime(f.initialization, "%Y-%m-%d %H:%M:%S")
forecast_hour = int(f.forecast_hour)
valid = initialization + timedelta(hours=forecast_hour)
domain = int(f.domain)
ref = xr.open_dataset("{}/wrfoutREFd0{}".format(path_wrfref, domain)).squeeze()
if level == "Surface":
sel = {"member": member}
else:
sel = {"member": member, "pressure": int(level)}
# Extract the forecast field from the dataset, convert to *DOUBLE* floating point
# precision (float64) as required by MET, and round to avoid adding random noise.
try:
fcst_field = np.asarray(f[name].sel(sel), dtype=float).round(5)
met_data = np.flip(fcst_field, axis=0).copy()
except KeyError as err:
sys.stderr.write("{}: KeyError: {}".format(sys.argv[0], err))
sys.exit(1)
# =====
# Create attributes dictionary as specified in MET user's guide under Python embedding
# =====
try:
xlat = ref.variables['XLAT'].data
except KeyError as err:
sys.stderr.write("{}: KeyError: {}".format(sys.argv[0], err))
sys.exit(1)
try:
xlong = ref.variables['XLONG'].data
except KeyError as err:
sys.stderr.write("{}: KeyError: {}".format(sys.argv[0], err))
sys.exit(1)
grid_attrs = {
'type': 'Lambert Conformal',
'hemisphere': 'N',
'name': 'TTU WRF',
'lat_pin': float(xlat[0, 0]),
'lon_pin': float(xlong[0, 0]),
'x_pin': 0.0,
'y_pin': 0.0,
'r_km': 6371.2,
'scale_lat_1': float(ref.attrs['TRUELAT1']),
'scale_lat_2': float(ref.attrs['TRUELAT2']),
'lon_orient': float(ref.attrs['STAND_LON']),
'd_km': float(ref.attrs['DX']) / 1000.,
'nx': int(ref.attrs['WEST-EAST_GRID_DIMENSION']),
'ny': int(ref.attrs['SOUTH-NORTH_GRID_DIMENSION']),
}
attrs = {
'valid': valid.strftime("%Y%m%d_%H"),
'init': initialization.strftime("%Y%m%d_%H"),
'lead': str(forecast_hour),
'accum': '0',
'name': name,
'long_name': name,
'level': level,
'units': str(f[name].units),
'grid': grid_attrs,
}
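# Under MET's Python embedding, MET imports this script and reads the
# module-level `met_data` array and `attrs` dictionary defined above;
# nothing is returned or written out explicitly.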
| StarcoderdataPython |
4836961 | # Source repo: bkenan/rl_offline
from abc import abstractmethod
from typing import Optional, Sequence, Tuple
import numpy as np
import torch
from ...gpu import Device
from ...preprocessing import ActionScaler, RewardScaler, Scaler
from ...torch_utility import (
eval_api,
get_state_dict,
map_location,
set_state_dict,
to_cpu,
to_cuda,
torch_api,
)
from ..base import DynamicsImplBase
class TorchImplBase(DynamicsImplBase):
_observation_shape: Sequence[int]
_action_size: int
_scaler: Optional[Scaler]
_action_scaler: Optional[ActionScaler]
_reward_scaler: Optional[RewardScaler]
_device: str
def __init__(
self,
observation_shape: Sequence[int],
action_size: int,
scaler: Optional[Scaler],
action_scaler: Optional[ActionScaler],
reward_scaler: Optional[RewardScaler],
):
self._observation_shape = observation_shape
self._action_size = action_size
self._scaler = scaler
self._action_scaler = action_scaler
self._reward_scaler = reward_scaler
self._device = "cpu:0"
@eval_api
@torch_api(scaler_targets=["x"], action_scaler_targets=["action"])
def predict(
self, x: torch.Tensor, action: torch.Tensor, indices: torch.Tensor,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
with torch.no_grad():
observation, reward, variance = self._predict(x, action, indices)
if self._scaler:
observation = self._scaler.reverse_transform(observation)
if self._reward_scaler:
reward = self._reward_scaler.reverse_transform(reward)
observation = observation.cpu().detach().numpy()
reward = reward.cpu().detach().numpy()
variance = variance.cpu().detach().numpy()
return observation, reward, variance
@abstractmethod
def _predict(
self,
x: torch.Tensor,
action: torch.Tensor,
indices: Optional[torch.Tensor],
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
pass
def to_gpu(self, device: Device = Device()) -> None:
self._device = f"cuda:{device.get_id()}"
to_cuda(self, self._device)
def to_cpu(self) -> None:
self._device = "cpu:0"
to_cpu(self)
def save_model(self, fname: str) -> None:
torch.save(get_state_dict(self), fname)
def load_model(self, fname: str) -> None:
chkpt = torch.load(fname, map_location=map_location(self._device))
set_state_dict(self, chkpt)
@property
def observation_shape(self) -> Sequence[int]:
return self._observation_shape
@property
def action_size(self) -> int:
return self._action_size
@property
def device(self) -> str:
return self._device
@property
def scaler(self) -> Optional[Scaler]:
return self._scaler
@property
def action_scaler(self) -> Optional[ActionScaler]:
return self._action_scaler
@property
def reward_scaler(self) -> Optional[RewardScaler]:
return self._reward_scaler
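# A minimal concrete sketch, not part of the original file: it shows the one
# abstract hook a subclass must fill in. The class name is hypothetical and
# the model is a stand-in that predicts "no change" with zero reward/variance.
class _IdentityDynamicsImpl(TorchImplBase):
    def _predict(
        self,
        x: torch.Tensor,
        action: torch.Tensor,
        indices: Optional[torch.Tensor],
    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        # Next observation = current observation; reward and variance are zero.
        reward = torch.zeros(x.shape[0], 1, device=x.device)
        variance = torch.zeros_like(reward)
        return x, reward, variance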
| StarcoderdataPython |
3285603 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import collections
import logging
import os
import sys
import tempfile
from flexget import plugin
from flexget.event import event
log = logging.getLogger('subtitles')
try:
from subliminal.extensions import provider_manager
PROVIDERS = provider_manager.names()
except ImportError:
PROVIDERS = [
'opensubtitles',
'thesubdb',
'podnapisi',
'addic7ed',
'tvsubtitles'
]
AUTHENTICATION_SCHEMA = dict((provider, {'type': 'object'}) for provider in PROVIDERS)
class PluginSubliminal(object):
"""
Search and download subtitles using Subliminal by <NAME>
(https://pypi.python.org/pypi/subliminal).
Example (complete task)::
subs:
find:
path:
- d:\media\incoming
regexp: '.*\.(avi|mkv|mp4)$'
recursive: yes
accept_all: yes
subliminal:
languages:
- ita
alternatives:
- eng
exact_match: no
providers: addic7ed, opensubtitles
single: no
directory: /disk/subtitles
hearing_impaired: yes
authentication:
addic7ed:
username: myuser
password: <PASSWORD>
"""
schema = {
'type': 'object',
'properties': {
'languages': {'type': 'array', 'items': {'type': 'string'}, 'minItems': 1},
'alternatives': {'type': 'array', 'items': {'type': 'string'}},
'exact_match': {'type': 'boolean', 'default': True},
'providers': {'type': 'array', 'items': {'type': 'string', 'enum': PROVIDERS}},
'single': {'type': 'boolean', 'default': True},
'directory': {'type': 'string'},
'hearing_impaired': {'type': 'boolean', 'default': False},
'authentication': {'type': 'object', 'properties': AUTHENTICATION_SCHEMA},
},
'required': ['languages'],
'additionalProperties': False
}
def on_task_start(self, task, config):
if list(sys.version_info) < [2, 7]:
raise plugin.DependencyError('subliminal', 'Python 2.7', 'Subliminal plugin requires python 2.7.')
try:
import babelfish
except ImportError as e:
log.debug('Error importing Babelfish: %s', e)
raise plugin.DependencyError('subliminal', 'babelfish', 'Babelfish module required. ImportError: %s' % e)
try:
import subliminal
except ImportError as e:
log.debug('Error importing Subliminal: %s', e)
raise plugin.DependencyError('subliminal', 'subliminal', 'Subliminal module required. ImportError: %s' % e)
def on_task_output(self, task, config):
"""
Configuration::
subliminal:
languages: List of languages (as IETF codes) in order of preference. At least one is required.
alternatives: List of second-choice languages; subs will be downloaded but entries rejected.
exact_match: Use file hash only to search for subs, otherwise Subliminal will try to guess by filename.
providers: List of providers from where to download subtitles.
single: Download subtitles in single mode (no language code added to subtitle filename).
directory: Path to directory where to save the subtitles, default is next to the video.
hearing_impaired: Prefer subtitles for the hearing impaired when available
authentication: >
Dictionary of configuration options for different providers.
Keys correspond to provider names, and values are dictionaries, usually specifying `username` and
`password`.
"""
if not task.accepted:
log.debug('nothing accepted, aborting')
return
from babelfish import Language
from dogpile.cache.exception import RegionAlreadyConfigured
import subliminal
from subliminal.cli import MutexLock
from subliminal.score import episode_scores, movie_scores
try:
subliminal.region.configure('dogpile.cache.dbm',
arguments={
'filename': os.path.join(tempfile.gettempdir(), 'cachefile.dbm'),
'lock_factory': MutexLock,
})
except RegionAlreadyConfigured:
pass
# Let subliminal be more verbose if our logger is set to DEBUG
if log.isEnabledFor(logging.DEBUG):
logging.getLogger("subliminal").setLevel(logging.INFO)
else:
logging.getLogger("subliminal").setLevel(logging.CRITICAL)
logging.getLogger("dogpile").setLevel(logging.CRITICAL)
logging.getLogger("enzyme").setLevel(logging.WARNING)
try:
languages = set([Language.fromietf(s) for s in config.get('languages', [])])
alternative_languages = set([Language.fromietf(s) for s in config.get('alternatives', [])])
except ValueError as e:
raise plugin.PluginError(e)
# keep all downloaded subtitles and save to disk when done (no need to write every time)
downloaded_subtitles = collections.defaultdict(list)
providers_list = config.get('providers', None)
provider_configs = config.get('authentication', None)
# test if only one language was provided, if so we will download in single mode
# (aka no language code added to subtitle filename)
# unless we are forced not to by configuration
# if we pass 'yes' for single in configuration but choose more than one language
# we ignore the configuration and add the language code to the
# potentially downloaded files
single_mode = config.get('single', '') and len(languages | alternative_languages) <= 1
hearing_impaired = config.get('hearing_impaired', False)
with subliminal.core.ProviderPool(providers=providers_list, provider_configs=provider_configs) as provider_pool:
for entry in task.accepted:
if 'location' not in entry:
log.warning('Cannot act on entries that do not represent a local file.')
continue
if not os.path.exists(entry['location']):
entry.fail('file not found: %s' % entry['location'])
continue
if '$RECYCLE.BIN' in entry['location']: # ignore deleted files in Windows shares
continue
try:
entry_languages = set(entry.get('subtitle_languages', [])) or languages
video = subliminal.scan_video(entry['location'])
# use metadata refiner to get mkv metadata
refiner = ('metadata',)
subliminal.core.refine(video, episode_refiners=refiner, movie_refiners=refiner)
existing_subtitles = set(subliminal.core.search_external_subtitles(entry['location']).values())
video.subtitle_languages |= existing_subtitles
if isinstance(video, subliminal.Episode):
title = video.series
hash_scores = episode_scores['hash']
else:
title = video.title
hash_scores = movie_scores['hash']
log.info('Name computed for %s was %s', entry['location'], title)
msc = hash_scores if config['exact_match'] else 0
if entry_languages.issubset(video.subtitle_languages) or (single_mode and video.subtitle_languages):
log.debug('All preferred languages already exist for "%s"', entry['title'])
entry['subtitles_missing'] = set()
continue # subs for preferred lang(s) already exists
else:
# Gather the subtitles for the alternative languages too, to avoid needing to search the sites
# again. They'll just be ignored if the main languages are found.
all_subtitles = provider_pool.list_subtitles(video, entry_languages | alternative_languages)
subtitles = provider_pool.download_best_subtitles(all_subtitles, video, entry_languages,
min_score=msc,
hearing_impaired=hearing_impaired)
if subtitles:
downloaded_subtitles[video].extend(subtitles)
log.info('Subtitles found for %s', entry['location'])
else:
# only try to download for alternatives that aren't already downloaded
subtitles = provider_pool.download_best_subtitles(all_subtitles, video,
alternative_languages, min_score=msc,
hearing_impaired=hearing_impaired)
if subtitles:
downloaded_subtitles[video].extend(subtitles)
entry.fail('subtitles found for a second-choice language.')
else:
entry.fail('cannot find any subtitles for now.')
downloaded_languages = set([Language.fromietf(str(l.language))
for l in subtitles])
if entry_languages:
entry['subtitles_missing'] = entry_languages - downloaded_languages
if len(entry['subtitles_missing']) > 0:
entry.fail('Subtitles for all primary languages not found')
except ValueError as e:
log.error('subliminal error: %s', e)
entry.fail()
if downloaded_subtitles:
if task.options.test:
log.verbose('Test mode. Found subtitles:')
# save subtitles to disk
for video, subtitle in downloaded_subtitles.items():
if subtitle:
_directory = config.get('directory')
if _directory:
_directory = os.path.expanduser(_directory)
if task.options.test:
log.verbose(' FOUND LANGUAGES %s for %s', [str(l.language) for l in subtitle], video.name)
continue
subliminal.save_subtitles(video, subtitle, single=single_mode, directory=_directory)
@event('plugin.register')
def register_plugin():
plugin.register(PluginSubliminal, 'subliminal', api_ver=2)
| StarcoderdataPython |
4835389 | # cron/autoexpire.py (repo: RusticiSoftware/SCORMCloud_GoogAppEngApp)
#!/usr/bin/env python
# encoding: utf-8
"""
autoexpire.py
Copyright (c) 2010 <NAME>. All rights reserved.
"""
import cgi
import os
import datetime
from datetime import timedelta
from google.appengine.dist import use_library
use_library('django', '1.1')
from models import *
from modelutils import *
from assignutils import *
from viewutils import *
if GetEnableAutoExpire():
logging.getLogger().setLevel(logging.DEBUG)
logging.info("Kicking off the scheduled Auto-Expire process.")
assignments = GetExpirableAssignments()
if assignments is not None:
totaldeactivated = 0
for assignment in assignments:
assignment.active = False;
#setting the autoexpire to False to make the future GetAssignments calls faster
assignment.autoexpire = False;
assignment.put()
totaldeactivated = totaldeactivated + 1
logging.info(str(totaldeactivated) + " assignments were deactivated automatically at expiration.")
logging.info("Ending the scheduled Auto-Expire process.")
| StarcoderdataPython |
4808085 | from django.http import Http404
from django.shortcuts import render, get_object_or_404, redirect
from .models import Product
from .forms import ProductForm, RawProductForm
def product_create_view(request):
form = ProductForm(request.POST or None)
if form.is_valid():
form.save()
form = ProductForm()
context = {
'form': form
}
return render(request, 'products/product_create.html', context)
def product_update_view(request, my_id):
obj = get_object_or_404(Product, id=my_id)
form = ProductForm(request.POST or None, instance=obj)
if form.is_valid():
form.save()
context = {
'form': form
}
return render(request, 'products/product_create.html', context)
def product_delete_view(request, my_id):
obj = get_object_or_404(Product, id=my_id)
if request.method == 'POST':
obj.delete()
return redirect('../../')
context = {
'object' : obj
}
return render(request, 'products/product_delete.html', context)
def dynamic_product_detail_view(request, my_id):
try:
obj = Product.objects.get(id=my_id)
except Product.DoesNotExist:
raise Http404
context = {
'object' : obj
}
return render(request, 'products/product_detail.html', context)
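# A hypothetical urls.py wiring for these views, shown for illustration only
# (route paths and the import module are assumptions, kept commented out so
# this file still imports cleanly):
#
# from django.urls import path
# from .views import (product_create_view, product_update_view,
#                     product_delete_view, dynamic_product_detail_view,
#                     product_list_view)
#
# urlpatterns = [
#     path('create/', product_create_view),
#     path('<int:my_id>/', dynamic_product_detail_view),
#     path('<int:my_id>/update/', product_update_view),
#     path('<int:my_id>/delete/', product_delete_view),
#     path('', product_list_view),
# ]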
def product_list_view(request):
qs = Product.objects.all()
context = {
'object_list' : qs
}
return render(request, 'products/product_list.html', context)
| StarcoderdataPython |