id (stringlengths 1-8) | text (stringlengths 6-1.05M) | dataset_id (stringclasses 1 value)
---|---|---|
149215
|
import os
from flask import Blueprint, request
from flask_cors import CORS
from service.astra_service import astra_service
credentials_controller = Blueprint('credentials_controller', __name__)
CORS(credentials_controller)
# This controller handles the functionality for connecting to the database
#
# Here we define the REST API endpoints and call our Astra Service
# to send the request to the underlying Data Access Objects
@credentials_controller.route("/api/credentials", methods=['GET', 'POST'])
def connect():
if request.method == 'POST':
temp_zip_path = os.path.abspath('temp_bundle.zip')
# Write the uploaded secure connect bundle to a temporary zip file
with open(temp_zip_path, 'wb') as f:
f.write(request.get_data())
try:
astra_service.save_credentials(request.args['username'], request.args['password'],
request.args['keyspace'], temp_zip_path)
astra_service.connect()
finally:
os.remove(temp_zip_path)
return {'connected': True}, 200
if request.method == 'GET':
resp = astra_service.check_connection()
if resp is True:
status_code = 200
else:
status_code = 401
return str(resp), status_code
@credentials_controller.route("/api/credentials/test", methods=['POST'])
def test_credentials():
temp_zip_path = os.path.abspath('temp_bundle.zip')
# Write the uploaded secure connect bundle to a temporary zip file
with open(temp_zip_path, 'wb') as f:
f.write(request.get_data())
resp = {'success': False}
status_code = 400
try:
test_connection = astra_service.test_credentials(request.args['username'], request.args['password'],
request.args['keyspace'], temp_zip_path)
resp = {'success': test_connection}
if resp['success'] is True:
status_code = 200
else:
status_code = 401
finally:
os.remove(temp_zip_path)
return resp, status_code
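# A minimal client sketch for these endpoints (assumed host/port and bundle
# path; not part of the original service):
# import requests
# with open('secure-connect-bundle.zip', 'rb') as bundle:
#     r = requests.post('http://localhost:5000/api/credentials',
#                       params={'username': 'user', 'password': 'pass', 'keyspace': 'demo'},
#                       data=bundle.read())
# print(r.status_code, r.json())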
|
StarcoderdataPython
|
6508174
|
from kivymd.theming import ThemableBehavior
from kivymd.uix.screen import MDScreen
class CraneSleepScreen(ThemableBehavior, MDScreen):
pass
|
StarcoderdataPython
|
3571412
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Criado em 17 de Agosto de 2016.
Descricao: esta biblioteca possui as seguintes funcoes:
geraDados: esta funcao gera um histograma dos dados fornecida uma imagem em formato numpy (imagem esta resultante da combinacao de uma amostra de imagens da serie de dados). Sobre esses dados e calculado a media, mediana, desvio padrao e desvio padrao absoluto. Alem disso, e gerado um segundo histograma normalizado em relacao a media e desvio padrao obtidos para servir de comparacao. Um intervalo de 7 sigmas e estipulado ao redor da mediana para o calculo do histograma e de suas informacoes.
plothist: esta funcao e responsavel pelo plot do histograma dos dados e do shitograma normalizado; sobre ele sao expressao informacoes como valor medio e intervalo de valores dentro da media +/- sigma. Essas dados sao obtidos atraves de uma interpolacao cubica, retornando-os a um vetor para posterior exibicao.
returnIndex: esta funcao retorna o indice de um vetor para qual seu valor seja igual a de um parametro fornecido.
drawLine: esta funcao desenha uma linha vertical sobre o grafico, identificando seu valor do par coordenado.
histograma: esta funcao faz a chamada de todas as outras funcoes para gerar o histograma da imagens.
@author: <NAME> & <NAME>
Laboratorio Nacional de Astrofisica, Brazil.
"""
__version__ = "1.0"
__copyright__ = """
Copyright (c) ... All rights reserved.
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import returnMax
import copy
from scipy.stats import mode
from scipy.interpolate import interp1d
from algarismoSig import algarismoSig
import astropy.io.fits as fits
import RNhistograma
listImage = ['DC_00.fits','DC_10.fits','DC_14.fits']
imagens=[]
for img in listImage:
imagens.append(fits.getdata(img)[0])
plt.figure(figsize=(10,10))
mean, median, std, stdAbs, value, base, x, y = RNhistograma.calcHistograma(imagens[1])
RNhistograma.plothist(base, value, x, y, mean, median, std, stdAbs)
plt.show()
|
StarcoderdataPython
|
1879829
|
# This problem was asked by Amazon. [EASY]
# Given n numbers, find the greatest common divisor (GCD) of them.
#For example, given the numbers [42, 56, 14], return 14.
#Solution using numpy
import numpy as np
A = [14,56,42]
print(np.gcd.reduce(A))
#Solution: Scaling for any number of input values
n = int(input('HOW MANY NUMBERS YOU WANT TO CALCULATE GCD?: '))
a = list(map(int,input('ENTER THE NUMBER TO COMPUTE GCD: ').strip().split()))[:n]
def compute_gcd(num1, num2):
# Euclidean algorithm: gcd(a, b) == gcd(b, a % b)
if max(num1, num2) % min(num1, num2) == 0:
return min(num1, num2)
r = max(num1, num2) % min(num1, num2)
# Recurse on the smaller value and the remainder; recursing on the larger
# value computes the wrong answer (e.g. it returns 4 for (16, 6))
return compute_gcd(min(num1, num2), r)
# Fold the pairwise GCD over the whole list
while len(a) > 1:
a[0] = compute_gcd(a[0], a[1])
a.pop(1)
print(f"GCD OF {n} NUMBERS IS {a[0]}")
#Solution by Euclidean algo with loops for more than 2 numbers
def compute_gcd(x, y):
while(y):
x, y = y, x % y
return x
# Driver Code
l = [ 42, 56, 14]
num1 = l[0]
num2 = l[1]
gcd = compute_gcd(num1, num2)
for i in range(2, len(l)):
gcd = compute_gcd(gcd, l[i])
print(f"the gcd among the numbers {l} is {gcd}")
#Solution using functools:
import functools as f
A = [56, 42, 14]
g = lambda a,b:a if b==0 else g(b,a%b) #Gcd for two numbers
print(f.reduce(lambda x,y:g(x,y),A)) #Calling gcd function throughout the list.
# Solution using maths in fixed set of numbers:
import math
A=[56,42,14]
b=A[0]
for j in range(1,len(A)):
s=math.gcd(b,A[j])
b=s
print(f'GCD of array elements is {b}.')
# Solution using maths.gcd for n numbers
import math
def compute_gcd(lst):
if len(lst) == 0: # trivial case
return -1
while len(lst) > 1:
a = lst.pop()
b = lst.pop()
c = math.gcd(a,b) if a >= b else math.gcd(b, a)
lst.append(c)
return lst.pop()
def test_gcd():
assert compute_gcd([42, 56, 14]) == 14
assert compute_gcd([3, 6]) == 3
assert compute_gcd([1]) == 1
assert compute_gcd([]) == -1
if __name__ == "__main__":
test_gcd()
|
StarcoderdataPython
|
5092868
|
<gh_stars>1-10
# Copyright 2020 Hamal, Inc.
|
StarcoderdataPython
|
3215789
|
# CamJam EduKit 3 - Robotics
# Worksheet 7 - Controlling the motors with PWM
import time # Import the Time library
from gpiozero import CamJamKitRobot # Import the CamJam GPIO Zero Library
robot = CamJamKitRobot()
# Set the relative speeds of the two motors, between 0.0 and 1.0
motorspeed = 0.5
motorforward = (motorspeed, motorspeed)
motorbackward = (-motorspeed, -motorspeed)
motorleft = (motorspeed, 0)
motorright = (0, motorspeed)
robot.value = motorforward
time.sleep(1) # Pause for 1 second
robot.value = motorbackward
time.sleep(1) # Pause for 1 second
robot.value = motorleft
time.sleep(1) # Pause for 1 second
robot.value = motorright
time.sleep(1) # Pause for 1 second
robot.stop()
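# A gentle curve (assumed extension of the worksheet): give the two motors
# different speeds instead of stopping one of them entirely.
# robot.value = (motorspeed, motorspeed * 0.5)  # curve to the right
# time.sleep(1)
# robot.stop()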
|
StarcoderdataPython
|
5101645
|
class MetricInterface(object):
ID = 'NONE'
def __init__(self, **kwargs):
pass
def evaluate(self, individual, framework):
raise NotImplementedError("Function evaluateIndividual has to be inplemented!")
@staticmethod
def getMetricByName(name='NONE'):
stack = [MetricInterface]
while stack:
cls = stack.pop(0)
if cls.__name__ == name:
return cls
stack.extend(cls.__subclasses__())
raise Exception("Couldn't find class with name: " + name)
|
StarcoderdataPython
|
12834551
|
import copy
import crcmod
from opendbc.can.can_define import CANDefine
from selfdrive.car.tesla.values import CANBUS
class TeslaCAN:
def __init__(self, dbc_name, packer):
self.can_define = CANDefine(dbc_name)
self.packer = packer
self.crc = crcmod.mkCrcFun(0x11d, initCrc=0x00, rev=False, xorOut=0xff)
@staticmethod
def checksum(msg_id, dat):
# TODO: get message ID from name instead
ret = (msg_id & 0xFF) + ((msg_id >> 8) & 0xFF)
ret += sum(dat)
return ret & 0xFF
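# Worked example with assumed bytes: checksum(0x488, bytes([0x01, 0x02, 0x03]))
# = (0x88 + 0x04 + 0x01 + 0x02 + 0x03) & 0xFF = 0x92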
def create_steering_control(self, angle, enabled, frame):
values = {
"DAS_steeringAngleRequest": -angle,
"DAS_steeringHapticRequest": 0,
"DAS_steeringControlType": 1 if enabled else 0,
"DAS_steeringControlCounter": (frame % 16),
}
data = self.packer.make_can_msg("DAS_steeringControl", CANBUS.chassis, values)[2]
values["DAS_steeringControlChecksum"] = self.checksum(0x488, data[:3])
return self.packer.make_can_msg("DAS_steeringControl", CANBUS.chassis, values)
def create_action_request(self, msg_stw_actn_req, cancel, bus, counter):
values = copy.copy(msg_stw_actn_req)
if cancel:
values["SpdCtrlLvr_Stat"] = 1
values["MC_STW_ACTN_RQ"] = counter
data = self.packer.make_can_msg("STW_ACTN_RQ", bus, values)[2]
values["CRC_STW_ACTN_RQ"] = self.crc(data[:7])
return self.packer.make_can_msg("STW_ACTN_RQ", bus, values)
|
StarcoderdataPython
|
11265867
|
class EventParser:
def __init__(self):
pass
def Parse(self, packet):
if packet[0] == 'E':
print("Hurrah")
class Event:
def __init__(self, command, args):
pass
|
StarcoderdataPython
|
3332263
|
<filename>src/tests/test_node.py
import unittest
from gerel.genome.edge import Edge
from gerel.genome.node import Node
import itertools
class TestNodeClass(unittest.TestCase):
def setUp(self):
# reset innovation number
Node.innov_iter = itertools.count()
Edge.innov_iter = itertools.count()
Node.registry = {}
Edge.registry = {}
def test_node_init(self):
n1 = Node(0, 0, 0)
n2 = Node(0, 1, 0)
n3 = Node(1, 0, 0)
n4 = Node(1, 1, 0)
# Check correct nodes
self.assertEqual(n1.innov, 0)
self.assertEqual(n2.innov, 1)
self.assertEqual(n3.innov, 2)
self.assertEqual(n4.innov, 3)
def test_node_copy(self):
n = Node(0, 0, 0)
m = Node.copy(n)
self.assertNotEqual(n, m)
self.assertEqual(n.innov, m.innov)
self.assertEqual(n.layer_num, m.layer_num)
self.assertEqual(n.layer_ind, m.layer_ind)
|
StarcoderdataPython
|
6514772
|
<filename>skadi/__init__.py<gh_stars>10-100
from __future__ import absolute_import
import collections as c
import copy
import io as _io
import itertools as it
import math
def enum(**enums):
_enum = type('Enum', (), enums)
_enum.tuples = enums
return _enum
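# Example (assumed) usage of the enum factory above:
# Color = enum(RED=0, GREEN=1)
# Color.RED == 0; Color.tuples == {'RED': 0, 'GREEN': 1}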
Peek = c.namedtuple('Peek', 'tick, kind, tell, size, compressed')
from skadi import *
from skadi.engine import string_table as stab
from skadi.engine.dt import prop as dt_p
from skadi.engine.dt import send as dt_s
from skadi.index.demo import prologue as id_prologue
from skadi.index.embed import packet as ie_packet
from skadi.index.embed import send_tables as ie_send_tables
from skadi.io.protobuf import demo as d_io
from skadi.io.protobuf import packet as p_io
from skadi.io.unpacker import string_table as u_st
from skadi.protoc import demo_pb2 as pb_d
from skadi.protoc import netmessages_pb2 as pb_n
try:
from skadi.io import cBitstream as b_io
except ImportError:
from skadi.io import bitstream as b_io
try:
from skadi.engine.dt import cRecv as dt_r
except ImportError:
from skadi.engine.dt import recv as dt_r
Meta = c.namedtuple('Meta', ['file_header', 'server_info', 'voice_init'])
FileHeader = c.namedtuple('FileHeader', [
'demo_file_stamp', 'network_protocol', 'server_name', 'client_name',
'map_name', 'game_directory', 'fullpackets_version'
])
ServerInfo = c.namedtuple('ServerInfo', [
'protocol', 'server_count', 'is_dedicated', 'is_hltv', 'c_os', 'map_crc',
'client_crc', 'string_table_crc', 'max_clients', 'max_classes',
'player_slot', 'tick_interval', 'game_dir', 'map_name', 'sky_name',
'host_name'
])
VoiceInit = c.namedtuple('VoiceInit', ['quality', 'codec'])
Prologue = c.namedtuple('Prologue', [
'meta', 'recv_tables', 'string_tables', 'game_event_list', 'class_bits'
])
test_needs_decoder = lambda st: st.needs_decoder
class InvalidDemo(RuntimeError):
pass
def load(io, tick=0):
demo_io = d_io.construct(io)
prologue = id_prologue.construct(demo_io)
# mash all packet svc messages together, then index them
signon_packets = list(prologue.all_dem_signon_packet)
data = ''.join([pb.data for _, pb in signon_packets])
packet = ie_packet.construct(p_io.construct(data))
# meta: file header
_, pbmsg = prologue.dem_file_header
file_header = FileHeader(*[getattr(pbmsg, a) for a in FileHeader._fields])
# meta: server info
_, pbmsg = packet.svc_server_info
server_info = ServerInfo(*[getattr(pbmsg, a) for a in ServerInfo._fields])
# meta: voice init
_, pbmsg = packet.svc_voice_init
voice_init = VoiceInit(*[getattr(pbmsg, a) for a in VoiceInit._fields])
# prologue: meta
meta = Meta(file_header, server_info, voice_init)
# prologue: send tables
_, pbmsg = prologue.dem_send_tables
_send_tables = ie_send_tables.construct(p_io.construct(pbmsg.data))
send_tables = c.OrderedDict()
for pbmsg in [pb for _, pb in _send_tables.all_svc_send_table]:
if pbmsg.is_end:
break
send_table = _parse_cdemo_send_table(pbmsg)
send_tables[send_table.dt] = send_table
# prologue: recv tables
flattener = Flattener(send_tables)
recv_tables = c.OrderedDict()
_, pbmsg = prologue.dem_class_info
class_info = c.OrderedDict()
for cls in pbmsg.classes:
_id, dt, name = str(cls.class_id), cls.table_name, cls.network_name
class_info[_id] = (dt, name)
for st in filter(test_needs_decoder, send_tables.values()):
props = flattener.flatten(st)
cls = next(_id for _id, (dt, _) in class_info.items() if dt == st.dt)
recv_tables[cls] = dt_r.construct(st.dt, props)
# prologue: string tables
pbmsgs = [pb for _, pb in packet.all_svc_create_string_table]
string_tables = _parse_all_csvc_create_string_tables(pbmsgs)
# prologue: game event list
_, pbmsg = packet.svc_game_event_list
game_event_list = c.OrderedDict()
for desc in pbmsg.descriptors:
_id, name = desc.eventid, desc.name
keys = [(k.type, k.name) for k in desc.keys]
game_event_list[_id] = (name, keys)
# prologue: class bits
class_bits = server_info.max_classes.bit_length()
return Prologue(meta, recv_tables, string_tables, game_event_list, class_bits)
def _parse_cdemo_send_table(pbmsg):
dt, props = pbmsg.net_table_name, []
for p in pbmsg.props:
attributes = {
'var_name': p.var_name,
'type': p.type,
'flags': p.flags,
'num_elements': p.num_elements,
'num_bits': p.num_bits,
'dt_name': p.dt_name,
'priority': p.priority,
'low_value': p.low_value,
'high_value': p.high_value
}
props.append(dt_p.construct(dt, attributes))
# assign properties used for parsing array elements
for i, p in enumerate(props):
if p.type == dt_p.Type.Array:
p.array_prop = props[i - 1]
return dt_s.construct(dt, props, pbmsg.is_end, pbmsg.needs_decoder)
def _parse_all_csvc_create_string_tables(pbmsgs):
string_tables = c.OrderedDict()
for pbmsg in pbmsgs:
ne = pbmsg.num_entries
eb = int(math.ceil(math.log(pbmsg.max_entries, 2)))
sf = pbmsg.user_data_fixed_size
sb = pbmsg.user_data_size_bits
bs = b_io.construct(pbmsg.string_data)
entries = list(u_st.construct(bs, ne, eb, sf, sb))
name = pbmsg.name
string_tables[name] = stab.construct(name, eb, sf, sb, entries)
return string_tables
class Flattener(object):
def __init__(self, send_tables):
self.send_tables = send_tables
def flatten(self, st):
aggregate = []
exclusions = self._aggregate_exclusions(st)
self._build(st, aggregate, exclusions, [])
return aggregate
def _build(self, st, aggregate, exclusions, props, proxy_for=None):
self._compile(st, aggregate, exclusions, props)
for p in props:
if proxy_for:
_p = copy.copy(p)
_p.var_name = '{}.{}'.format(p.origin_dt, p.var_name).encode('UTF-8')
_p.origin_dt = proxy_for
else:
_p = p
aggregate.append(_p)
def _compile(self, st, aggregate, exclusions, props):
def test_excluded(p):
return
for p in st.props:
excluded = (st.dt, p.var_name) in exclusions
ineligible = p.flags & (dt_p.Flag.Exclude | dt_p.Flag.InsideArray)
if excluded or ineligible:
continue
if p.type == dt_p.Type.DataTable:
_st = self.send_tables[p.dt_name]
if dt_p.test_collapsible(p):
self._compile(_st, aggregate, exclusions, props)
else:
self._build(_st, aggregate, exclusions, [], proxy_for=p.origin_dt)
else:
props.append(p)
def _aggregate_exclusions(self, st):
def recurse(_dt_prop):
st = self.send_tables[_dt_prop.dt_name]
return self._aggregate_exclusions(st)
inherited = map(recurse, st.dt_props)
return st.exclusions + list(it.chain(*inherited))
|
StarcoderdataPython
|
1610776
|
from opencmiss.zinc.context import Context
from opencmiss.zinc.material import Material
from mapclientplugins.lungmodelstep.model.meshmodel import MeshModel
class LungModel(object):
def __init__(self):
self._context = Context("LungModelView")
self._logger = self._context.getLogger()
self._initialize()
self._leftRegion = self.setRegion('leftregion')
self._rightRegion = self.setRegion('rightregion')
self._meshModel = MeshModel(self._leftRegion, self._rightRegion, self._materialModule)
def getContext(self):
return self._context
def setRegion(self, name):
region = self._context.getDefaultRegion().createChild(name)
return region
def _initialize(self):
tess = self._context.getTessellationmodule().getDefaultTessellation()
tess.setRefinementFactors(12)
self._materialModule = self._context.getMaterialmodule()
self._materialModule.defineStandardMaterials()
solidBlue = self._materialModule.createMaterial()
solidBlue.setName('solidBlue')
solidBlue.setManaged(True)
solidBlue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
solidBlue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
solidBlue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
solidBlue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
solidBlue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
transBlue = self._materialModule.createMaterial()
transBlue.setName('transBlue')
transBlue.setManaged(True)
transBlue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
transBlue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
transBlue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
transBlue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
transBlue.setAttributeReal(Material.ATTRIBUTE_ALPHA, 0.3)
transBlue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
solidTissue = self._materialModule.createMaterial()
solidTissue.setName('solidTissue')
solidTissue.setManaged(True)
solidTissue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.9, 0.7, 0.5])
solidTissue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.9, 0.7, 0.5])
solidTissue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
solidTissue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.2, 0.2, 0.3])
solidTissue.setAttributeReal(Material.ATTRIBUTE_ALPHA, 1.0)
solidTissue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
glyphmodule = self._context.getGlyphmodule()
glyphmodule.defineStandardGlyphs()
def getScene(self):
return self._leftRegion.getScene(), self._rightRegion.getScene()
def getMeshModel(self):
return self._meshModel
|
StarcoderdataPython
|
6629331
|
<gh_stars>0
"""
APIQuery should start by checking whether a cached completion exists. If it
doesn't, prompt the API for a completion, cache it, and use it.
"""
import openai, os, json, re, IO.io as io
from encoder.encoder import get_encoder
MAX_TOKENS = 2048;
with open('config.json') as configFile:
config = json.loads(configFile.read());
openai.api_key = config['OPENAI_API_KEY'];
encoder = get_encoder();
def clamp(num, min_value, max_value): # https://www.tutorialspoint.com/How-to-clamp-floating-numbers-in-Pythons
return max(min(num, max_value), min_value);
def tokenizePrompt(prompt):
return encoder.encode(prompt);
def handleCompletion(completion): # Get the text of a completion and prepare it
assert completion and completion != '', 'Unable to handle no/blank completion';
completionText = completion['choices'][0]['text']; # Get the text from the first (Best) completion choice
completionText = re.sub(r'^\s+', '', completionText); # Remove the new lines from the start of the text
return completionText;
def getCachedCompletions():
if (os.path.exists('completionsCache.json')):
with open('completionsCache.json', 'r') as cacheFile:
try:
completions = json.loads(cacheFile.read());
return completions;
except (json.JSONDecodeError):
return {};
return {};
def getCachedCompletion(prompt):
completions = getCachedCompletions();
if (prompt in completions):
return completions[prompt];
def cacheCompletion(prompt, completion):
completions = getCachedCompletions();
completions[prompt] = completion;
with open('completionsCache.json', 'w') as cacheFile:
cacheFile.write(json.dumps(completions));
def promptGPT3(prompt, APIEngine, maxTokens):
cachedCompletion = getCachedCompletion(prompt);
if (cachedCompletion):
io.out(handleCompletion(cachedCompletion));
return;
tokens = tokenizePrompt(prompt);
completion = openai.Completion.create(
engine = APIEngine,
prompt = prompt,
temperature = 0.65,
max_tokens = clamp(MAX_TOKENS - len(tokens), 1, maxTokens)
);
if (completion and 'choices' in completion):
cacheCompletion(prompt, completion);
io.out(handleCompletion(completion));
return;
io.out('Sorry. I don\'t know that one.');
def APIQuery(query, APIEngine, maxTokens):
promptGPT3(query, APIEngine, maxTokens);
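# Example (hypothetical) call; the engine name and token budget are assumptions:
# APIQuery('Explain recursion in one sentence.', 'text-davinci-002', 64);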
|
StarcoderdataPython
|
8085536
|
# Define imports
import pygame
from pygame import *
import random
# Define helpers
from GameMenu import Menu
from GameModel import Model
from GameView import View
from GameController import Controller
if __name__ == "__main__":
# Initialize pygame
pygame.init()
# Initialize menu
menu = Menu()
# Set number of players selected in menu
num_players = menu.get_player_select()
# Initialize the player list
player_list = {}
# Create the players
for player in range(1, num_players + 1):
# Assign a random color to each player
color = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
player_list[player] = color
# Create a new game model with previous number of players and list of players
model = Model(num_players, player_list)
# Create a new game view with previous model
view = View(model)
# Create a new game controller with previous view
controller = Controller(view)
# Play the game
controller.play()
|
StarcoderdataPython
|
6630746
|
<reponame>renhongl/electron
#!/usr/bin/env python
import argparse
import os
import subprocess
import sys
from lib.config import LIBCHROMIUMCONTENT_COMMIT, BASE_URL, PLATFORM, \
enable_verbose_mode, is_verbose_mode, get_target_arch
from lib.util import execute_stdout, get_atom_shell_version, scoped_cwd
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
VENDOR_DIR = os.path.join(SOURCE_ROOT, 'vendor')
PYTHON_26_URL = 'https://chromium.googlesource.com/chromium/deps/python_26'
if os.environ.has_key('CI'):
NPM = os.path.join(SOURCE_ROOT, 'node_modules', '.bin', 'npm')
else:
NPM = 'npm'
if sys.platform in ['win32', 'cygwin']:
NPM += '.cmd'
def main():
os.chdir(SOURCE_ROOT)
args = parse_args()
if not args.yes and PLATFORM != 'win32':
check_root()
if args.verbose:
enable_verbose_mode()
if sys.platform == 'cygwin':
update_win32_python()
if PLATFORM != 'win32':
update_clang()
update_submodules()
setup_python_libs()
update_node_modules('.')
bootstrap_brightray(args.dev, args.url, args.target_arch,
args.libcc_source_path, args.libcc_shared_library_path,
args.libcc_static_library_path)
if PLATFORM == 'linux':
download_sysroot(args.target_arch)
create_chrome_version_h()
touch_config_gypi()
run_update()
update_electron_modules('spec', args.target_arch)
def parse_args():
parser = argparse.ArgumentParser(description='Bootstrap this project')
parser.add_argument('-u', '--url',
help='The base URL from which to download '
'libchromiumcontent (i.e., the URL you passed to '
'libchromiumcontent\'s script/upload script',
default=BASE_URL,
required=False)
parser.add_argument('-v', '--verbose',
action='store_true',
help='Prints the output of the subprocesses')
parser.add_argument('-d', '--dev', action='store_true',
help='Do not download static_library build')
parser.add_argument('-y', '--yes', '--assume-yes',
action='store_true',
help='Run non-interactively by assuming "yes" to all ' \
'prompts.')
parser.add_argument('--target_arch', default=get_target_arch(),
help='Manually specify the arch to build for')
parser.add_argument('--libcc_source_path', required=False,
help='The source path of libchromiumcontent. ' \
'NOTE: All options of libchromiumcontent are ' \
'required OR let electron choose it')
parser.add_argument('--libcc_shared_library_path', required=False,
help='The shared library path of libchromiumcontent.')
parser.add_argument('--libcc_static_library_path', required=False,
help='The static library path of libchromiumcontent.')
return parser.parse_args()
def check_root():
if os.geteuid() == 0:
print "We suggest not running this as root, unless you're really sure."
choice = raw_input("Do you want to continue? [y/N]: ")
if choice not in ('y', 'Y'):
sys.exit(0)
def update_submodules():
execute_stdout(['git', 'submodule', 'sync'])
execute_stdout(['git', 'submodule', 'update', '--init', '--recursive'])
def setup_python_libs():
for lib in ('requests', 'boto'):
with scoped_cwd(os.path.join(VENDOR_DIR, lib)):
execute_stdout([sys.executable, 'setup.py', 'build'])
def bootstrap_brightray(is_dev, url, target_arch, libcc_source_path,
libcc_shared_library_path,
libcc_static_library_path):
bootstrap = os.path.join(VENDOR_DIR, 'brightray', 'script', 'bootstrap')
args = [
'--commit', LIBCHROMIUMCONTENT_COMMIT,
'--target_arch', target_arch,
url
]
if is_dev:
args = ['--dev'] + args
if (libcc_source_path != None and
libcc_shared_library_path != None and
libcc_static_library_path != None):
args += ['--libcc_source_path', libcc_source_path,
'--libcc_shared_library_path', libcc_shared_library_path,
'--libcc_static_library_path', libcc_static_library_path]
execute_stdout([sys.executable, bootstrap] + args)
def update_node_modules(dirname, env=None):
if env is None:
env = os.environ
if PLATFORM == 'linux':
# Use prebuilt clang for building native modules.
llvm_dir = os.path.join(SOURCE_ROOT, 'vendor', 'llvm-build',
'Release+Asserts', 'bin')
env['CC'] = os.path.join(llvm_dir, 'clang')
env['CXX'] = os.path.join(llvm_dir, 'clang++')
env['npm_config_clang'] = '1'
with scoped_cwd(dirname):
args = [NPM, 'install']
if is_verbose_mode():
args += ['--verbose']
# Ignore npm install errors when running in CI.
if os.environ.has_key('CI'):
try:
execute_stdout(args, env)
except subprocess.CalledProcessError:
pass
else:
execute_stdout(args, env)
def update_electron_modules(dirname, target_arch):
env = os.environ.copy()
env['npm_config_arch'] = target_arch
env['npm_config_target'] = get_atom_shell_version()
env['npm_config_disturl'] = 'https://atom.io/download/atom-shell'
update_node_modules(dirname, env)
def update_win32_python():
with scoped_cwd(VENDOR_DIR):
if not os.path.exists('python_26'):
execute_stdout(['git', 'clone', PYTHON_26_URL])
def update_clang():
execute_stdout([os.path.join(SOURCE_ROOT, 'script', 'update-clang.sh')])
def download_sysroot(target_arch):
if target_arch == 'ia32':
target_arch = 'i386'
if target_arch == 'x64':
target_arch = 'amd64'
execute_stdout([sys.executable,
os.path.join(SOURCE_ROOT, 'script', 'install-sysroot.py'),
'--arch', target_arch])
def create_chrome_version_h():
version_file = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor',
'libchromiumcontent', 'VERSION')
target_file = os.path.join(SOURCE_ROOT, 'atom', 'common', 'chrome_version.h')
template_file = os.path.join(SOURCE_ROOT, 'script', 'chrome_version.h.in')
with open(version_file, 'r') as f:
version = f.read()
with open(template_file, 'r') as f:
template = f.read()
content = template.replace('{PLACEHOLDER}', version.strip())
# We update the file only if the content has changed (ignoring line ending
# differences).
should_write = True
if os.path.isfile(target_file):
with open(target_file, 'r') as f:
should_write = f.read().replace('\r', '') != content.replace('\r', '')
if should_write:
with open(target_file, 'w') as f:
f.write(content)
def touch_config_gypi():
config_gypi = os.path.join(SOURCE_ROOT, 'vendor', 'node', 'config.gypi')
with open(config_gypi, 'w+') as f:
content = "\n{'variables':{}}"
if f.read() != content:
f.write(content)
def run_update():
update = os.path.join(SOURCE_ROOT, 'script', 'update.py')
execute_stdout([sys.executable, update])
if __name__ == '__main__':
sys.exit(main())
|
StarcoderdataPython
|
11390299
|
<reponame>ok97465/spyder
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Working Directory widget.
"""
# Standard library imports
import os
import os.path as osp
# Third party imports
from qtpy.compat import getexistingdirectory
from qtpy.QtCore import Signal, Slot
# Local imports
from spyder.api.config.decorators import on_conf_change
from spyder.api.translations import get_translation
from spyder.api.widgets.main_container import PluginMainContainer
from spyder.api.widgets.toolbars import ApplicationToolbar
from spyder.config.base import get_home_dir
from spyder.utils.misc import getcwd_or_home
from spyder.widgets.comboboxes import PathComboBox
# Localization
_ = get_translation('spyder')
# --- Constants
# ----------------------------------------------------------------------------
class WorkingDirectoryActions:
Previous = 'previous_action'
Next = "next_action"
Browse = "browse_action"
Parent = "parent_action"
class WorkingDirectoryToolbarSections:
Main = "main_section"
# --- Widgets
# ----------------------------------------------------------------------------
class WorkingDirectoryToolbar(ApplicationToolbar):
ID = 'working_directory_toolbar'
# --- Container
# ----------------------------------------------------------------------------
class WorkingDirectoryContainer(PluginMainContainer):
"""Container for the working directory toolbar."""
# Signals
sig_current_directory_changed = Signal(str)
"""
This signal is emitted when the current directory has changed.
Parameters
----------
new_working_directory: str
The new working directory path.
"""
# ---- PluginMainContainer API
# ------------------------------------------------------------------------
def setup(self):
# Variables
self.history = self.get_conf('history', [])
self.histindex = None
# Widgets
title = _('Current working directory')
self.toolbar = WorkingDirectoryToolbar(self, title)
self.pathedit = PathComboBox(
self,
adjust_to_contents=self.get_conf('working_dir_adjusttocontents'),
)
# Widget Setup
self.toolbar.setWindowTitle(title)
self.toolbar.setObjectName(title)
self.pathedit.setToolTip(
_(
"This is the working directory for newly\n"
"opened IPython consoles, for the Files\n"
"and Find panes and for new files\n"
"created in the editor"
)
)
self.pathedit.setMaxCount(self.get_conf('working_dir_history'))
self.pathedit.selected_text = self.pathedit.currentText()
# Signals
self.pathedit.open_dir.connect(self.chdir)
self.pathedit.activated[str].connect(self.chdir)
# Actions
self.previous_action = self.create_action(
WorkingDirectoryActions.Previous,
text=_('Back'),
tip=_('Back'),
icon=self.create_icon('previous'),
triggered=self.previous_directory,
)
self.next_action = self.create_action(
WorkingDirectoryActions.Next,
text=_('Next'),
tip=_('Next'),
icon=self.create_icon('next'),
triggered=self.next_directory,
)
browse_action = self.create_action(
WorkingDirectoryActions.Browse,
text=_('Browse a working directory'),
tip=_('Browse a working directory'),
icon=self.create_icon('DirOpenIcon'),
triggered=self.select_directory,
)
parent_action = self.create_action(
WorkingDirectoryActions.Parent,
text=_('Change to parent directory'),
tip=_('Change to parent directory'),
icon=self.create_icon('up'),
triggered=self.parent_directory,
)
for item in [self.previous_action, self.next_action, self.pathedit,
browse_action, parent_action]:
self.add_item_to_toolbar(
item,
self.toolbar,
section=WorkingDirectoryToolbarSections.Main,
)
def update_actions(self):
self.previous_action.setEnabled(
self.histindex is not None and self.histindex > 0)
self.next_action.setEnabled(
self.histindex is not None
and self.histindex < len(self.history) - 1
)
@on_conf_change(option='history')
def on_history_update(self, value):
self.history = value
# --- API
# ------------------------------------------------------------------------
def get_workdir(self):
"""
Get the working directory from our config system or return the user
home directory if none could be found.
Returns
-------
str:
The current working directory.
"""
if self.get_conf('startup/use_fixed_directory', ''):
workdir = self.get_conf('startup/fixed_directory')
elif self.get_conf('console/use_project_or_home_directory', ''):
workdir = get_home_dir()
else:
workdir = self.get_conf('console/fixed_directory', '')
if not osp.isdir(workdir):
workdir = get_home_dir()
return workdir
@Slot()
def select_directory(self, directory=None):
"""
Select working directory.
Parameters
----------
directory: str, optional
The directory to change to.
Notes
-----
If directory is None, a get directory dialog will be used.
"""
if directory is None:
self.sig_redirect_stdio_requested.emit(False)
directory = getexistingdirectory(
self,
_("Select directory"),
getcwd_or_home(),
)
self.sig_redirect_stdio_requested.emit(True)
if directory:
self.chdir(directory)
@Slot()
def previous_directory(self):
"""Select the previous directory."""
self.histindex -= 1
self.chdir(directory='', browsing_history=True)
@Slot()
def next_directory(self):
"""Select the next directory."""
self.histindex += 1
self.chdir(directory='', browsing_history=True)
@Slot()
def parent_directory(self):
"""Change working directory to parent one."""
self.chdir(osp.join(getcwd_or_home(), osp.pardir))
@Slot(str)
@Slot(str, bool)
@Slot(str, bool, bool)
def chdir(self, directory, browsing_history=False, emit=True):
"""
Set `directory` as working directory.
Parameters
----------
directory: str
The new working directory.
browsing_history: bool, optional
Add the new `directory` to the browsing history. Default is False.
emit: bool, optional
Emit a signal when changing the working directory.
Default is True.
"""
if directory:
directory = osp.abspath(str(directory))
# Working directory history management
if browsing_history:
directory = self.history[self.histindex]
elif directory in self.history:
self.histindex = self.history.index(directory)
else:
if self.histindex is None:
self.history = []
else:
self.history = self.history[:self.histindex + 1]
self.history.append(directory)
self.histindex = len(self.history) - 1
# Changing working directory
try:
os.chdir(directory)
self.pathedit.add_text(directory)
self.update_actions()
if emit:
self.sig_current_directory_changed.emit(directory)
except OSError:
self.history.pop(self.histindex)
def get_history(self):
"""
Get the current history list.
Returns
-------
list
List of string paths.
"""
return [str(self.pathedit.itemText(index)) for index
in range(self.pathedit.count())]
def set_history(self, history):
"""
Set the current history list.
Parameters
----------
history: list
List of string paths.
"""
self.set_conf('history', history)
if history:
self.pathedit.addItems(history)
if self.get_conf('workdir', None) is None:
workdir = self.get_workdir()
else:
workdir = self.get_conf('workdir')
self.chdir(workdir)
|
StarcoderdataPython
|
1611245
|
<reponame>youssefaz94/leshy<filename>src/resolvers/resolver.py
import logging as lg
from src.scrap.statusEnum import StateWorker
from src.resolvers.resolverFact import ResolverFact
_logger = lg.getLogger(__name__)
class Resolver:
def __init__(self, sources, worker_to_source_map, master, statuskeeper):
self._sources = sources
self._workerStateSource = {}
self._resolver_fact = None
self._master = master
self._statuskeeper = statuskeeper
self.start(worker_to_source_map)
def start(self, worker_to_source_map):
self._resolver_fact = ResolverFact(resolver = self, master= self._master, statuskeeper= self._statuskeeper)
for _worker, _source in worker_to_source_map.items():
self._workerStateSource[_worker] = {"failed": [], "current":_source, "pending": list(filter( lambda x: x != _source, self._sources))}
def resolve(self, worker, failure: Exception):
return self._resolver_fact.get_resolver(failure).resolve(worker)
def killWorker(self, worker):
# "failed" is a list, so record the failed source by appending to it
self._workerStateSource[worker]["failed"].append(self._workerStateSource[worker]["current"])
self._workerStateSource[worker]["current"] = None
def switchSource(self, worker):
self._workerStateSource[worker]["failed"].append(self._workerStateSource[worker]["current"])
# "pending" is a sibling key of "current", not nested under it
new_source = self._workerStateSource[worker]["pending"].pop()
self._workerStateSource[worker]["current"] = new_source
self._master.setWorkerSource(worker, new_source)
return
|
StarcoderdataPython
|
6564830
|
import contextlib
import csv
import pprint
import sys
from datetime import datetime
from nesteddict import NestedDict
import pymongo
class CursorFormatter(object):
'''
Output a set of cursor elements by iterating over them.
If filename is a file name, output the content to that file.
'''
def __init__(self, cursor, filename="", formatter="json", results=None):
'''
Data from cursor
output to <filename>suffix.ext.
'''
self._results = []
self._cursor = cursor
if (isinstance(cursor, pymongo.cursor.Cursor) or
isinstance(cursor, pymongo.command_cursor.CommandCursor)):
self._format = formatter
self._filename = filename
if results:
self._results = results
else:
raise ValueError("aggregate argument to CursorFormatter is not of class pymongo cursor")
def results(self):
return self._results
@contextlib.contextmanager
def _smart_open(self, filename=None):
if filename and filename != '-':
fh = open(filename, 'w')
else:
fh = sys.stdout
try:
yield fh
finally:
if fh is not sys.stdout:
fh.close()
@staticmethod
def dateMapField(doc, field, time_format=None):
'''
Given a field that contains a datetime, output it as a string; otherwise
pprint and other functions will abandon ship when they meet BSON time objects.
'''
if time_format is None:
time_format = "%d-%b-%Y %H:%M"
d = NestedDict(doc)
if field in d:
value = d[field]
if isinstance(value, datetime):
d[field] = value.strftime(time_format)
else:
d[field] = datetime.fromtimestamp(value/1000)
return dict(d)
@staticmethod
def fieldMapper(doc, fields):
"""
Take 'doc' and create a new doc using only keys from the 'fields' list.
Supports referencing fields using dotted notation "a.b.c" so we can parse
nested fields the way MongoDB does.
"""
if fields is None or len(fields) == 0:
return doc
new_doc = NestedDict()
old_doc = NestedDict(doc)
for i in fields:
if i in old_doc:
# print( "doc: %s" % doc )
# print( "i: %s" %i )
new_doc[i] = old_doc[i]
return dict(new_doc)
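# Example (assumed, relying on NestedDict's dotted-key lookup):
# fieldMapper({'a': {'b': 1}, 'c': 2}, ['a.b']) -> {'a': {'b': 1}}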
@staticmethod
def dateMapper(doc, date_map, time_format=None):
'''
For all the fields in "date_map", find that key in doc and map the datetime
object to a strftime string. This way pprint and others will print out
readable datetimes.
'''
if date_map:
for i in date_map:
# each i is a field name; dateMapField converts any datetime it holds
CursorFormatter.dateMapField(doc, i, time_format=time_format)
return doc
def printCSVCursor(self, c, fieldnames, datemap, time_format=None):
'''
Output CSV format. items are separated by commas. We only output the fields listed
in the 'fieldnames'. We datemap fields listed in 'datemap'. If a datemap listed field
is not a datetime object we will thow an exception.
'''
with self._smart_open(self._filename) as output:
writer = csv.DictWriter(output, fieldnames=fieldnames)
writer.writeheader()
count = 0
for i in c:
self._results.append(i)
count = count + 1
d = CursorFormatter.fieldMapper(i, fieldnames)
d = CursorFormatter.dateMapper(d, datemap, time_format)
writer.writerow(d)
return count
def printJSONCursor(self, c, fieldnames, datemap, time_format=None):
"""
Output plain JSON objects.
:param c: collection
:param fieldnames: fieldnames to include in output
:param datemap: fieldnames to map dates to date strings
:param time_format: field names to map to a specific time format
:return:
"""
count = 0
with self._smart_open(self._filename) as output:
for i in c:
# print( "processing: %s" % i )
# print( "fieldnames: %s" % fieldnames )
self._results.append(i)
d = CursorFormatter.fieldMapper(i, fieldnames)
# print( "processing fieldmapper: %s" % d )
d = CursorFormatter.dateMapper(d, datemap, time_format)
pprint.pprint(d, output)
count = count + 1
return count
def printCursor(self, c, fieldnames=None, datemap=None, time_format=None):
'''
Output a cursor to a filename or stdout if filename is "-".
fmt defines whether we output CSV or JSON.
'''
if self._format == 'csv':
count = self.printCSVCursor(c, fieldnames, datemap, time_format)
else:
count = self.printJSONCursor(c, fieldnames, datemap, time_format)
return count
def output(self, fieldNames=None, datemap=None, time_format=None, aggregate=True):
'''
Output all fields using the fieldNames list. For fields in the datemap list,
the field is assumed to be a date and is mapped to a readable string.
'''
count = self.printCursor(self._cursor, fieldNames, datemap, time_format)
|
StarcoderdataPython
|
5150480
|
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="rotten_tomatoes_cli_fork",
description="Rotten Tomatoes Command Line Tool",
long_description=long_description,
long_description_content_type="text/markdown",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/jaebradley/rotten_tomatoes_cli",
version="0.0.3",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
python_requires=">=3.4",
install_requires=[
"Click>=6.7",
"rotten_tomatoes_client @ git+https://github.com/seanbreckenridge/rotten_tomatoes_client@master#egg=rotten_tomatoes_client",
"terminaltables>=3.1.0",
"termcolor>=1.1.0"
],
entry_points={
"console_scripts": [
"rotten= scripts.rotten:rotten"
],
},
keywords=[
"Movies",
"Rotten Tomatoes",
],
classifiers=[]
)
|
StarcoderdataPython
|
6405750
|
from clearml import Task
default_mlops_settings = {
'project_name': 'Default Project',
'task_name': 'default_task',
'connect_frameworks': {
'matplotlib': False,
'tensorflow': False,
'tensorboard': False,
'pytorch': False,
'xgboost': False,
'scikit': False,
'fastai': False,
'lightgbm': False,
'hydra': False
}
}
class MLOpsTask(object):
"""
This class encapsulates a ClearML MLOps task instance.
"""
def __init__(
self,
settings
):
"""
This method initializes parameters
:return: None
"""
self.settings = settings
self._task = None
@property
def task(self):
if not self.settings["use_mlops"]:
return None
if self._task is not None:
return self._task
self._task = Task.init(project_name=self.settings["project_name"],
task_name=self.settings["task_name"],
auto_connect_frameworks=self.settings["connect_frameworks"])
return self._task
def log_configuration(self, config_dict, name):
self.task.connect(config_dict, name)
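# A minimal (hypothetical) usage sketch; the extra 'use_mlops' key is read by
# the `task` property above:
# settings = dict(default_mlops_settings, use_mlops=True)
# mlops = MLOpsTask(settings)
# mlops.log_configuration({'lr': 1e-3, 'epochs': 10}, 'train_config')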
|
StarcoderdataPython
|
6632541
|
from django.http import HttpResponse, HttpResponseRedirect
import json
from django.core.mail import send_mail
from django.core.exceptions import ObjectDoesNotExist
from string import letters, digits
import random
from random import choice
from django.conf import settings
from grid_user.forms import CreateUserForm
import xlab.settings
from models import TempUser, User
from slipstream.user.account import UserAccount
import logging
from django.shortcuts import render
log = logging.getLogger("[GRID_USER]: ")
def ajax_checkusername(request):
datadict = {}
username = request.POST.get('username', '')
user = User.objects.filter(username=username)
tempuser = TempUser.objects.filter(username=username)
if user or tempuser:
datadict['available'] = False
datadict['username'] = username
else:
datadict['available'] = True
datadict['username'] = username
return HttpResponse(json.dumps(datadict), content_type="text/json")
def ajax_register_user(request):
if not request.is_ajax():
return HttpResponse(content="Invalid Request Method.", status=400)
form = CreateUserForm(request.POST)
if form.is_valid():
data = request.POST
email = data['email']
firstname = data['firstname']
lastname = data['lastname']
password = data['password']
username = data['username']
key = ''.join(choice(letters + digits) for i in range(64))
log.info('Key Created: %s' % key)
# Test for existing email / avatar name locally
test = ''
xtest = ''
try:
test = User.objects.get(firstname=firstname, lastname=lastname)
xtest += "Firstname and Lastname exists"
except ObjectDoesNotExist:
pass
try:
test = User.objects.get(email=email)
xtest += " Email exists"
except ObjectDoesNotExist:
pass
try:
test = User.objects.get(username=username)
xtest += " Username exists"
except ObjectDoesNotExist:
pass
x_user = UserAccount(
data['firstname'],
data['lastname'],
data['password'],
data['email'],
)
activation_server = settings.ACCOUNT_SERVER_ADDRESS
account_server = settings.ACCOUNT_SERVER_URL
from_address = settings.ACCOUNT_ADMIN_EMAIL
# Test for existing user on grid
if not x_user.test_account(account_server) or xtest != '':
#if xtest != '':
datadict = {'status': False}
datadict['err_message'] = 'Existing Account Of Same %s: Please register with different credentials'%xtest
return HttpResponse(json.dumps(datadict), content_type="text/json")
# Attempt to create a temporary user
# try:
# tmp_user = TempUser.objects.create_temp_user(
# data['email'], data['firstname'], data['lastname'],
# key, data['password']
# )
tmp_user =""
try:
tmp_user = form.save(commit=False)
tmp_user.activation_key = key
tmp_user.save()
except:
datadict = {'status': False}
datadict['err_message'] = 'Existing Account: Please register with different credentials'
return HttpResponse(json.dumps(datadict), content_type="text/json")
# activate_link = '%s:%s'%(request.META['SERVER_NAME'], request.META['SERVER_PORT'])
send_mail('Account activation link', 'Please use the link to activate your account: %s/activate?key=%s' %
(activation_server, key), from_address, [email])
datadict = {'status': True}
datadict['firstname'] = firstname
datadict['lastname'] = lastname
datadict['email'] = email
datadict['id'] = tmp_user.id
return HttpResponse(json.dumps(datadict), content_type="text/json")
else:
datadict = {'status': False, 'error': form.errors}
return HttpResponse(
content=json.dumps(datadict),
mimetype='application/json'
)
def ajax_accounttype_user(request):
if request.method == 'POST':
lastrid = request.POST['user_id']
user = TempUser.objects.get(id=lastrid)
user.accounttype = "basic membership"
user.save()
return HttpResponse(content_type="text/json", status=200)
def ajax_checkpassword(request):
datadict = {}
if not request.is_ajax():
return HttpResponse(content="Invalid Request Method.", status=400)
currentpass = request.POST.get('password', None)
try:
check = request.user.check_password(currentpass)
except:
check = False
if check:
datadict['status'] = True
else:
datadict['status'] = False
return HttpResponse(json.dumps(datadict), content_type="text/json")
|
StarcoderdataPython
|
3502569
|
import os
from tqdm import tqdm
import json
car_model_dir = '/data/Kaggle/pku-autonomous-driving/car_models_json'
obj_output_dir = '/data/Kaggle/pku-autonomous-driving/car_model_obj'
car_model_dict = {}
for car_name in tqdm(os.listdir(car_model_dir)):
with open(os.path.join(car_model_dir, car_name)) as json_file:
json_dict = json.load(json_file)
output_obj = os.path.join(obj_output_dir, car_name.replace('json', 'obj'))
with open(output_obj, 'w') as f:
f.write("# OBJ file\n")
for vertices in json_dict['vertices']:
f.write("v")
for v in vertices:
f.write(" %.4f" % v)
f.write('\n')
for faces in json_dict['faces']:
f.write("f")
for face in faces:
f.write(" %d" % (face + 1))
f.write("\n")
|
StarcoderdataPython
|
140492
|
import itertools
import time
import numpy as np
import scipy.ndimage as ndi
import pytest
from mrrt.utils import ImageGeometry, ellipse_im
from mrrt.mri import mri_exp_approx
__all__ = ["test_mri_exp_approx"]
def _test_mri_exp_approx1(
segments=4,
nx=64,
tmax=25e-3,
dt=5e-6,
autocorr=False,
use_rmap=True,
atype="hist,time,unif",
nhist=None,
ctest=True,
verbose=False,
tol=None,
):
if verbose:
from matplotlib import pyplot as plt
from pyvolplot import subplot_stack
ti = np.arange(0, tmax, dt)
if True:
# Generate a synthetic fieldmap
fmap = np.zeros((64, 64))
fmap[6:27, 9:20] = 90
fmap[36:57, 9:20] = 120
fmap[5:26, 29:60] = 30
fmap[37:58, 29:60] = 60
if nx != 64:
fmap = ndi.zoom(fmap, nx / 64, order=0)
kernel_size = int(np.round(5 * nx / 64))
smoothing_kernel = np.ones((kernel_size, kernel_size)) / (
kernel_size ** 2
)
ndi.convolve(fmap, smoothing_kernel, output=fmap)
fmap = fmap + 10
if verbose:
plt.figure()
plt.imshow(fmap, interpolation="nearest", cmap="gray")
plt.title("Field Map")
if use_rmap:
# generate a T2 relaxation map
rmap = (
np.asarray(
[[0, 0, 18, 23, 0, 20 * 64 / nx], [6, 0, 8, 8, 0, 3 * 64 / nx]]
)
* nx
/ 64
)
ig = ImageGeometry(shape=(nx, nx), fov=(nx, nx))
rmap, params = 1 * ellipse_im(ig, rmap, oversample=3)
if verbose:
plt.figure()
plt.imshow(rmap, cmap="gray", interpolation="nearest")
plt.title("Relax Map"),
else:
rmap = 0
zmap = rmap + (2j * np.pi) * fmap
if not nhist:
if not np.any(rmap > 0):
nhist = [40]
else:
nhist = [40, 10]
# autocorr_arg = ['autocorr', True] # test autocorrelation version
if True: # convert to single precision
ti = np.asarray(ti, dtype="float32")
zmap = np.asarray(zmap, dtype="complex64") # single precision complex
if isinstance(segments, int):
pass
elif isinstance(segments, (list, tuple)) and len(segments) == 2:
pass
else:
raise ValueError("Invalid choice for segments")
kwargs = {"autocorr": autocorr, "ctest": ctest, "verbose": verbose}
tstart = time.time()
if tol is None:
B, C, hk, zk = mri_exp_approx(
ti, zmap, segments, approx_type=(atype, nhist), **kwargs
)
else:
B, C, hk, zk = mri_exp_approx(
ti, zmap, [segments, tol], approx_type=(atype, nhist), **kwargs
)
print("\tduration=%g s" % (time.time() - tstart))
if ctest:
Eh = np.exp(-ti[:, np.newaxis] * zk.ravel()[np.newaxis, :])
else:
Eh = np.exp(-ti[:, np.newaxis] * zmap.ravel()[np.newaxis, :])
Ep = np.dot(B, C) # matrix product
err = np.abs(Eh - Ep)
mse = np.mean(np.power(err, 2), axis=0)
if ctest:
wrms = np.sqrt(np.dot(mse, hk) / np.sum(hk))
else:
wrms = -1
if verbose:
subplot_stack(1000 * ti, B, title="Basis Components", colors=["k", "m"])
nf = np.floor(nhist[0] / 4)
if len(nhist) == 2:
ik = np.array([0, nf, 2 * nf, 3 * nf, nhist[1] - 1]) + 2 * nf * nhist[1]
ik = ik.tolist()
elif len(nhist) == 1:
ik = [0, nf, 2 * nf, 3 * nf, nhist[0] - 1]
ik = np.asarray(ik, dtype=int)
mse_mean = mse.mean()
max_err = err.max()
if verbose:
fig = subplot_stack(1000 * ti, Eh[:, ik], colors=["g", "k"])
fig = subplot_stack(
1000 * ti,
Ep[:, ik],
colors=["b--", "r--"],
fig=fig,
title="True and Approx",
)
fig = subplot_stack(
1000 * ti,
err[:, ik],
colors=["b--", "r--"],
title="True and Approx",
)
print(
"\tfor L=%d, wrms=%g, mse = %g, max_err=%g"
% (B.shape[1], wrms, mse_mean, max_err)
)
return wrms, mse_mean, max_err
@pytest.mark.parametrize(
"use_rmap, alg",
itertools.product(
[False, True],
[
"hist,svd",
"hist,time,unif",
"time,unif",
"hist,fs,unif",
"hist,fs,prctile",
"hist,fs,kmeans",
],
),
)
@pytest.mark.filterwarnings("ignore:the matrix subclass is not")
def test_mri_exp_approx(use_rmap, alg, verbose=False):
if alg == ["hist,fs,kmeans"]:
pytest.importorskip("sklearn")
tmax = 25e-3 # overall time duration (s)
wrms, mse_mean, max_err = _test_mri_exp_approx1(
segments=8, # number of segments
nx=64,
tmax=tmax,
dt=1e-3,
autocorr=False,
use_rmap=use_rmap,
atype=alg,
nhist=None,
ctest=True,
verbose=verbose,
)
if alg == "hist,fs,prctile" or (use_rmap and alg == "hist,fs,kmeans"):
# may want to just remove these options, as they perform relatively
# poorly
assert mse_mean < 0.01
else:
assert mse_mean < 1e-5
|
StarcoderdataPython
|
3355570
|
# Run Grtrans with rrjet model
# The rrjet model is defined in "fluid_model_rrjet.py"
# NOTE -- currently the power law emissivity is very slow because parallelization is off
# First make grtrans with 'make'
# Then run this in python
import numpy as np
import grtrans_batch as gr
import matplotlib.pyplot as plt
import scipy.ndimage.filters as filt
ang=20.
name = 'rrjet'+str(ang)
mu = np.cos(ang*np.pi/180.)
size = 300.
uout = 1./(10*size)
npix = 100
ngeo = 5000
cmperMpc = 3.086e24
MBH = 6.7e9
DTOBH = 16.528*cmperMpc
RADPERUAS = np.pi/180./3600./1.e6
psize_rg = 2*size/npix
cmperrg = 147708.8 * MBH
psize_cm = psize_rg * cmperrg
psize_rad = psize_cm / DTOBH
psize_uas = psize_rad / RADPERUAS
pp= 2.001
RF = 43.e9
cfun = 'jet'
cfun2 = 'seismic'
RERUN = True
FNAME = 'grtrans_jet_compare.txt'
def main():
# run grtrans
x=gr.grtrans()
x.write_grtrans_inputs(name+'.in', oname=name+'.out',
fname='RRJET',phi0=0.,
betaeconst=1.e-4, ximax=10.,
nfreq=1,fmin=RF,fmax=RF,
gmin=10., gmax=1.e35, p2=pp, p1=pp,
#ename='SYNCHPL',
ename='POLSYNCHPL',
nvals=4, fpositron=0,
spin=0., standard=1,
uout=uout,
mbh=MBH,
#epcoefindx=[1,1,1,1,1,1,1],
#epcoefindx=[1,1,1,1,0,0,0],
mdotmin=1.57e15,mdotmax=1.57e15,nmdot=1,
nmu=1,mumin=mu,mumax=mu,
gridvals=[-size,size,-size,size],
nn=[npix,npix,ngeo],
hindf=1,hnt=1,
muval=1.)
if RERUN:
x.run_grtrans()
# load image
x.read_grtrans_output()
x.convert_to_Jy(DTOBH)
#grt_obj=x
save_grtrans_image(x)
display_grtrans_image(x)
def save_grtrans_image(grt_obj):
"""quick save, not ehtim compatible"""
I_im = grt_obj.ivals[:,0,0].reshape(npix,npix).flatten()
Q_im = grt_obj.ivals[:,1,0].reshape(npix,npix).flatten()
U_im = grt_obj.ivals[:,2,0].reshape(npix,npix).flatten()
V_im = grt_obj.ivals[:,3,0].reshape(npix,npix).flatten()
# convert to Tb
factor = 3.254e13/(RF**2 * psize_rad**2)
I_im *= factor
Q_im *= factor
U_im *= factor
V_im *= factor
x = np.array([[i for i in range(npix)] for j in range(npix)]).flatten()
y = np.array([[j for i in range(npix)] for j in range(npix)]).flatten()
x -= npix // 2  # integer division keeps the int dtype (npix/2 is a float in Python 3)
y -= npix // 2
x = x*psize_uas
y = y*psize_uas
outdat = np.vstack((x.T,y.T,I_im.T,Q_im.T,U_im.T,V_im.T)).T
np.savetxt('../rrjet_and_riaf/'+FNAME,outdat)
#np.savetxt('../rrjet_and_riaf/grtrans_jet_compare_positron_noconv.txt',outdat)
return
def display_grtrans_image(grt_obj,nvec=20,veccut=0.005,blur_kernel=1.25):
plt.close('all')
I_im = grt_obj.ivals[:,0,0].reshape(npix,npix)
Q_im = grt_obj.ivals[:,1,0].reshape(npix,npix)
U_im = grt_obj.ivals[:,2,0].reshape(npix,npix)
V_im = grt_obj.ivals[:,3,0].reshape(npix,npix)
I_im = filt.gaussian_filter(I_im, (blur_kernel, blur_kernel))
Q_im = filt.gaussian_filter(Q_im, (blur_kernel, blur_kernel))
U_im = filt.gaussian_filter(U_im, (blur_kernel, blur_kernel))
V_im = filt.gaussian_filter(V_im, (blur_kernel, blur_kernel))
# convert to Tb
factor = 3.254e13/(RF**2 * psize_rad**2)
I_im *= factor
Q_im *= factor
U_im *= factor
V_im *= factor
# Polarization Vectors
P_im = np.abs(Q_im + 1j*U_im)
m_im = P_im/I_im
thin = npix//nvec
mask = I_im > veccut * np.max(I_im)
mask2 = mask[::thin, ::thin]
m = m_im[::thin, ::thin][mask2]
x = (np.array([[i for i in range(npix)] for j in range(npix)])[::thin, ::thin])
x = x[mask2]
y = (np.array([[j for i in range(npix)] for j in range(npix)])[::thin, ::thin])
y = y[mask2]
a = (-np.sin(np.angle(Q_im+1j*U_im)/2)[::thin, ::thin])
a = a[mask2]
#a = m*a
b = ( np.cos(np.angle(Q_im+1j*U_im)/2)[::thin, ::thin])
b = b[mask2]
#b = m*b
P_im[np.logical_not(mask)]=0.
m_im[np.logical_not(mask)]=0.
# ticks
xticks = ticks(npix, 2*size/npix)
yticks = ticks(npix, 2*size/npix)
# display Stokes I
plt.figure(0)
im = plt.imshow(I_im, cmap=plt.get_cmap(cfun), interpolation='gaussian')
cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
cb.set_label('Tb (K)', fontsize=14)
plt.title(("Stokes I, %.2f GHz " % (RF/1e9)), fontsize=16)
plt.xticks(xticks[0], xticks[1])
plt.yticks(yticks[0], yticks[1])
plt.xlabel('x/rg')
plt.ylabel('y/rg')
# display Stokes Q
plt.figure(1)
im = plt.imshow(Q_im, cmap=plt.get_cmap(cfun2), interpolation='gaussian')
cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
cb.set_label('Tb (K)', fontsize=14)
plt.title(("Stokes Q, %.2f GHz " % (RF/1e9)), fontsize=16)
plt.xticks(xticks[0], xticks[1])
plt.yticks(yticks[0], yticks[1])
plt.xlabel('x/rg')
plt.ylabel('y/rg')
# display Stokes U
plt.figure(2)
im = plt.imshow(U_im, cmap=plt.get_cmap(cfun2), interpolation='gaussian')
cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
cb.set_label('Tb (K)', fontsize=14)
plt.title(("Stokes U, %.2f GHz " % (RF/1e9)), fontsize=16)
plt.xticks(xticks[0], xticks[1])
plt.yticks(yticks[0], yticks[1])
plt.xlabel('x/rg')
plt.ylabel('y/rg')
# display Stokes V
plt.figure(3)
im = plt.imshow(V_im, cmap=plt.get_cmap(cfun2), interpolation='gaussian')
cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
cb.set_label('Tb (K)', fontsize=14)
plt.title(("Stokes V, %.2f GHz " % (RF/1e9)), fontsize=16)
plt.xticks(xticks[0], xticks[1])
plt.yticks(yticks[0], yticks[1])
plt.xlabel('x/rg')
plt.ylabel('y/rg')
# display P
# plt.figure(4)
# im = plt.imshow(P_im, cmap=plt.get_cmap(cfun), interpolation='gaussian')
# cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
# cb.set_label('Tb (K)', fontsize=14)
# plt.title(("P, %.2f GHz " % (RF/1e9)), fontsize=16)
# plt.xticks(xticks[0], xticks[1])
# plt.yticks(yticks[0], yticks[1])
# plt.xlabel('x/rg')
# plt.ylabel('y/rg')
# # display m
# plt.figure(5)
# im = plt.imshow(m_im, cmap=plt.get_cmap('viridis'), interpolation='gaussian')
# cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
# cb.set_label('P/I', fontsize=14)
# plt.title(("P/I, %.2f GHz " % (RF/1e9)), fontsize=16)
# plt.xticks(xticks[0], xticks[1])
# plt.yticks(yticks[0], yticks[1])
# plt.xlabel('x/rg')
# plt.ylabel('y/rg')
# display I with pol ticks
plt.figure(6)
im = plt.imshow(I_im, cmap=plt.get_cmap(cfun), interpolation='gaussian')
cb = plt.colorbar(im, fraction=0.046, pad=0.04, orientation="vertical")
cb.set_label('Tb (K)', fontsize=14)
plt.title(("I, %.2f GHz " % (RF/1e9)), fontsize=16)
plt.xticks(xticks[0], xticks[1])
plt.yticks(yticks[0], yticks[1])
plt.xlabel('x/rg')
plt.ylabel('y/rg')
plt.quiver(x, y, a, b,
headaxislength=20, headwidth=1, headlength=.01, minlength=0, minshaft=1,
width=.01*npix, units='x', pivot='mid', color='k', angles='uv',
scale=1.0/thin)
plt.quiver(x, y, a, b,
headaxislength=20, headwidth=1, headlength=.01, minlength=0, minshaft=1,
width=.005*npix, units='x', pivot='mid', color='w', angles='uv',
scale=1.1/thin)
plt.show()
def ticks(axisdim, psize, nticks=8):
"""Return a list of ticklocs and ticklabels
psize should be in desired units
"""
axisdim = int(axisdim)
nticks = int(nticks)
if not axisdim % 2: axisdim += 1
if nticks % 2: nticks -= 1
tickspacing = float((axisdim-1))/nticks
ticklocs = np.arange(0, axisdim+1, tickspacing) - 0.5
ticklabels= np.around(psize * np.arange((axisdim-1)/2.0, -(axisdim)/2.0, -tickspacing), decimals=1)
return (ticklocs, ticklabels)
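# Usage sketch (illustrative values): tick locations and rg labels for a
# 192-pixel axis spanning 80 rg, matching the ticks(npix, 2*size/npix)
# calls above.
#
#     locs, labels = ticks(192, 80.0/192)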
if __name__=='__main__':
main()
from tests.base import TestCase, main, assets, create_ocrd_file, create_ocrd_file_with_defaults
from ocrd_utils import MIMETYPE_PAGE
from ocrd_models import OcrdMets
from ocrd_modelfactory import (
exif_from_filename,
page_from_image,
page_from_file
)
SAMPLE_IMG = assets.path_to('kant_aufklaerung_1784/data/OCR-D-IMG/INPUT_0017.tif')
SAMPLE_PAGE = assets.path_to('kant_aufklaerung_1784/data/OCR-D-GT-PAGE/PAGE_0017_PAGE.xml')
class TestModelFactory(TestCase):
def test_exif_from_filename(self):
exif_from_filename(SAMPLE_IMG)
with self.assertRaisesRegex(Exception, "Must pass 'image_filename' to 'exif_from_filename'"):
exif_from_filename(None)
def test_page_from_file(self):
f = create_ocrd_file_with_defaults(mimetype='image/tiff', local_filename=SAMPLE_IMG, ID='file1')
self.assertEqual(f.mimetype, 'image/tiff')
p = page_from_file(f)
self.assertEqual(p.pcGtsId, f.ID)
self.assertEqual(p.get_Page().imageWidth, 1457)
def test_page_from_file_page(self):
f = create_ocrd_file_with_defaults(mimetype=MIMETYPE_PAGE, local_filename=SAMPLE_PAGE)
p = page_from_file(f)
self.assertEqual(p.get_Page().imageWidth, 1457)
def test_page_from_file_no_local_filename(self):
with self.assertRaisesRegex(ValueError, "input_file must have 'local_filename' property"):
page_from_file(create_ocrd_file_with_defaults(mimetype='image/tiff'))
def test_page_from_file_no_existe(self):
with self.assertRaisesRegex(FileNotFoundError, "File not found: 'no-existe'"):
mets = OcrdMets.empty_mets()
ocrd_file = mets.add_file('FOO', ID='foo', local_filename='no-existe', mimetype='foo/bar')
page_from_file(ocrd_file)
def test_page_from_file_unsupported_mimetype(self):
with self.assertRaisesRegex(ValueError, "Unsupported mimetype"):
page_from_file(create_ocrd_file_with_defaults(local_filename=__file__, mimetype='foo/bar'))
def test_imports_from_generateds(self):
from ocrd_models.ocrd_page import MetadataItemType
if __name__ == '__main__':
main(__file__)
# devices/master/splthread.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import threading
from abc import ABCMeta, abstractmethod
class SplThread(metaclass=ABCMeta):
'''Partly abstract class to implement threading & message handling
'''
def __init__(self, msg_handler, child):
self.msg_handler = msg_handler
self.child = child
@abstractmethod
def _run(self):
''' starts the thread loop
'''
pass
@abstractmethod
def _stop(self):
''' stops the thread loop
'''
pass
@abstractmethod
def event_listener(self, queue_event):
''' handler for system events
'''
pass
@abstractmethod
def query_handler(self, queue_event, max_result_count):
''' handler for system queries
'''
pass
def run(self):
''' starts the child thread
'''
# Create a Thread with a function without any arguments
#th = threading.Thread(target=_ws_main, args=(server,))
self.th = threading.Thread(target=self.child._run)
# Start the thread
        self.th.daemon = True  # daemon thread: won't block interpreter shutdown
self.th.start()
def stop(self, timeout=0):
''' stops the child thread. If timeout > 0, it will wait timeout secs for the thread to finish
'''
self.child._stop()
if timeout > 0:
self.th.join(timeout)
        return self.th.is_alive()
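# A minimal concrete subclass sketch (hypothetical Worker, for illustration
# only): it fills in the four abstract methods and shows the run/stop
# lifecycle driven by the base class.
#
#     import time
#
#     class Worker(SplThread):
#         def __init__(self, msg_handler):
#             super().__init__(msg_handler, self)
#             self.running = False
#
#         def _run(self):
#             self.running = True
#             while self.running:
#                 time.sleep(0.1)   # poll or process messages here
#
#         def _stop(self):
#             self.running = False
#
#         def event_listener(self, queue_event):
#             pass
#
#         def query_handler(self, queue_event, max_result_count):
#             return []
#
#     w = Worker(msg_handler=None)
#     w.run()        # spawns the daemon thread
#     w.stop(1.0)    # returns True if the thread is still alive after 1 s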
# kynk94/torch-firewood
from . import gan, semantic_segmentation
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.gis.db import models
from django.contrib.gis.measure import D
from django.db.utils import IntegrityError
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.db.models import Count
from django.contrib.gis.db.models import Extent, Union
from django.contrib.gis.geos import fromstr
from django.db.models import Q
import json
import random
from sorl.thumbnail import get_thumbnail
import re
import logging
logger = logging.getLogger(__name__)
# south introspection rules
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ['^django\.contrib\.gis\.db\.models\.fields\.PointField'])
add_introspection_rules([], ['^django\.contrib\.gis\.db\.models\.fields\.MultiPolygonField'])
except ImportError:
pass
def get_extent_for_openlayers(geoqueryset, srid):
"""
Accepts a GeoQuerySet and SRID.
    Returns the extent as a GEOS geometry transformed to WGS84 (EPSG:4326), ready for OpenLayers.
The result can be directly passed out for direct use in a JavaScript map.
"""
extent = fromstr('MULTIPOINT (%s %s, %s %s)' % geoqueryset.extent(), srid=srid)
extent.transform(4326)
return extent
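# Usage sketch: bounding box of a queryset, ready for an OpenLayers map.
#
#     bbox = get_extent_for_openlayers(Park.objects.all(), 26986)
#     bounds = list(bbox.coords)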
class Event(models.Model):
name = models.CharField(max_length=100, blank=True, null=True)
slug = models.SlugField(max_length=100, blank=True, null=True)
description = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
def save(self):
"""
Auto-populate an empty slug field from the MyModel name and
if it conflicts with an existing slug then append a number and try
saving again.
"""
if not self.slug:
self.slug = slugify(self.name) # Where self.name is the field used for 'pre-populate from'
while True:
try:
super(Event, self).save()
# Assuming the IntegrityError is due to a slug fight
except IntegrityError:
match_obj = re.match(r'^(.*)-(\d+)$', self.slug)
if match_obj:
next_int = int(match_obj.group(2)) + 1
self.slug = match_obj.group(1) + '-' + str(next_int)
else:
self.slug += '-2'
else:
break
class Neighborhood(models.Model):
"""
Neighborhood or town if no neighborhoods are available.
"""
n_id = models.CharField('Neighborhood ID', max_length=20, help_text='ID derived from GIS, not necessarily unique since we are mixing neighborhood types.')
name = models.CharField(max_length=50)
slug = models.SlugField(max_length=100, blank=True, null=True)
geometry = models.MultiPolygonField(srid=26986)
objects = models.GeoManager()
class Meta:
verbose_name = _('Neighborhood')
verbose_name_plural = _('Neighborhoods')
ordering = ['name']
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('neighborhood', [slugify(self.name)])
def save(self, *args, **kwargs):
"""Auto-populate an empty slug field from the MyModel name and
if it conflicts with an existing slug then append a number and try
saving again.
"""
if not self.slug:
self.slug = slugify(self.name) # Where self.name is the field used for 'pre-populate from'
super(Neighborhood, self).save(*args, **kwargs)
class Parktype(models.Model):
name = models.CharField(max_length=50, blank=True, null=True)
class Meta:
verbose_name = _('Parktype')
verbose_name_plural = _('Parktypes')
def __unicode__(self):
return self.name
class Parkowner(models.Model):
name = models.CharField(max_length=50, blank=True, null=True)
class Meta:
verbose_name = _('Parkowner')
verbose_name_plural = _('Parkowners')
def __unicode__(self):
return self.name
class Friendsgroup(models.Model):
name = models.CharField(max_length=100)
url = models.URLField(blank=True, null=True)
class Parkimage(models.Model):
""" Image taken in a park.
"""
image = models.ImageField(upload_to='parkimages')
caption = models.TextField(default='', blank=True)
hero_image = models.BooleanField(default=False)
default = models.BooleanField(default=False)
hide = models.BooleanField(default=False)
class Meta:
verbose_name = _('Parkimage')
verbose_name_plural = _('Parkimages')
ordering = ['pk']
def __unicode__(self):
caption = getattr(self, 'caption', '')
return '%i: %s' % (self.pk, caption)
def get_thumbnail(self, include_large=False):
TN_DEFAULT_WIDTH = 300
TN_DEFAULT_HEIGHT = 200
TN_DEFAULT_SIZE = '300x200'
LARGE_SIZE = '950x600'
TN_MED_LANDSCAPE = '600x400'
TN_MED_PORTRAIT = '300x400'
PLACEHOLDER = 'http://placehold.it/300x200'
image = {
'src': PLACEHOLDER,
'masonry_src': PLACEHOLDER,
'caption': self.caption,
'default': self.default,
'width': TN_DEFAULT_WIDTH,
'height': TN_DEFAULT_HEIGHT
}
try:
image['large_src'] = get_thumbnail(self.image, LARGE_SIZE, crop='center', quality=100).url
tn = get_thumbnail(self.image, TN_DEFAULT_SIZE, crop='center', quality=80)
image['src'], image['masonry_src'] = tn.url, tn.url
if self.default:
image['width'], image['height'] = tn.width, tn.height
else:
if random.random() < 0.75:
image['ratio'] = self.image.width / self.image.height
if image['ratio'] == 0:
medium_image_portrait = get_thumbnail(self.image, TN_MED_PORTRAIT, crop='center', quality=100)
image['src'], image['masonry_src'] = tn.url, medium_image_portrait.url
image['width'], image['height'] = medium_image_portrait.width, medium_image_portrait.height
else:
medium_image_landscape = get_thumbnail(self.image, TN_MED_LANDSCAPE, crop='center', quality=100)
image['src'], image['masonry_src'] = tn.url, medium_image_landscape.url
image['width'], image['height'] = medium_image_landscape.width, medium_image_landscape.height
except Exception as e:
return None
return image
def thumbnail(self):
if self.image:
thumb = get_thumbnail(self.image.file, settings.ADMIN_THUMBS_SIZE, crop='center', quality=80)
return u'<img width="%s" height="%s" src="%s" alt="%s" />' % (thumb.width, thumb.height, thumb.url, self.caption)
else:
return None
thumbnail.short_description = 'Image'
thumbnail.allow_tags = True
get_thumbnail.allow_tags = True
def get_parks_string(self):
parks = [p.name for p in self.parks.all()]
return ", ".join(parks)
get_parks_string.short_description = 'Parks'
class Park(models.Model):
"""
Park or similar Open Space.
"""
ACCESS_CHOICES = (
('y', 'Yes'),
('n', 'No'),
('u', 'Unknown'),
)
os_id = models.CharField('OS ID', max_length=9, null=True, blank=True, help_text='Refers to MassGIS OS_ID')
name = models.CharField(max_length=100, blank=True, null=True)
slug = models.SlugField(max_length=100, blank=True, null=True, unique=True)
alt_name = models.CharField('Alternative name', max_length=100, blank=True, null=True)
description = models.TextField(blank=True, null=True)
address = models.CharField(max_length=50, blank=True, null=True)
phone = models.CharField(max_length=50, blank=True, null=True)
neighborhoods = models.ManyToManyField(Neighborhood, related_name='neighborhoods', blank=True)
parktype = models.ForeignKey(Parktype, blank=True, null=True)
parkowner = models.ForeignKey(Parkowner, blank=True, null=True)
friendsgroup = models.ForeignKey("Friendsgroup", blank=True, null=True)
events = models.ManyToManyField("Event", related_name="events", blank=True, null=True)
access = models.CharField(max_length=1, blank=True, null=True, choices=ACCESS_CHOICES)
area = models.FloatField(blank=True, null=True)
images = models.ManyToManyField(Parkimage, blank=True, null=True, related_name='parks')
featured = models.BooleanField(default=False)
geometry = models.MultiPolygonField(srid=26986)
objects = models.GeoManager()
class Meta:
verbose_name = _('Park')
verbose_name_plural = _('Parks')
def __unicode__(self):
return self.name
@classmethod
def featured_with_images(cls):
return (
cls.objects
.annotate(num_of_images=Count('images'))
.filter(featured=True, num_of_images__gt=0)
)
@models.permalink
def get_absolute_url(self):
return ('park', ['%s-%d' % (slugify(self.name), self.id)])
def area_acres(self):
return round((self.area / 4047), 1)
def lat_long(self):
self.geometry.transform(4326)
return [self.geometry.centroid.y, self.geometry.centroid.x]
def point_on_surface(self):
self.geometry.transform(4326)
return list(self.geometry.point_on_surface)
def get_image_thumbnails(self, include_large=False):
images = []
for i in self.images.filter(default=False):
try:
images.append(i.get_thumbnail(include_large=include_large))
            except IOError as e:
logger.error(e)
except Exception as e:
logger.error(e)
if not images:
for i in self.images.filter(default=True):
try:
images.append(i.get_thumbnail(include_large=include_large))
                except IOError as e:
logger.error(e)
except Exception as e:
logger.error(e)
return images
def to_external_document(self, user, include_large=False, include_extra_info=False):
change_url = None
if user.has_perm('parks.change_park'):
change_url = reverse('admin:parks_park_change', args=(self.id,))
def image_format(park):
image = park.get_image_thumbnails(include_large=include_large)[:1]
return image[0] if image else {}
facilities = Activity.objects.filter(activity__park=self.id).distinct()
doc = {
'id': self.id,
'url': self.get_absolute_url(),
'name': self.name,
'area': self.area_acres(),
'description': self.description,
'images': self.get_image_thumbnails(include_large=include_large),
'access': self.get_access_display(),
'address': self.address,
'owner': self.parkowner.name,
'point_on_surface': self.point_on_surface(),
'change_url': change_url
}
if include_extra_info:
            filtered_queryset = Park.objects.filter(name=self.name)  # does not yet transform correctly after aggregation
extent = get_extent_for_openlayers(filtered_queryset, 26986)
doc['nearby_parks'] = [{'id': p.pk, 'url': p.get_absolute_url(), 'name': p.name, 'image': image_format(p)} for p in self.nearest_parks_by_distance(0.25)]
doc['recommended_parks'] = [{'id': p.pk, 'url': p.get_absolute_url(), 'name': p.name, 'image': image_format(p)} for p in self.recommended_parks()]
doc['activities'] = [{'name': p.name, 'slug': p.slug, 'id': p.id } for p in facilities]
doc['bbox'] = list(extent.coords)
return doc
def nearest_parks_by_distance(self, distance_in_miles):
return Park.objects.filter(geometry__distance_lt=(self.geometry, D(mi=distance_in_miles))).filter(~Q(name=self.name)).distinct('name')
def recommended_parks(self):
return self.nearest_parks_by_distance(0.25).filter(parktype=self.parktype).filter(~Q(name=self.name)).distinct('name')
def get_facilities(self, park_id):
""" Returns facilities as JSON for park id
"""
park = Park.objects.get(pk=park_id)
facilities = Facility.objects.transform(4326).filter(park=park).select_related('facilitytype').prefetch_related('activity')
features = []
for f in facilities:
activities = [a.name for a in f.activity.all()]
geojson_prop = dict(
name=f.name,
icon=f.facilitytype.icon.url,
activities=activities,
status=f.status,
access=f.access,
notes=f.notes,
)
response = dict(type='FeatureCollection')
return facilities
def save(self, *args, **kwargs):
self.area = self.geometry.area
# FIXME: we need a better slugify routine
self.slug = '%s-%d' % (slugify(self.name), self.id)
super(Park, self).save(*args, **kwargs)
try:
# cache containing neighorhood
# doesn't work with admin forms, m2m get cleared during admin save
# FIXME: improve routine - compare neighborhoods we intersect with against already stored neighborhoods
neighborhoods = Neighborhood.objects.filter(geometry__intersects=self.geometry)
self.neighborhoods.clear()
self.neighborhoods.add(*neighborhoods)
except TypeError:
self.neighborhoods = None
class Activity(models.Model):
name = models.CharField(max_length=50, blank=True, null=True)
slug = models.SlugField(max_length=100, blank=True, null=True)
class Meta:
verbose_name = _('Activity')
verbose_name_plural = _('Activities')
ordering = ['name']
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name) # Where self.name is the field used for 'pre-populate from'
super(Activity, self).save(*args, **kwargs)
class Facilitytype(models.Model):
name = models.CharField(max_length=50, blank=True, null=True)
icon = models.ImageField(blank=False, upload_to="icons", null=False, help_text="Must be 32x37px to function properly")
class Meta:
verbose_name = _('Facilitytype')
verbose_name_plural = _('Facilitytypes')
def __unicode__(self):
return self.name
class Facility(models.Model):
"""
Facility in or outside a park.
"""
name = models.CharField(max_length=50, blank=True, null=True)
facilitytype = models.ForeignKey(Facilitytype)
activity = models.ManyToManyField(Activity, related_name='activity')
location = models.CharField(max_length=50, blank=True, null=True, help_text='Address, nearby Landmark or similar location information.')
status = models.CharField(max_length=50, blank=True, null=True) # FIXME: choices?
park = models.ForeignKey(Park, blank=True, null=True)
notes = models.TextField(blank=True,)
access = models.TextField(blank=True,)
geometry = models.PointField(srid=26986)
objects = models.GeoManager()
class Meta:
verbose_name = _('Facility')
verbose_name_plural = _('Facilities')
def activity_string(self):
out = []
for activity in self.activity.all():
out.append(activity.name)
return ", ".join(out)
activity_string.short_description = 'Activities'
def parktype_string(self):
return self.park.parktype
def icon_url(self):
if self.facilitytype.icon:
return '%s' % (self.facilitytype.icon.url,)
return '%sparks/img/icons/%s.png' % (settings.STATIC_URL, slugify(self.facilitytype))
def admin_url(self):
return reverse('admin:parks_facility_change', args=(self.id,))
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
try:
# cache containing park
self.park = Park.objects.get(geometry__contains=self.geometry)
except:
self.park = None
super(Facility, self).save(*args, **kwargs)
class Story(models.Model):
RATING_CHOICES = (
('1', "Happy"),
('2', "Blah"),
('3', "Idea"),
('4', "Sad"),
)
date = models.DateTimeField(auto_now_add=True)
title = models.CharField(max_length=100, blank=False, null=False)
rating = models.CharField(max_length=1, default='0', blank=False, null=False, choices=RATING_CHOICES)
text = models.TextField(blank=False, null=False)
email = models.EmailField(max_length=100, blank=False, null=False)
park = models.ForeignKey(Park, blank=True, null=False)
objectionable_content = models.BooleanField(default=False)
class Meta:
ordering = ('-date',)
@models.permalink
def get_absolute_url(self):
return ('parks.views.story', [str(self.id)])
# run_experiments.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from collections import OrderedDict
import spam
# assumed location of the experiment config (read and rewritten below)
CONFIG_FILENAME = 'config.json'
PATHS = [
'data/csv/enron1_clean_dataset.csv',
'data/csv/enron2_clean_dataset.csv',
'data/csv/enron3_clean_dataset.csv',
'data/csv/enron4_clean_dataset.csv',
'data/csv/enron5_clean_dataset.csv',
'data/csv/enron6_clean_dataset.csv',
]
if __name__ == '__main__':
for path in PATHS:
with open(CONFIG_FILENAME, 'r') as f:
CONFIG = json.load(f, object_pairs_hook=OrderedDict)
CONFIG['preprocess']['params']['read_csv_filepath'] = path
with open(CONFIG_FILENAME, 'w+') as f:
json.dump(CONFIG, f, indent=4)
        spam.main()  # assumption: the spam module exposes a main() entry point
print('\n{}\n'.format('=' * 50))
# user_test.py
import unittest
from user import User
class TestUser(unittest.TestCase):
'''
Test class that defines test cases for our accounts class behaviours.
Args:
unittest.TestCase: Testcase class that helps in creating test cases
'''
def setUp(self):
'''
set up method to clear before each test case.
'''
self.new_user = User("<NAME>","<EMAIL>", "12345")
def tearDown(self):
'''
Method that does clean up after each test case has run
'''
User.user_list = []
def test_account_init(self):
'''
        Test case to test if the object is initialized properly
'''
self.assertEqual(self.new_user.login_name, "<NAME>")
self.assertEqual(self.new_user.email,"<EMAIL>")
self.assertEqual(self.new_user.password, "<PASSWORD>")
def test_save_user(self):
'''
Test case to test that the user object is saved into the user_list
'''
self.new_user.save_user()
self.assertEqual(len(User.user_list),1)
def test_save_multiple_users(self):
'''
test case to see if multiple users can be saved into user_list
'''
self.new_user.save_user()
User("<NAME>","<EMAIL>","12345").save_user()
self.assertEqual(len(User.user_list),2)
def test_user_login(self):
'''
Test case to test user login
'''
self.new_user.save_user()
test_user = User("<NAME>","<EMAIL>","12345")
test_user.save_user()
find_user = User.user_login("<EMAIL>")
self.assertEqual(find_user.email,test_user.email)
def test_user_exists(self):
'''
        Test case to ascertain that a user exists
'''
self.new_user.save_user()
test_user = User("<NAME>","<EMAIL>","12345").save_user()
user_exists = User.user_exist("<EMAIL>")
self.assertTrue(user_exists)
if __name__ == '__main__':
unittest.main()
"""Test cases completing Checkout step 2"""
from module_06.src.elements.inventory_item import InventoryItem
from module_06.src.pages.inventory import InventorySortOptions
from module_06.src.pages.login import LoginPage
from module_06.src.pages.cart import CartPage
from module_06.tests.common.test_base import TestBase
from module_06.src.pages.checkout_first import CheckoutFirstStep
from module_06.src.pages.checkout_overview import CheckoutPage
_DEF_USER = 'standard_user'
_DEF_PASSWORD = '<PASSWORD>'
class TestCheckoutStep2(TestBase):
def test_completing_checkout(self):
login = LoginPage(self.driver)
login.open()
inventory_page = login.login(_DEF_USER, _DEF_PASSWORD)
first_item = inventory_page.products[0]
first_item: InventoryItem
details_page = first_item.open_details()
details_page.add_to_cart()
cart_page = inventory_page.open_cart()
cart_page.checkout()
contact_info_page = CheckoutFirstStep(self.driver)
contact_info_page.fill_info("Angie", "Garcia", "44540")
contact_info_page.checkout()
checkout_page = CheckoutPage(self.driver)
print('\n')
print(f'Label: {checkout_page.get_title_text()}')
assert checkout_page.get_title_text() == 'CHECKOUT: OVERVIEW', 'Checkout page title should be CHECKOUT: OVERVIEW'
checkout_page.finish_buy()
assert checkout_page.get_thanks_text() == 'THANK YOU FOR YOUR ORDER', 'Success page label should be THANK YOU FOR YOUR ORDER'
assert checkout_page.get_img() != 'False', 'Image Pony OK'
def test_validation_prices(self):
login = LoginPage(self.driver)
login.open()
inventory_page = login.login(_DEF_USER, _DEF_PASSWORD)
first_item = inventory_page.products[0]
first_item: InventoryItem
details_page = first_item.open_details()
details_page.add_to_cart()
cart_page = inventory_page.open_cart()
cart_page.checkout()
contact_info_page = CheckoutFirstStep(self.driver)
contact_info_page.fill_info("Angie", "Garcia", "44540")
contact_info_page.checkout()
checkout_page = CheckoutPage(self.driver)
assert checkout_page.get_subtotal_text() == "Item total: $29.99"
assert checkout_page.get_tax_text() == "Tax: $2.40"
assert checkout_page.get_total_text() == "Total: $32.39"
def test_cancel_checkout(self):
login = LoginPage(self.driver)
login.open()
inventory_page = login.login(_DEF_USER, _DEF_PASSWORD)
first_item = inventory_page.products[0]
first_item: InventoryItem
details_page = first_item.open_details()
details_page.add_to_cart()
cart_page = inventory_page.open_cart()
cart_page.checkout()
contact_info_page = CheckoutFirstStep(self.driver)
contact_info_page.fill_info("Angie", "Garcia", "44540")
contact_info_page.checkout()
checkout_page = CheckoutPage(self.driver)
checkout_page.cancel_checkout()
assert inventory_page.get_label() == 'PRODUCTS', 'Inventory page label should be Products'
# -*- coding: utf-8 -*-
from functools import partial
from trytond.model import ModelView, ModelSQL, fields, Unique
from trytond.pool import PoolMeta
from trytond.pyson import Eval
from trytond.transaction import Transaction
from nereid import url_for, current_website
from flask import json
from babel import numbers
__all__ = [
'Template', 'Product', 'ProductVariationAttributes', 'ProductAttribute',
]
__metaclass__ = PoolMeta
class Template:
"Product Template"
__name__ = 'product.template'
variation_attributes = fields.One2Many(
'product.variation_attributes', 'template', 'Variation Attributes',
)
@classmethod
def __setup__(cls):
super(Template, cls).__setup__()
cls._error_messages.update({
'missing_attributes':
"Please define following attributes for product %s: %s"
})
def validate_variation_attributes(self):
for product in self.products_displayed_on_eshop:
product.validate_attributes()
@classmethod
def validate(cls, templates):
super(Template, cls).validate(templates)
for template in templates:
template.validate_variation_attributes()
def _get_product_variation_data(self):
"""
This method returns the variation data in a serializable format
for the main API. Extend this module to add data that your
customization may need. In most cases, just extending the serialize
api method in product and variation should be sufficient.
"""
variation_attributes = map(
lambda variation: variation.serialize(),
self.variation_attributes
)
variants = []
for product in self.products_displayed_on_eshop:
variant_data = product.serialize(purpose='variant_selection')
variant_data['attributes'] = {}
for variation in self.variation_attributes:
if variation.attribute.type_ == 'selection':
# Selection option objects are obviously not serializable
# So get the name
variant_data['attributes'][variation.attribute.id] = \
str(
product.get_attribute_value(variation.attribute).id
)
else:
variant_data['attributes'][variation.attribute.name] = \
product.get_attribute_value(variation.attribute)
variants.append(variant_data)
rv = {
'variants': variants,
'variation_attributes': variation_attributes,
}
return rv
def get_product_variation_data(self):
"""
Returns json data for product for variants. The data returned
by this method should be sufficient to render a product selection
interface based on variation data.
The structure of the data returned is::
{
'variants': [
# A list of active records of the variants if not
# requested as JSON. If JSON, the record is serialized
# with type JSON.
{
# see documentation of the serialize method
# on product.product to see values sent.
}
],
'variation_attributes': [
{
# see documentation of the serialize method
# on product.varying_attribute to see values sent.
}
...
]
}
.. tip::
If your downstream module needs more information in the
JSON, subclass and implement _get_product_variation_data
which returns a dictionary. Otherwise, it would require you
to deserialize, add value and then serialize again.
"""
return json.dumps(self._get_product_variation_data())
class Product:
"Product"
__name__ = 'product.product'
@classmethod
def __setup__(cls):
super(Product, cls).__setup__()
cls._error_messages.update({
'missing_attributes':
"Please define following attributes for product %s: %s"
})
@classmethod
def copy(cls, products, default=None):
with Transaction().set_context(_copy=True):
# Inject a context variable to let other methods know that
# control is coming from copy method.
return super(Product, cls).copy(products, default)
def validate_attributes(self):
"""Check if product defines all the attributes specified in
template variation attributes.
"""
if Transaction().context.get('_copy'):
# While copying, attributes are added later so first time
# validation will always result in error saying there are
# missing attributes, hence skip the validation if its coming
# from copy method.
return
if not self.displayed_on_eshop:
return
required_attrs = set(
[v.attribute for v in self.template.variation_attributes]
)
missing = required_attrs - \
set(map(lambda attr: attr.attribute, self.attributes))
if missing:
missing = map(lambda attr: attr.name, missing)
self.raise_user_error(
"missing_attributes",
(self.rec_name, ', '.join(missing))
)
@classmethod
def validate(cls, products):
super(Product, cls).validate(products)
for product in products:
product.validate_attributes()
def get_attribute_value(self, attribute, silent=True):
"""
:param attribute: Active record of attribute
"""
for product_attr in self.attributes:
if product_attr.attribute == attribute:
return getattr(
product_attr,
'value_%s' % attribute.type_
)
else:
if silent:
return True
raise AttributeError(attribute.name)
def serialize(self, purpose=None):
"""
Return serializable dictionary suitable for use with variant
selection.
"""
if purpose != 'variant_selection':
return super(Product, self).serialize(purpose)
currency_format = partial(
numbers.format_currency,
currency=current_website.company.currency.code,
locale=current_website.default_locale.language.code
)
return {
'id': self.id,
'rec_name': self.rec_name,
'name': self.name,
'code': self.code,
'price': currency_format(self.sale_price(1)),
'url': url_for('product.product.render', uri=self.uri),
'image_urls': [
{
'large': (
image.transform_command().thumbnail(500, 500, 'a')
.url()
),
'thumbnail': (
image.transform_command().thumbnail(120, 120, 'a')
.url()
),
'regular': image.url,
}
for image in self.get_images()
],
}
class ProductVariationAttributes(ModelSQL, ModelView):
"Variation attributes for product template"
__name__ = 'product.variation_attributes'
sequence = fields.Integer('Sequence')
template = fields.Many2One('product.template', 'Template', required=True)
attribute = fields.Many2One(
'product.attribute', 'Attribute', required=True,
domain=[('sets', '=',
Eval('_parent_template', {}).get('attribute_set', -1))],
)
widget = fields.Selection([
('dropdown', 'Dropdown'),
('swatches', 'Swatches'),
], 'Widget', required=True)
@staticmethod
def default_widget():
return 'dropdown'
@staticmethod
def default_sequence():
return 10
def serialize(self, purpose=None):
"""
Returns serialized version of the attribute::
{
'sequence': 1, # Integer id to determine order
'name': 'shirt color', # Internal name of the attribute
'display_name': 'Color', # (opt) display name of attr
'rec_name': 'Color', # The name that should be shown
'widget': 'swatch', # clue on how to render widget
'options': [
# id, value of the options available to choose from
(12, 'Blue'),
(13, 'Yellow'),
...
]
}
"""
if self.attribute.type_ == 'selection':
# The attribute type needs options to choose from.
# Send only the options that the products displayed on webshop
# can have, instead of the exhaustive list of attribute options
# the attribute may have.
#
# For example, the color attribute values could be
# ['red', 'yellow', 'orange', 'green', 'black', 'blue']
# but the shirt itself might only be available in
# ['red', 'yellow']
#
# This can be avoided by returning options based on the product
# rather than on the attributes list of values
options = set()
for product in self.template.products_displayed_on_eshop:
value = product.get_attribute_value(self.attribute)
options.add((value.id, value.name))
else:
options = []
return {
'sequence': self.sequence,
'name': self.attribute.name,
'display_name': self.attribute.display_name,
'widget': self.widget,
'options': list(options),
'attribute_id': self.attribute.id,
}
class ProductAttribute:
__name__ = 'product.attribute'
@classmethod
def __setup__(cls):
super(ProductAttribute, cls).__setup__()
table = cls.__table__()
cls._sql_constraints += [
('unique_name', Unique(table, table.name),
'Attribute name must be unique!'),
]
import traceback
from _pytest.logging import LogCaptureFixture
from click.testing import Result
def assert_dict_key_and_val_in_stdout(dict_, stdout):
"""Use when stdout contains color info and command chars"""
for key, val in dict_.items():
if isinstance(val, dict):
assert key in stdout
assert_dict_key_and_val_in_stdout(val, stdout)
else:
assert key in stdout
assert str(val) in stdout
def assert_no_logging_messages_or_tracebacks(my_caplog, click_result):
"""
Use this assertion in all CLI tests unless you have a very good reason.
Without this assertion, it is easy to let errors and tracebacks bubble up
to users without being detected, unless you are manually inspecting the
console output (stderr and stdout), as well as logging output from every
test.
Usage:
```
def test_my_stuff(caplog):
...
result = runner.invoke(...)
...
assert_no_logging_messages_or_tracebacks(caplog, result)
```
    :param my_caplog: the caplog pytest fixture
:param click_result: the Result object returned from click runner.invoke()
"""
assert_no_logging_messages(my_caplog)
assert_no_tracebacks(click_result)
def assert_no_logging_messages(my_caplog):
"""
Assert no logging output messages.
    :param my_caplog: the caplog pytest fixture
"""
assert isinstance(
my_caplog, LogCaptureFixture
), "Please pass in the caplog object from your test."
messages = my_caplog.messages
assert isinstance(messages, list)
if messages:
print("Found logging messages:\n")
print("\n".join([m for m in messages]))
assert not messages
def assert_no_tracebacks(click_result):
"""
Assert no tracebacks.
:param click_result: the Result object returned from click runner.invoke()
"""
assert isinstance(
click_result, Result
), "Please pass in the click runner invoke result object from your test."
if click_result.exc_info:
        # introspect the call stack to make sure no exceptions found their way through
# https://docs.python.org/2/library/sys.html#sys.exc_info
_type, value, _traceback = click_result.exc_info
if not isinstance(value, SystemExit):
# SystemExit is a known "good" exit type
print("".join(traceback.format_tb(_traceback)))
assert False, "Found exception of type {} with message {}".format(
_type, value
)
if not isinstance(click_result.exception, SystemExit):
        # Ignore a SystemExit, because some commands intentionally exit in an error state
assert not click_result.exception, "Found exception {}".format(
click_result.exception
)
assert (
"traceback" not in click_result.output.lower()
), "Found a traceback in the console output: {}".format(click_result.output)
assert (
"traceback" not in click_result.stdout.lower()
), "Found a traceback in the console output: {}".format(click_result.stdout)
try:
assert (
"traceback" not in click_result.stderr.lower()
), "Found a traceback in the console output: {}".format(click_result.stderr)
except ValueError as ve:
# sometimes stderr is not captured separately
pass
# .leetcode/781.rabbits-in-forest.py
# @lc app=leetcode id=781 lang=python3
#
# [781] Rabbits in Forest
#
# https://leetcode.com/problems/rabbits-in-forest/description/
#
# algorithms
# Medium (55.99%)
# Likes: 563
# Dislikes: 446
# Total Accepted: 32.1K
# Total Submissions: 57.5K
# Testcase Example: '[1,1,2]'
#
# There is a forest with an unknown number of rabbits. We asked n rabbits "How
# many rabbits have the same color as you?" and collected the answers in an
# integer array answers where answers[i] is the answer of the i^th rabbit.
#
# Given the array answers, return the minimum number of rabbits that could be
# in the forest.
#
#
# Example 1:
#
#
# Input: answers = [1,1,2]
# Output: 5
# Explanation:
# The two rabbits that answered "1" could both be the same color, say red.
# The rabbit that answered "2" can't be red or the answers would be
# inconsistent.
# Say the rabbit that answered "2" was blue.
# Then there should be 2 other blue rabbits in the forest that didn't answer
# into the array.
# The smallest possible number of rabbits in the forest is therefore 5: 3 that
# answered plus 2 that didn't.
#
#
# Example 2:
#
#
# Input: answers = [10,10,10]
# Output: 11
#
#
#
# Constraints:
#
#
# 1 <= answers.length <= 1000
# 0 <= answers[i] < 1000
#
#
#
# @lc tags=hash-table;string;stack
# @lc imports=start
from imports import *
# @lc imports=end
# @lc idea=start
#
# A forest holds an unknown number of rabbits; each of the n queried rabbits
# reports how many other rabbits share its color. Return the minimum possible
# total, tallying the answers by implied group size.
#
# @lc idea=end
# @lc group=
# @lc rank=
# @lc code=start
class Solution:
def numRabbits(self, answers: List[int]) -> int:
d = defaultdict(int)
for a in answers:
d[a + 1] += 1
res = 0
for c in d:
v = d[c]
t = ((v - 1) // c + 1) * c
res += t
return res
pass
# @lc code=end
# @lc main=start
if __name__ == '__main__':
print('Example 1:')
print('Input : ')
print('answers = [1,1,2]')
print('Exception :')
print('5')
print('Output :')
print(str(Solution().numRabbits([1, 1, 2])))
print()
print('Example 2:')
print('Input : ')
print('answers = [10,10,10]')
print('Exception :')
print('11')
print('Output :')
print(str(Solution().numRabbits([10, 10, 10])))
print()
pass
# @lc main=end
"""Binarized model."""
import numpy as np
from .base_model import BaseModel
from ...core import Task
class BinarizedClassifier(BaseModel):
def __init__(self, model, data_type, label_index):
"""
Initialize a Model.
Args:
model (torch.nn.Module): model to wrap.
data_type (depiction.core.DataType): data type.
label_index (int): index of the label to consider as positive.
"""
super(BinarizedClassifier, self).__init__(Task.BINARY, data_type)
self.model = model
self.label_index = label_index
def predict(self, sample, *args, **kwargs):
"""
Run the model for inference on a given sample and with the provided
parameters.
Args:
sample (np.ndarray): an input sample for the model.
args (list): list of arguments for prediction.
kwargs (dict): list of key-value arguments for prediction.
Returns:
            np.ndarray: 1 where the argmax logit equals label_index, else 0.
"""
y = self.model.predict(sample, *args, **kwargs)
        return (np.argmax(y, axis=1) == self.label_index).astype(int)
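# Usage sketch (hypothetical wrapped model): any object whose predict()
# returns per-class logits of shape (n_samples, n_classes) can be wrapped.
#
#     clf = BinarizedClassifier(model, data_type, label_index=1)
#     clf.predict(x_batch)   # -> array of 0/1, 1 where class 1 wins the argmax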
class Queen:
def __init__(self, row, column):
if row < 0 or row > 7 or column < 0 or column > 7:
raise ValueError(r".+")
self.row = row
self.col = column
def can_attack(self, another_queen):
if self.row == another_queen.row and self.col == another_queen.col:
raise ValueError(r".+")
return self.row == another_queen.row \
or self.col == another_queen.col \
or abs(self.row - another_queen.row) == abs(self.col - another_queen.col)
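# Quick sanity checks for the rules above: a shared diagonal is an attack,
# while two squares a knight's move apart are unreachable for a queen.
if __name__ == "__main__":
    assert Queen(2, 2).can_attack(Queen(5, 5))
    assert not Queen(0, 0).can_attack(Queen(1, 2))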
# Variables with Global Scope
name = 'XXX'
employee_id = 0
salary = 0.0
is_manager = False
def get_employee_details():
print("Starting the Function")
name = input('Enter your name: ') # Creating the Local Variable
print(f"Local Name: {name}")
print("Starting the Program")
get_employee_details() # Invoking the function
print(f"Name: {name}")
from axiom.test.historic.stubloader import StubbedTest
from xmantissa.ixmantissa import INavigableElement
from xquotient.compose import Composer, Drafts
class DraftsUpgradeTest(StubbedTest):
"""
Test that the Drafts item has been removed and is no longer a powerup for a
composer.
"""
def test_upgrade(self):
self.assertEqual(self.store.count(Drafts), 0)
composer = self.store.findUnique(Composer)
for pup in composer.powerupsFor(INavigableElement):
self.failIf(isinstance(pup, Drafts))
#! /usr/bin/python
#
# riak_python_delete.py
#
# Feb/09/2015
#
# ------------------------------------------------------------------
import cgi
import sys
#
sys.path.append ('/var/www/data_base/common/python_common')
#
from cgi_manipulate import parse_parameter
from curl_get import curl_delete_proc
# ------------------------------------------------------------------
#
print ("Content-type: text/html\n\n")
#
#
url_base = 'http://host_ubuntu1:8098/riak/shimane'
#
array_bb = parse_parameter ()
#
for it in range (len(array_bb)):
key_in = array_bb[it]
url_target = url_base + '/' + key_in
curl_delete_proc (url_target)
#
print ("*** OK ***<br />")
#
# ---------------------------------------------------------------
# run.py
import os
import yaml
import arxiv
import textwrap
import smtplib, ssl
from time import sleep
from pathlib import Path
from datetime import date, datetime
today = date.today()
def build_query(domains, keyword):
query = '('
for i, domain in enumerate(domains):
query += f'cat:{domain}'
if i != len(domains) - 1:
query += ' OR '
query += f') AND ({keyword})'
return query
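# Example: build_query(['cs.LG', 'stat.ML'], 'all:"graph neural network"')
# returns '(cat:cs.LG OR cat:stat.ML) AND (all:"graph neural network")'.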
def build_content(config):
domains = config['domains']
keywords = config['keywords']
query_config = config['query_config']
total_mail = len(config['keywords'])
subject_placeholder = 'arXiv newsletter ' + str(today) + ' {index}/' + str(total_mail)
content_placeholder = '\n' + '*' * 35 + '\n ' + subject_placeholder + ' \n' + '*' * 35 + '\n'
entry_placeholder = '{index}. {title}\n{authors}\nPublished at: {publish}\nUpdated at: {update}\nCategories: {categories}\n{notes}\n{link}\n\nAbstract:\n{abstract}\n'
messages = []
for i, keyword in enumerate(keywords):
query = build_query(domains, keyword)
        while True:
            try:
                results = arxiv.query(query=query, **query_config)
                break
            except Exception:
                # retry on transient arXiv API errors after a short pause
                sleep(3)
entries = ''
for j, result in enumerate(results):
entry = entry_placeholder.format(
index=j + 1,
title=result.title,
authors=', '.join(result.authors),
publish=result.published,
update=result.updated,
categories=', '.join([tag.term for tag in result.tags]),
link=result.arxiv_url,
abstract=result.summary,
notes=f'Comments: {result.arxiv_comment}\n' if result.arxiv_comment is not None else ''
)
entries += entry + '\n'
subject = subject_placeholder.format(index=i + 1)
content = content_placeholder.format(index=i + 1)
content += '\nQuery: ' + keyword + '\n\n' + entries
# content = textwrap.wrap(content, width=80, replace_whitespace=False)
# content = '\n'.join(content)
messages.append((subject, content))
return messages
def send_mail(config):
mail_config = config['mail']
smtp_server = mail_config['server']
port = mail_config['port']
sender = mail_config['user']
password = mail_config['password']
context = ssl.create_default_context()
comments = '\nPowered by arXiv-newsletter \nhttps://github.com/SXKDZ/arXiv-newsletter\n'
messages = build_content(config)
for recipient in mail_config['recipient']:
for i, each_message in enumerate(messages):
subject, content = each_message
header = f'To: {recipient}\nFrom: arXiv-bot <{sender}>\nSubject: {subject}\n'
message = header + content + comments
print(header)
with open(f'{today}/{i + 1}.txt', 'w+') as f:
f.write(message)
try:
with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
server.login(sender, password)
server.sendmail(sender, recipient, message.encode('ascii', 'ignore').decode('ascii'))
except smtplib.SMTPDataError as e:
print(e.smtp_error)
print()
def main():
os.chdir(Path(__file__).parent.absolute())
try:
os.mkdir(f'{today}')
except OSError:
pass
with open('config.yml', 'r') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
send_mail(config)
if __name__ == '__main__':
main()
#!/usr/bin/env python3
# coding: utf8
# Author: <NAME>, 2015
'''
Send an email with attachments.
'''
# Adapted from:
# http://stackoverflow.com/a/3363254/5272432
# http://stackoverflow.com/questions/73781
import smtplib
from os.path import basename
from collections import namedtuple
from subprocess import Popen, PIPE
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
ConnectionSet = namedtuple('ConnectionSet',
'address server port user pwd')
def send_mail(send_from, send_to, connection, **msg_contents):
'''
Send an email with attachments.
'''
if not isinstance(send_to, list):
raise TypeError('Argument `send_to`: expected list, got %s' %
type(send_to))
msg = compose_message(send_from, send_to, **msg_contents)
if connection.server:
# Use a remote SMTP server.
send_with_smtplib(msg, send_from, send_to, connection)
else:
# Use Unix `sendmail`.
send_with_ssmtp(msg)
def compose_message(send_from, send_to, subject='', text='', files=()):
'''
Generate a properly formatted MIME message.
'''
msg = MIMEMultipart()
msg['From'] = send_from
msg['To'] = COMMASPACE.join(send_to)
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = subject
msg.attach(MIMEText(text))
for f in files or []:
with open(f, "rb") as fil:
msg.attach(MIMEApplication(
fil.read(),
Content_Disposition='attachment; filename="%s"' % basename(f),
Name=basename(f)
))
return msg.as_string()
def send_with_ssmtp(msg):
'''
Use Unix' sendmail/ssmtp command for sending.
'''
p = Popen(['sendmail', '-t', '-oi'], stdin=PIPE)
p.communicate(msg.encode('utf8'))
def send_with_smtplib(msg, send_from, send_to, connection):
'''
Login to an SMTP server for sending.
'''
server = smtplib.SMTP(connection.server, connection.port)
server.ehlo()
server.starttls()
server.login(connection.user, connection.pwd)
server.sendmail(send_from, send_to, msg)
server.close()
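# Usage sketch (placeholder server and credentials): remote SMTP delivery of
# one attachment; leave ConnectionSet.server empty to fall back to sendmail.
#
#     conn = ConnectionSet(address='bot@example.org', server='smtp.example.org',
#                          port=587, user='bot', pwd='secret')
#     send_mail('bot@example.org', ['alice@example.org'], conn,
#               subject='Report', text='See attachment.', files=['report.pdf'])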
"""!
@brief Templates for tests of SyncPR (oscillatory network based on Kuramoto model for pattern recognition).
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
"""
# Generate images without having a window appear.
import matplotlib;
matplotlib.use('Agg');
from pyclustering.nnet import solve_type;
from pyclustering.nnet.syncpr import syncpr, syncpr_visualizer;
class SyncprTestTemplates:
@staticmethod
def templateOutputDynamic(solver, ccore):
net = syncpr(5, 0.1, 0.1, ccore);
output_dynamic = net.simulate(10, 10, [-1, 1, -1, 1, -1], solver, True);
        assert len(output_dynamic) == 11;  # 10 simulation steps plus the initial state.
@staticmethod
def templateOutputDynamicLengthStaticSimulation(collect_flag, ccore_flag):
net = syncpr(5, 0.1, 0.1, ccore_flag);
output_dynamic = net.simulate_static(10, 10, [-1, 1, -1, 1, -1], solution = solve_type.FAST, collect_dynamic = collect_flag);
if (collect_flag is True):
            assert len(output_dynamic) == 11;  # 10 simulation steps plus the initial state.
else:
assert len(output_dynamic) == 1;
@staticmethod
def templateOutputDynamicLengthDynamicSimulation(collect_flag, ccore_flag):
net = syncpr(5, 0.1, 0.1, ccore_flag);
output_dynamic = net.simulate_dynamic([-1, 1, -1, 1, -1], solution = solve_type.FAST, collect_dynamic = collect_flag);
if (collect_flag is True):
assert len(output_dynamic) > 1;
else:
assert len(output_dynamic) == 1;
@staticmethod
def templateIncorrectPatternForSimulation(pattern, ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore=ccore_flag);
try: net.simulate(10, 10, pattern);
except: return;
assert False;
@staticmethod
def templateTrainNetworkAndRecognizePattern(ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [];
patterns += [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
net.train(patterns);
# recognize it
for i in range(len(patterns)):
output_dynamic = net.simulate(10, 10, patterns[i], solve_type.RK4, True);
ensembles = output_dynamic.allocate_sync_ensembles(0.5);
assert len(ensembles) == 2;
assert len(ensembles[0]) == len(ensembles[1]);
# sort results
ensembles[0].sort();
ensembles[1].sort();
assert (ensembles[0] == [0, 1, 2, 3, 4]) or (ensembles[0] == [5, 6, 7, 8, 9]);
assert (ensembles[1] == [0, 1, 2, 3, 4]) or (ensembles[1] == [5, 6, 7, 8, 9]);
@staticmethod
def templateIncorrectPatternForTraining(patterns, ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
try: net.train(patterns);
except: return;
assert False;
@staticmethod
def templatePatternVisualizer(collect_dynamic, ccore_flag = False):
net = syncpr(5, 0.1, 0.1, ccore = ccore_flag);
output_dynamic = net.simulate(10, 10, [-1, 1, -1, 1, -1], solve_type.RK4, collect_dynamic);
syncpr_visualizer.show_pattern(output_dynamic, 5, 2);
syncpr_visualizer.animate_pattern_recognition(output_dynamic, 1, 5);
@staticmethod
def templateMemoryOrder(ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [];
patterns += [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
net.train(patterns);
assert net.memory_order(patterns[0]) < 0.8;
assert net.memory_order(patterns[1]) < 0.8;
for pattern in patterns:
net.simulate(20, 10, pattern, solve_type.RK4);
memory_order = net.memory_order(pattern);
assert (memory_order > 0.95) and (memory_order <= 1.000005);
@staticmethod
    def templateStaticSimulation(ccore_flag):
        net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [];
patterns += [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
net.train(patterns);
net.simulate_static(20, 10, patterns[0], solve_type.RK4);
memory_order = net.memory_order(patterns[0]);
assert (memory_order > 0.95) and (memory_order <= 1.000005);
@staticmethod
def templateDynamicSimulation(ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [];
patterns += [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
net.train(patterns);
net.simulate_dynamic(patterns[0], order = 0.998, solution = solve_type.RK4);
memory_order = net.memory_order(patterns[0]);
assert (memory_order > 0.998) and (memory_order <= 1.0);
@staticmethod
def templateGlobalSyncOrder(ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
global_sync_order = net.sync_order();
assert (global_sync_order < 1.0) and (global_sync_order > 0.0);
net.train(patterns);
global_sync_order = net.sync_order();
assert (global_sync_order < 1.0) and (global_sync_order > 0.0);
@staticmethod
def templateLocalSyncOrder(ccore_flag):
net = syncpr(10, 0.1, 0.1, ccore_flag);
patterns = [ [1, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -1, -1, -1, -1, 1, 1, 1, 1, 1] ];
local_sync_order = net.sync_local_order();
assert (local_sync_order < 1.0) and (local_sync_order > 0.0);
net.train(patterns);
local_sync_order = net.sync_local_order();
assert (local_sync_order < 1.0) and (local_sync_order > 0.0);
@staticmethod
def templateIncorrectPatternValues(ccore_flag):
patterns = [];
patterns += [ [2, 1, 1, 1, 1, -1, -1, -1, -1, -1] ];
patterns += [ [-1, -2, -1, -1, -1, 1, 1, 1, 1, 1] ];
SyncprTestTemplates.templateIncorrectPatternForTraining(patterns, ccore_flag);
import urllib.request
import re
import argparse
import sys
import os
from time import sleep
__version__ = "1.0"
banner = """
\033[1m\033[91m .d888888b.
d88888888b
8888 8888
8888 8888
8888 8888 .d888888b.
8888 8888 d88888888b
8888 8888 8888 8888
8888 8888 8888 8888
8888 8888 8888 8888
d88P 8888 8888 8888
888888888888888b. 8888 8888 "88888888888888
8888888888888P" 8888 8888 Y8888888888888
8888 8888
"888 888"
Y888888Y
\033[93mMusical.ly Downloader (\033[91mMusicalSave\033[93m)
\033[94mMade with <3 by: \033[93mSadCode Official (\033[91mSadCode\033[93m)
\033[94mVersion: \033[93m{}
\033[0m
"""
def download(url):
print("\033[1;92m [+] Visiting -->\033[93m", url)
response = urllib.request.urlopen(url)
html = response.read().decode('utf-8')
print("\033[1;92m [+] Extracting Video")
file_url = re.search('http(.*)mp4', html)
file_url = file_url[0]
file_name = file_url.split("/")[-1]
path = file_name
print("\033[1;92m [+] Downloading -->\033[93m", path)
urllib.request.urlretrieve(file_url, path)
print("\033[1;33m [!] Successfully Downloaded To -->\033[93m",
os.getcwd()+"/"+str(file_name),
"\033[0m")
def main():
parser = argparse.ArgumentParser(description = "Musical.ly Downloader")
parser.add_argument("-u", "--url",
help = "URL to the Musical.ly video.")
parser.add_argument("-v", "--version",
action='store_true',
help = "Get the current version.")
args = parser.parse_args()
if args.version:
print(__version__)
elif args.url:
print(banner.format(__version__))
download(args.url)
elif len(sys.argv) == 1:
parser.print_help()
main()
# -*- coding: utf-8 -*-
"""save_princess_peach.constants.py
Constants for save-princes-peach.
"""
# Default file used as grid
DEFAULT_GRID = '/Users/JFermin/Documents/GitHub/itsMeMario/tests/test_grids/init_grid.txt'
# players
BOWSER = 'b'
PEACH = 'p'
MARIO = 'm'
PLAYERS = [BOWSER, PEACH, MARIO]
# additional tiles
HAZARDS = '*'
SAFE = '-'
VISITED = "v"
ALL_POSITIONS = [BOWSER, PEACH, MARIO, HAZARDS, SAFE, VISITED]
SAFE_POSITION = [SAFE, PEACH, BOWSER]
# Movements
UP = 'UP'
DOWN = 'DOWN'
LEFT = 'LEFT'
RIGHT = 'RIGHT'
import math
print("Calculator")
a = int(input("Give the first number: "))
b = int(input("Give the second number: "))
while True:
print("(1) +\n(2) -\n(3) *\n(4) /\n(5)sin(number1/number2)\n(6)cos(number1/number2)\n(7) Change numbers\n(8) Quit\nCurrent numbers: ", a, b)
choice = int(input("Please select something (1-8): "))
if(choice == 1):
result = a + b
print("The result is:", result)
elif(choice == 2):
result = a - b
print("The result is:", result)
elif(choice == 3):
result = a * b
print("The result is:", result)
elif(choice == 4):
result = a / b
print("The result is:", result)
elif(choice == 5):
result = math.sin(a/b)
print("The result is:", result)
elif(choice == 6):
result = math.cos(a/b)
print("The result is:", result)
elif(choice == 7):
a = int(input("Give the first number: "))
b = int(input("Give the second number: "))
elif(choice == 8):
print("Thank you!")
break
else:
print("Selection was not correct.")
info = []
contents = ""
with open("AoC4.txt", "r") as File1:
for line in File1:
contents += line
info = contents.split("\n\n")
valid = 0
for thing in info:
if "byr" in thing and \
"iyr" in thing and \
"eyr" in thing and \
"hgt" in thing and \
"hcl" in thing and \
"ecl" in thing and \
"pid" in thing:
valid += 1
print(valid)
# Copyright 2006-2021 by <NAME>. All rights reserved.
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""Bio.Align support module (not for general use).
Unless you are writing a new parser or writer for Bio.Align, you should not
use this module. It provides base classes to try and simplify things.
"""
from abc import ABC
from abc import abstractmethod
from Bio import StreamModeError
class AlignmentIterator(ABC):
"""Base class for building Alignment iterators.
You should write a parse method that returns an Alignment generator. You
may wish to redefine the __init__ method as well.
"""
def __init__(self, source, mode="t", fmt=None):
"""Create an AlignmentIterator object.
Arguments:
- source - input file stream, or path to input file
This method MAY be overridden by any subclass.
Note when subclassing:
- there should be a single non-optional argument, the source.
- you can add additional optional arguments.
"""
try:
self.stream = open(source, "r" + mode)
self.should_close_stream = True
except TypeError: # not a path, assume we received a stream
if mode == "t":
if source.read(0) != "":
raise StreamModeError(
"%s files must be opened in text mode." % fmt
) from None
elif mode == "b":
if source.read(0) != b"":
raise StreamModeError(
"%s files must be opened in binary mode." % fmt
) from None
else:
raise ValueError("Unknown mode '%s'" % mode) from None
self.stream = source
self.should_close_stream = False
try:
self.alignments = self.parse(self.stream)
except Exception:
if self.should_close_stream:
self.stream.close()
raise
def __next__(self):
"""Return the next entry."""
try:
return next(self.alignments)
except Exception:
if self.should_close_stream:
self.stream.close()
raise
def __iter__(self):
"""Iterate over the entries as Alignment objects.
This method SHOULD NOT be overridden by any subclass. It should be
left as is, which will call the subclass implementation of __next__
to actually parse the file.
"""
return self
@abstractmethod
def parse(self, stream):
"""Start parsing the file, and return an Alignment iterator."""
class AlignmentWriter:
"""Base class for alignment writers. This class should be subclassed.
It is intended for sequential file formats with an (optional)
header, one or more alignments, and an (optional) footer.
The user may call the write_file() method to write a complete
file containing the alignments.
Alternatively, users may call the write_header(), followed
by multiple calls to write_alignment() and/or write_alignments(),
followed finally by write_footer().
Note that write_header() cannot require any assumptions about
the number of alignments.
"""
def __init__(self, target, mode="w"):
"""Create the writer object."""
if mode == "w":
try:
target.write("")
except TypeError:
# target was opened in binary mode
raise StreamModeError("File must be opened in text mode.") from None
except AttributeError:
# target is a path
stream = open(target, mode)
else:
stream = target
elif mode == "wb":
try:
target.write(b"")
except TypeError:
# target was opened in text mode
raise StreamModeError("File must be opened in binary mode.") from None
except AttributeError:
# target is a path
stream = open(target, mode)
else:
stream = target
else:
raise RuntimeError("Unknown mode '%s'" % mode)
self._target = target
self.stream = stream
def write_header(self, alignments):
"""Write the file header to the output file."""
pass
##################################################
# You MUST implement this method in the subclass #
# if the file format defines a file header. #
##################################################
def write_footer(self):
"""Write the file footer to the output file."""
pass
##################################################
# You MUST implement this method in the subclass #
# if the file format defines a file footer. #
##################################################
def write_alignment(self, alignment):
"""Write a single alignment to the output file.
alignment - an Alignment object
"""
raise NotImplementedError("This method should be implemented")
###################################################
# You MUST implement this method in the subclass. #
###################################################
def write_alignments(self, alignments, maxcount=None):
"""Write alignments to the output file, and return the number of alignments.
alignments - A list or iterator returning Alignment objects
maxcount - The maximum number of alignments allowed by the
file format, or None if there is no maximum.
"""
count = 0
if maxcount is None:
for alignment in alignments:
self.write_alignment(alignment)
count += 1
else:
for alignment in alignments:
if count == maxcount:
if maxcount == 1:
raise ValueError("More than one alignment found")
else:
raise ValueError(
"Number of alignments is larger than %d" % maxcount
)
self.write_alignment(alignment)
count += 1
return count
def write_file(self, alignments, mincount=0, maxcount=None):
"""Write a file with the alignments, and return the number of alignments.
alignments - A list or iterator returning Alignment objects
"""
try:
self.write_header(alignments)
count = self.write_alignments(alignments, maxcount)
self.write_footer()
finally:
if self.stream is not self._target:
self.stream.close()
if count < mincount:
if mincount == 1: # Common case
raise ValueError("Must have one alignment")
elif mincount == maxcount:
raise ValueError(
"Number of alignments is %d (expected %d)" % (count, mincount)
)
else:
raise ValueError(
"Number of alignmnets is %d (expected at least %d)"
% (count, mincount)
)
return count
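# Minimal subclass sketch (illustrative; "toy" is not a real Bio.Align format).
# It shows the parse() contract that AlignmentIterator expects; a real parser
# would yield Alignment objects rather than raw strings.
class _ToyIterator(AlignmentIterator):
    def parse(self, stream):
        """Yield one "alignment" per input line (placeholder for a real parser)."""
        for line in stream:
            yield line.rstrip("\n")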
|
StarcoderdataPython
|
11371127
|
def last_occurrence(pattern):
    # Local stand-in (an assumption) for the original external
    # suffix.last_occurrence helper: rightmost 0-based index of each character.
    return {c: i for i, c in enumerate(pattern)}
def horspool(t, w, n, m):
    """Horspool search: yield 0-based start indices of pattern w (length m) in text t (length n)."""
    LAST = last_occurrence(w[:-1])
    i = 0
    while i <= n - m:
        c = t[i + m - 1]  # text character aligned with the last pattern character
        if w[m - 1] == c:
            j = 0
            while j < m - 1 and t[i + j] == w[j]:
                j = j + 1
            if j == m - 1:
                yield i
        # shift so the rightmost earlier occurrence of c (if any) lines up with it
        i = i + (m - 1 - LAST.get(c, -1))
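# Quick check of the generator above (illustrative):
#   list(horspool("abracadabra", "abra", 11, 4)) -> [0, 7]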
|
StarcoderdataPython
|
4907795
|
from threading import Timer, Thread
from time import time
class RepeatedTimer():
def __init__(self, interval, function, timelimit = None, countlimit = None, callback = None):
        # store the tick interval on the instance
        self.interval = interval
        # store the target function on the instance
        self.function = function
        # running-state flag for the underlying Timer
        self.is_running = False
        # argument validation: exactly one of the two limits must be given
assert not ((timelimit is not None) and (countlimit is not None)), 'Cannot use both time limit and count limit'
assert not ((timelimit is None) and (countlimit is None)), 'Time limit xor count limit must be defined'
# announce countlimit
self.countlimit = countlimit
if timelimit is not None:
# announce timelimit
self.timelimit = timelimit
elif self.countlimit is not None:
# convert countlimit to timelimit
self.timelimit = self.interval*countlimit - self.interval/2
# recalibrate time limit to take into account first run at time t=0
self.timelimit = self.timelimit - self.interval
# announce callback function
self.callback = callback
def __run(self):
self.is_running = False
self.start_it()
self.function()
def start_it(self):
if not self.is_running and (time() - self.time_init) < self.timelimit:
self.next_call += self.interval
self._timer = Timer(self.next_call - time(), self.__run)
self._timer.start()
self.is_running = True
else:
self.stop()
def start_all(self):
# get starting time for time limit
self.time_init = time()
# start 0th instance
initial_thread = Thread(target=self.function)
initial_thread.start()
# get starting time for 0th timed call
self.next_call = time()
self.start_it()
def stop(self):
self._timer.cancel()
self.is_running = False
if self.callback is not None:
self.callback()
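# Minimal usage sketch (illustrative; the function, interval and limits below
# are made up): tick every 0.5 s, stop automatically after 5 calls, then report.
if __name__ == '__main__':
    def tick():
        print('tick')
    rt = RepeatedTimer(0.5, tick, countlimit=5, callback=lambda: print('done'))
    rt.start_all()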
|
StarcoderdataPython
|
6468973
|
from .load_model import load_model
from .SiameseModel import SiameseModel
__all__ = [
"load_model",
"SiameseModel",
]
|
StarcoderdataPython
|
3237279
|
<filename>web/blueprints/task/__init__.py
from dataclasses import asdict
from typing import NoReturn
from flask import Blueprint, jsonify, url_for, abort, flash, redirect, request, \
render_template
from flask_login import current_user
from pycroft.exc import PycroftException
from pycroft.lib.task import cancel_task, task_type_to_impl, \
manually_execute_task, reschedule_task
from pycroft.model import session
from pycroft.model.facilities import Building
from pycroft.model.task import Task, TaskStatus
from web.blueprints import redirect_or_404
from web.blueprints.access import BlueprintAccess
from web.blueprints.helpers.user import get_user_or_404
from web.blueprints.navigation import BlueprintNavigation
from web.blueprints.task.forms import RescheduleTaskForm
from web.blueprints.task.tables import TaskTable
from web.table.table import datetime_format
from web.template_filters import datetime_filter
bp = Blueprint('task', __name__)
access = BlueprintAccess(bp, required_properties=['user_show'])
nav = BlueprintNavigation(bp, "Tasks", icon='fa-tasks', blueprint_access=access)
def format_parameters(parameters):
"""Make task parameters human readable by looking up objects behind ids"""
# Replace building_id by the buildings short name
if bid := parameters.get("building_id"):
if building := Building.get(bid):
parameters["building"] = building.short_name
del parameters["building_id"]
return parameters
def task_row(task: Task):
task_impl = task_type_to_impl.get(task.type)
T = TaskTable
return {
"id": task.id,
"user": T.user.value(
href=url_for('user.user_show', user_id=task.user.id),
title=task.user.name
),
"name": task_impl.name,
"type": task.type.name,
"status": task.status.name,
"parameters": format_parameters(asdict(task.parameters)),
"errors": task.errors if task.errors is not None else list(),
"due": datetime_format(task.due, default='', formatter=datetime_filter),
"created": task.created.strftime("%Y-%m-%d %H:%M:%S"),
"creator": T.creator.value(
href=url_for('user.user_show', user_id=task.creator.id),
title=task.creator.name
),
'actions': [
T.actions.single_value(
href=url_for(
'.cancel_user_task',
task_id=task.id,
redirect=url_for('user.user_show', user_id=task.user.id, _anchor='tasks')
),
title="Abbrechen",
icon='fa-times',
btn_class='btn-link'
),
T.actions.single_value(
href=url_for(
'.reschedule_user_task',
task_id=task.id,
redirect=url_for('user.user_show', user_id=task.user.id, _anchor='tasks')
),
title="Datum Ändern",
icon='fa-calendar-alt',
btn_class='btn-link'
),
T.actions.single_value(
href=url_for(
'.manually_execute_user_task',
task_id=task.id,
redirect=url_for('user.user_show', user_id=task.user.id, _anchor='tasks')
),
title="Sofort ausführen",
icon='fa-fast-forward',
btn_class='btn-link'
)
] if task.status == TaskStatus.OPEN else None,
}
@bp.route("/user/<int:user_id>/json")
def json_tasks_for_user(user_id):
user = get_user_or_404(user_id)
return jsonify(items=[task_row(task) for task in user.tasks])
@bp.route("/user/json")
def json_user_tasks():
failed_only = bool(request.args.get("failed_only", False))
open_only = bool(request.args.get("open_only", False))
    tasks = Task.q.order_by(Task.status.desc(), Task.due.asc())
if failed_only:
tasks = tasks.filter_by(status=TaskStatus.FAILED).all()
elif open_only:
tasks = tasks.filter_by(status=TaskStatus.OPEN).all()
else:
tasks = tasks.all()
return jsonify(items=[task_row(task) for task in tasks])
def get_task_or_404(task_id) -> Task | NoReturn:
if task := session.session.get(Task, task_id):
return task
abort(404)
@bp.route("/<int:task_id>/manually_execute")
@access.require('user_change')
def manually_execute_user_task(task_id: int):
task = get_task_or_404(task_id)
try:
manually_execute_task(task, processor=current_user)
session.session.commit()
except Exception as e:
if not isinstance(e, PycroftException):
import logging
logging.getLogger('pycroft.web').error(
"Unexpected error in manual task execution: %s", e,
exc_info=True
)
flash(f"Fehler bei der Ausführung: {e}", 'error')
session.session.rollback()
else:
flash("Aufgabe erfolgreich ausgeführt", 'success')
return redirect_or_404(request.args.get("redirect"))
@bp.route("/<int:task_id>/cancel")
@access.require('user_change')
def cancel_user_task(task_id):
task = get_task_or_404(task_id)
cancel_task(task, current_user)
session.session.commit()
flash('Aufgabe erfolgreich abgebrochen.', 'success')
return redirect_or_404(request.args.get("redirect"))
@bp.route("/<int:task_id>/reschedule", methods=['GET', 'POST'])
@access.require('user_change')
def reschedule_user_task(task_id):
task = get_task_or_404(task_id)
form = RescheduleTaskForm()
return_url = url_for('user.user_show', user_id=task.user.id, _anchor='tasks')
if form.validate_on_submit():
reschedule_task(task, form.full_datetime, processor=current_user)
session.session.commit()
flash(f'Datum erfolgreich auf {form.full_datetime} geändert.', 'success')
return redirect(return_url)
return render_template(
"task/reschedule_task.html",
form_args={'form': form, 'cancel_to': return_url}
)
@bp.route("/user")
@nav.navigate("Tasks")
def user_tasks():
return render_template(
"task/tasks.html",
task_table=TaskTable(data_url=url_for('.json_user_tasks', open_only=1)),
task_failed_table=TaskTable(data_url=url_for('.json_user_tasks', failed_only=1),
sort_order='desc'),
page_title="Aufgaben (Nutzer)"
)
|
StarcoderdataPython
|
11216962
|
<reponame>fau-fablab/etiketten
# -*- coding: utf-8 -*-
"""
Performs some tests with pyBarcode. All created barcodes are saved in the
tests subdirectory with a tests.html to watch them.
"""
from __future__ import unicode_literals, print_function
__docformat__ = 'restructuredtext en'
import codecs
import os
import sys
import webbrowser
from barcode import get_barcode, get_barcode_class, __version__
try:
from barcode.writer import ImageWriter
except ImportError:
ImageWriter = None
PATH = os.path.dirname(os.path.abspath(__file__))
TESTPATH = os.path.join(PATH, 'tests')
HTMLFILE = os.path.join(TESTPATH, 'tests.html')
HTML = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>pyBarcode {version} Test</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
</head>
<body>
<h1>pyBarcode {version} Tests</h1>
{body}
</body>
</html>
"""
OBJECTS = ('<p><h2>{name}</h2><br />\n'
'<object data="{filename}" type="image/svg+xml">\n'
'<param name="src" value="{filename}" /></object>')
IMAGES = ('<h3>As PNG-Image</h3><br />\n'
'<img src="{filename}" alt="{name}" /></p>\n')
NO_PIL = '<h3>PIL was not found. No PNG-Image created.</h3></p>\n'
TESTCODES = (
('ean8', '40267708'),
('ean13', '5901234123457'),
('upca', '36000291453'),
('jan', '4901234567894'),
('isbn10', '3-12-517154-7'),
('isbn13', '978-3-16-148410-0'),
('issn', '1144875X'),
('code39', 'Example Code 39'),
('pzn', '487780'),
)
def test():
if not os.path.isdir(TESTPATH):
try:
os.mkdir(TESTPATH)
        except OSError as e:
print('Test not run.')
print('Error:', e)
sys.exit(1)
objects = []
append = lambda x, y: objects.append(OBJECTS.format(filename=x, name=y))
append_img = lambda x, y: objects.append(IMAGES.format(filename=x, name=y))
options = dict(module_width=0.495, module_height=25.0)
for codename, code in TESTCODES:
bcode = get_barcode(codename, code)
filename = bcode.save(os.path.join(TESTPATH, codename))
print('Code: {0}, Input: {1}, Output: {2}'.format(
bcode.name, code, bcode.get_fullcode()))
append(filename, bcode.name)
if ImageWriter is not None:
bcodec = get_barcode_class(codename)
bcode = bcodec(code, writer=ImageWriter())
opts = dict(font_size=14, text_distance=1)
if codename.startswith('i'):
opts['center_text'] = False
filename = bcode.save(os.path.join(TESTPATH, codename), opts)
append_img(filename, bcode.name)
else:
objects.append(NO_PIL)
# Save htmlfile with all objects
with codecs.open(HTMLFILE, 'w', encoding='utf-8') as f:
obj = '\n'.join(objects)
f.write(HTML.format(version=__version__, body=obj))
if __name__ == '__main__':
test()
webbrowser.open(HTMLFILE)
|
StarcoderdataPython
|
42802
|
# NOTE: override the kaolin one
from .renderer.base import Renderer as DIBRenderer
|
StarcoderdataPython
|
5047246
|
from dataclasses import dataclass, field
from decimal import Decimal
from typing import List, Optional, Union
@dataclass
class Item:
class Meta:
name = "ITEM"
quantity: Optional[int] = field(
default=None,
metadata={
"type": "Attribute",
}
)
price: Optional[Decimal] = field(
default=None,
metadata={
"type": "Attribute",
"min_inclusive": Decimal("0"),
}
)
@dataclass
class LongItemDefn(Item):
class Meta:
name = "LONG_ITEM_DEFN"
description: Optional[str] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
}
)
@dataclass
class ShortItemDefn(Item):
class Meta:
name = "SHORT_ITEM_DEFN"
id: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
}
)
@dataclass
class Poitems:
class Meta:
name = "POITEMS"
item: List[Union[Item, ShortItemDefn, LongItemDefn]] = field(
default_factory=list,
metadata={
"type": "Element",
"namespace": "",
"min_occurs": 1,
}
)
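# Construction sketch (values are illustrative only):
# po = Poitems(item=[Item(quantity=2, price=Decimal("9.99")),
#                    ShortItemDefn(id="A-1", quantity=1, price=Decimal("4.50"))])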
|
StarcoderdataPython
|
5024307
|
<gh_stars>0
import pygame
import pygame.gfxdraw
from typing import List, Union, Tuple
from .. import ui_manager
from ..core.ui_element import UIElement
from ..elements import ui_text_box
class UITooltip(UIElement):
"""
A tool tip is a floating block of text that gives additional information after a user hovers over an interactive
part of a GUI for a short time. In Pygame GUI the tooltip's text is style-able with HTML.
At the moment the tooltips are only available as an option on UIButton elements.
Tooltips also don't allow a container as they are designed to overlap normal UI boundaries and be contained only
within the 'root' window/container, which is synonymous with the pygame display surface.
:param html_text: Text styled with HTML, to be displayed on the tooltip.
:param hover_distance: Distance in pixels between the tooltip and the thing being hovered.
:param manager: The UIManager that manages this element.
:param parent_element: The element this element 'belongs to' in the theming hierarchy.
:param object_id: A custom defined ID for fine tuning of theming.
"""
def __init__(self, html_text: str, hover_distance: Tuple[int, int],
manager: ui_manager.UIManager,
parent_element: UIElement = None,
object_id: Union[str, None] = None):
new_element_ids, new_object_ids = self.create_valid_ids(parent_element=parent_element,
object_id=object_id,
element_id='tool_tip')
super().__init__(relative_rect=pygame.Rect((0, 0), (-1, -1)),
manager=manager,
container=None,
starting_height=manager.get_sprite_group().get_top_layer(),
layer_thickness=1,
element_ids=new_element_ids,
object_ids=new_object_ids)
rect_width = 170
rect_width_string = self.ui_theme.get_misc_data(self.object_ids, self.element_ids, 'rect_width')
if rect_width_string is not None:
rect_width = int(rect_width_string)
self.hover_distance_from_target = hover_distance
self.text_block = ui_text_box.UITextBox(html_text,
pygame.Rect(0, 0, rect_width, -1),
manager=self.ui_manager,
layer_starting_height=self._layer+1,
parent_element=self)
self.relative_rect.height = self.text_block.rect.height
self.relative_rect.width = self.text_block.rect.width
self.rect.width = self.text_block.rect.width
self.rect.height = self.text_block.rect.height
        # Placeholder empty surface; the visible content is drawn by the text block element
self.image = pygame.Surface((0, 0))
def kill(self):
"""
Overrides the UIElement's default kill method to also kill the text block element that helps make up the
complete tool tip.
"""
self.text_block.kill()
super().kill()
def find_valid_position(self, position: pygame.math.Vector2) -> bool:
"""
Finds a valid position for the tool tip inside the root container of the UI.
The algorithm starts from the position of the target we are providing a tool tip for then it
tries to fit the rectangle for the tool tip onto the screen by moving it above, below, to the left and to the
right, until we find a position that fits the whole tooltip rectangle on the screen at once.
If we fail to manage this then the method will return False. Otherwise it returns True and set the position
of the tool tip to our valid position.
:param position: A 2D vector representing the position of the target this tool tip is for.
:return bool: returns True if we find a valid (visible) position and False if we do not.
"""
window_rect = self.ui_manager.get_window_stack().get_root_window().get_container().rect
self.rect.centerx = position.x
self.rect.top = position.y + self.hover_distance_from_target[1]
if window_rect.contains(self.rect):
self.text_block.rect.x = self.rect.x
self.text_block.rect.y = self.rect.y
return True
else:
if self.rect.bottom > window_rect.bottom:
self.rect.bottom = position.y - self.hover_distance_from_target[1]
if self.rect.right > window_rect.right:
self.rect.right = window_rect.right - self.hover_distance_from_target[0]
if self.rect.left < window_rect.left:
self.rect.left = window_rect.left + self.hover_distance_from_target[0]
if window_rect.contains(self.rect):
self.text_block.rect.x = self.rect.x
self.text_block.rect.y = self.rect.y
return True
else:
return False
|
StarcoderdataPython
|
345567
|
import os
import pytest
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
pytestmark = pytest.mark.django_db
def test_image_delete_from_folder(foobar):
banner = SimpleUploadedFile(
name="banner.png",
content=open("artists/tests/test_banner.png", "rb").read(),
content_type="image/png",
)
assert not os.path.exists(f"{settings.MEDIA_ROOT}/artist_banner/banner.png")
foobar.artist_banner = banner
foobar.save()
assert os.path.exists(f"{settings.MEDIA_ROOT}/artist_banner/banner.png")
foobar.delete()
assert not os.path.exists(f"{settings.MEDIA_ROOT}/artist_banner/banner.png")
|
StarcoderdataPython
|
3314451
|
# -*- coding: utf-8 -*-
import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that_in, check_that, has_length, is_integer, is_dict, is_list, require_that, \
equal_to
from common.base_test import BaseTest
SUITE = {
"description": "Method 'lookup_account_names'"
}
@lcc.prop("main", "type")
@lcc.prop("positive", "type")
@lcc.tags("api", "database_api", "database_api_accounts", "lookup_account_names")
@lcc.suite("Check work of method 'lookup_account_names'", rank=1)
class LookupAccountNames(BaseTest):
def __init__(self):
super().__init__()
self.__database_api_identifier = None
def check_fields_account_ids_format(self, response, field):
if not self.validator.is_account_id(response[field]):
lcc.log_error("Wrong format of '{}', got: {}".format(field, response[field]))
else:
lcc.log_info("'{}' has correct format: account_object_type".format(field))
def check_account_structure(self, account_info):
if check_that("account_info", account_info, has_length(16)):
check_that_in(
account_info,
"network_fee_percentage", is_integer(),
"active", is_dict(),
"options", is_dict(),
"whitelisting_accounts", is_list(),
"blacklisting_accounts", is_list(),
"whitelisted_accounts", is_list(),
"blacklisted_accounts", is_list(),
"active_special_authority", is_list(),
"top_n_control_flags", is_integer(),
"accumulated_reward", is_integer(),
"extensions", is_list(),
quiet=True
)
self.check_fields_account_ids_format(account_info, "id")
if not self.validator.is_account_id(account_info["registrar"]):
lcc.log_error("Wrong format of 'registrar', got: {}".format(account_info["registrar"]))
else:
lcc.log_info("'registrar' has correct format: account_object_type")
if not self.validator.is_account_name(account_info["name"]):
lcc.log_error("Wrong format of 'name', got: {}".format(account_info["name"]))
else:
lcc.log_info("'name' has correct format: account_name")
if not self.validator.is_echorand_key(account_info["echorand_key"]):
lcc.log_error("Wrong format of 'echorand_key', got: {}".format(account_info["echorand_key"]))
else:
lcc.log_info("'echorand_key' has correct format: echo_rand_key")
if not self.validator.is_account_statistics_id(account_info["statistics"]):
lcc.log_error("Wrong format of 'statistics', got: {}".format(account_info["statistics"]))
else:
lcc.log_info("'statistics' has correct format: account_statistics_object_type")
if len(account_info) == 21:
if not self.validator.is_vesting_balance_id(account_info["cashback_vb"]):
lcc.log_error("Wrong format of 'cashback_vb', got: {}".format(account_info["cashback_vb"]))
else:
lcc.log_info("'cashback_vb' has correct format: vesting_balance_object_type")
lcc.set_step("Check 'active' field")
if check_that("active", account_info["active"], has_length(3)):
check_that_in(
account_info["active"],
"weight_threshold", is_integer(),
"account_auths", is_list(),
"key_auths", is_list(),
quiet=True
)
lcc.set_step("Check 'options' field")
if check_that("active", account_info["options"], has_length(3)):
delegating_account = account_info["options"]["delegating_account"]
if not self.validator.is_account_id(delegating_account):
lcc.log_error("Wrong format of 'delegating_account'got: {}".format(delegating_account))
else:
lcc.log_info("'{}' has correct format: account_object_type".format(delegating_account))
check_that_in(
account_info["options"],
"delegate_share", is_integer(),
"extensions", is_list(),
quiet=True
)
def setup_suite(self):
super().setup_suite()
lcc.set_step("Setup for {}".format(self.__class__.__name__))
self.__database_api_identifier = self.get_identifier("database")
lcc.log_info("Database API identifier is '{}'".format(self.__database_api_identifier))
@lcc.test("Simple work of method 'lookup_account_names'")
def method_main_check(self):
lcc.set_step("Get info about default account")
params = [self.accounts[0], self.accounts[1], self.accounts[2]]
response_id = self.send_request(self.get_request("lookup_account_names", [params]),
self.__database_api_identifier)
response = self.get_response(response_id, log_response=True)
lcc.log_info("Call method 'lookup_account_names' with param: {}".format(params))
lcc.set_step("Check length of received accounts")
require_that(
"'list of received accounts'",
response["result"], has_length(len(params))
)
for account_num, account_info in enumerate(response["result"]):
lcc.set_step("Checking account #{} - '{}'".format(account_num, params[account_num]))
self.check_account_structure(account_info)
@lcc.prop("positive", "type")
@lcc.tags("api", "database_api", "database_api_accounts", "lookup_account_names")
@lcc.suite("Positive testing of method 'lookup_account_names'", rank=2)
class PositiveTesting(BaseTest):
def __init__(self):
super().__init__()
self.__database_api_identifier = None
self.__registration_api_identifier = None
self.echo_acc0 = None
@staticmethod
def compare_accounts(account, performed_account):
check_that_in(
account,
"registrar", equal_to(performed_account["registrar"]),
"name", equal_to(performed_account["name"]),
"active", equal_to(performed_account["active"]),
"echorand_key", equal_to(performed_account["echorand_key"]),
"options", equal_to(performed_account["options"])
)
def setup_suite(self):
super().setup_suite()
self._connect_to_echopy_lib()
lcc.set_step("Setup for {}".format(self.__class__.__name__))
self.__database_api_identifier = self.get_identifier("database")
self.__registration_api_identifier = self.get_identifier("registration")
lcc.log_info(
"API identifiers are: database='{}', registration='{}'".format(self.__database_api_identifier,
self.__registration_api_identifier))
self.echo_acc0 = self.get_account_id(self.accounts[0], self.__database_api_identifier,
self.__registration_api_identifier)
lcc.log_info("Echo account is '{}'".format(self.echo_acc0))
def teardown_suite(self):
self._disconnect_to_echopy_lib()
super().teardown_suite()
@lcc.test("Create accounts using account_create operation and get info about them")
@lcc.depends_on("DatabaseApi.Accounts.LookupAccountNames.LookupAccountNames.method_main_check")
def get_info_about_created_accounts(self, get_random_valid_account_name):
accounts = ["{}{}".format(get_random_valid_account_name, num) for num in range(2)]
accounts_public_keys = [self.generate_keys(), self.generate_keys()]
lcc.set_step("Perform two account creation operations and store accounts ids")
accounts = self.utils.get_account_id(self, accounts, accounts_public_keys, self.__database_api_identifier,
need_operations=True)
lcc.log_info("Two accounts created, ids: 1='{}', 2='{}'".format(accounts.get("accounts_ids")[0],
accounts.get("accounts_ids")[1]))
lcc.set_step("Get a list of created accounts by names")
response_id = self.send_request(self.get_request("lookup_account_names", [accounts.get("account_names")]),
self.__database_api_identifier)
response = self.get_response(response_id)
lcc.log_info("Call method 'lookup_account_names' with params: {}".format(accounts.get("account_names")))
accounts_info = response["result"]
for account_num, account in enumerate(accounts_info):
lcc.set_step("Checking account #{}".format(account_num))
perform_operation = accounts.get("list_operations")[account_num][0][1]
self.compare_accounts(account, perform_operation)
@lcc.test("Create account using account_create operation and "
"compare response from 'lookup_account_names' and 'get_objects'")
@lcc.depends_on("DatabaseApi.Accounts.LookupAccountNames.LookupAccountNames.method_main_check")
def compare_with_method_get_objects(self, get_random_valid_account_name):
account_name = get_random_valid_account_name
public_key = self.generate_keys()[1]
lcc.set_step("Perform account creation operation")
operation = self.echo_ops.get_account_create_operation(self.echo, account_name, public_key, public_key,
registrar=self.echo_acc0, signer=self.echo_acc0)
collected_operation = self.collect_operations(operation, self.__database_api_identifier)
broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=collected_operation,
log_broadcast=False)
if not self.is_operation_completed(broadcast_result, expected_static_variant=1):
raise Exception("Account is not created")
operation_result = self.get_operation_results_ids(broadcast_result)
lcc.log_info("Account is created, id='{}'".format(operation_result))
lcc.set_step("Get account by name")
response_id = self.send_request(self.get_request("lookup_account_names", [[account_name]]),
self.__database_api_identifier)
account_info_1 = self.get_response(response_id)["result"]
lcc.log_info("Call method 'get_account_by_name' with param: {}".format(account_name))
lcc.set_step("Get account by id")
account_id = self.get_account_by_name(account_name, self.__database_api_identifier).get("result").get("id")
response_id = self.send_request(self.get_request("get_objects", [[account_id]]),
self.__database_api_identifier)
account_info_2 = self.get_response(response_id)["result"]
lcc.log_info("Call method 'get_objects' with param: {}".format(account_id))
lcc.set_step("Checking created account")
for i, result in enumerate(account_info_1):
self.compare_accounts(result, account_info_2[i])
|
StarcoderdataPython
|
9654488
|
"""
Sort an XML file according to one or more provided schemas.
Based on https://github.com/OpenDataServices/iati-utils/blob/master/sort_iati.py
Copyright (c) 2013-2014 <NAME>
Copyright (c) 2016 Open Data Services Co-operative Limited
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from collections import OrderedDict
from warnings import warn
try:
import lxml.etree as ET
# Note that lxml is now "required" - it's listed as a requirement in
# setup.py and is needed for the tests to pass.
# However, stdlib etree still exists as an unsupported feature.
except ImportError:
import xml.etree.ElementTree as ET
warn('Using stdlib etree may work, but is not supported. Please install lxml.')
# Namespaces necessary for opening schema files
namespaces = {
'xsd': 'http://www.w3.org/2001/XMLSchema'
}
class XMLSchemaWalker(object):
"""
Class for traversing one or more XML schemas.
Based on the Schema2Doc class in https://github.com/IATI/IATI-Standard-SSOT/blob/version-2.02/gen.py
"""
def __init__(self, schemas):
"""
schema -- the filename of the schema to use, e.g.
'iati-activities-schema.xsd'
"""
self.trees = [ET.parse(schema) for schema in schemas]
def get_schema_element(self, tag_name, name_attribute):
"""
Return the specified element from the schema.
tag_name -- the name of the tag in the schema, e.g. 'complexType'
name_attribute -- the value of the 'name' attribute in the schema, ie.
the name of the element/type etc. being described,
e.g. iati-activities
"""
for tree in self.trees:
schema_element = tree.find("xsd:{0}[@name='{1}']".format(tag_name, name_attribute), namespaces=namespaces)
if schema_element is not None:
return schema_element
        return None  # not found in any of the schemas
def element_loop(self, element, path):
"""
Return information about the children of the supplied element.
"""
a = element.attrib
type_elements = []
if 'type' in a:
complexType = self.get_schema_element('complexType', a['type'])
if complexType is not None:
type_elements = (
complexType.findall('xsd:choice/xsd:element',
namespaces=namespaces) +
complexType.findall('xsd:sequence/xsd:element',
namespaces=namespaces))
children = (
element.findall(
'xsd:complexType/xsd:choice/xsd:element',
namespaces=namespaces)
+ element.findall(
'xsd:complexType/xsd:sequence/xsd:element',
namespaces=namespaces)
+ element.findall(
'xsd:complexType/xsd:all/xsd:element',
namespaces=namespaces)
+ type_elements)
child_tuples = []
for child in children:
a = child.attrib
if 'name' in a:
child_tuples.append((a['name'], child, None, a.get('minOccurs'), a.get('maxOccurs')))
else:
child_tuples.append((a['ref'], None, child, a.get('minOccurs'), a.get('maxOccurs')))
return child_tuples
def create_schema_dict(self, parent_name, parent_element=None):
"""
Create a nested OrderedDict representing the structure (and order!) of
elements in the provided schema.
"""
if parent_element is None:
parent_element = self.get_schema_element('element', parent_name)
if parent_element is None:
return {}
return OrderedDict([
(name, self.create_schema_dict(name, element))
for name, element, _, _, _ in self.element_loop(parent_element, '')])
def sort_element(element, schema_subdict):
"""
Sort the given element's children according to the order of schema_subdict.
"""
children = list(element)
for child in children:
element.remove(child)
keys = list(schema_subdict.keys())
def index_key(x):
if x.tag in keys:
return keys.index(x.tag)
else:
return len(keys) + 1
for child in sorted(children, key=index_key):
element.append(child)
sort_element(child, schema_subdict.get(child.tag, {}))
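# Usage sketch (file names are illustrative): sort a document in place against
# one schema and write it back out.
# walker = XMLSchemaWalker(['iati-activities-schema.xsd'])
# schema_dict = walker.create_schema_dict('iati-activities')
# tree = ET.parse('activities.xml')
# sort_element(tree.getroot(), schema_dict)
# tree.write('activities-sorted.xml')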
|
StarcoderdataPython
|
207792
|
<filename>test/test_profile.py
from flask import url_for
from flask_login import current_user
from config import TRACKER_PASSWORD_LENGTH_MAX
from config import TRACKER_PASSWORD_LENGTH_MIN
from tracker.form.user import ERROR_PASSWORD_CONTAINS_USERNAME
from tracker.form.user import ERROR_PASSWORD_INCORRECT
from tracker.form.user import ERROR_PASSWORD_REPEAT_MISMATCHES
from tracker.user import random_string
from .conftest import DEFAULT_USERNAME
from .conftest import assert_logged_in
from .conftest import assert_not_logged_in
from .conftest import logged_in
@logged_in
def test_change_password(db, client):
new_password = <PASSWORD>[::-1]
resp = client.post(url_for('tracker.edit_own_user_profile'), follow_redirects=True,
data=dict(password=<PASSWORD>, password_repeat=<PASSWORD>,
password_current=DEFAULT_USERNAME))
assert resp.status_code == 200
# logout and test if new password was applied
resp = client.post(url_for('tracker.logout'), follow_redirects=True)
assert_not_logged_in(resp)
resp = client.post(url_for('tracker.login'), follow_redirects=True,
data=dict(username=DEFAULT_USERNAME, password=<PASSWORD>))
assert_logged_in(resp)
assert DEFAULT_USERNAME == current_user.name
@logged_in
def test_invalid_password_length(db, client):
resp = client.post(url_for('tracker.edit_own_user_profile'), follow_redirects=True,
data=dict(password='<PASSWORD>', new_password='<PASSWORD>', password_current=DEFAULT_USERNAME))
assert 'Field must be between {} and {} characters long.' \
.format(TRACKER_PASSWORD_LENGTH_MIN, TRACKER_PASSWORD_LENGTH_MAX) in resp.data.decode()
assert resp.status_code == 200
@logged_in
def test_password_must_not_contain_username(db, client):
new_password = <PASSWORD>'.<PASSWORD>(DEFAULT_USERNAME)
resp = client.post(url_for('tracker.edit_own_user_profile'), follow_redirects=True,
data=dict(password=<PASSWORD>, password_repeat=<PASSWORD>,
password_current=DEFAULT_USERNAME))
assert resp.status_code == 200
assert ERROR_PASSWORD_CONTAINS_USERNAME in resp.data.decode()
@logged_in
def test_password_repeat_mismatches(db, client):
new_password = <PASSWORD>()
resp = client.post(url_for('tracker.edit_own_user_profile'), follow_redirects=True,
data=dict(password=<PASSWORD>, password_repeat=<PASSWORD>[::-1],
password_current=DEFAULT_USERNAME))
assert resp.status_code == 200
assert ERROR_PASSWORD_REPEAT_MISMATCHES in resp.data.decode()
@logged_in
def test_current_password_incorrect(db, client):
new_password = <PASSWORD>()
resp = client.post(url_for('tracker.edit_own_user_profile'), follow_redirects=True,
data=dict(password=<PASSWORD>, password_repeat=<PASSWORD>,
password_current=<PASSWORD>))
assert resp.status_code == 200
assert ERROR_PASSWORD_INCORRECT in resp.data.decode()
|
StarcoderdataPython
|
3597285
|
import boto3
exceptions = boto3.client('workmail').exceptions
DirectoryServiceAuthenticationFailedException = exceptions.DirectoryServiceAuthenticationFailedException
DirectoryUnavailableException = exceptions.DirectoryUnavailableException
EmailAddressInUseException = exceptions.EmailAddressInUseException
EntityAlreadyRegisteredException = exceptions.EntityAlreadyRegisteredException
EntityNotFoundException = exceptions.EntityNotFoundException
EntityStateException = exceptions.EntityStateException
InvalidConfigurationException = exceptions.InvalidConfigurationException
InvalidParameterException = exceptions.InvalidParameterException
InvalidPasswordException = exceptions.InvalidPasswordException
MailDomainNotFoundException = exceptions.MailDomainNotFoundException
MailDomainStateException = exceptions.MailDomainStateException
NameAvailabilityException = exceptions.NameAvailabilityException
OrganizationNotFoundException = exceptions.OrganizationNotFoundException
OrganizationStateException = exceptions.OrganizationStateException
ReservedNameException = exceptions.ReservedNameException
UnsupportedOperationException = exceptions.UnsupportedOperationException
|
StarcoderdataPython
|
9665525
|
# --------------
# Import packages
import numpy as np
import pandas as pd
from scipy.stats import mode
# code starts here
bank = pd.read_csv(path)
categorical_var = bank.select_dtypes(include = 'object')
print(categorical_var)
numerical_var = bank.select_dtypes(include = 'number')
#print(numerical_var)
# code ends here
# --------------
# code starts here
banks = bank.drop(['Loan_ID'], axis=1)
null = banks.isnull().sum()
bank_mode = banks.mode()
# fill missing values with each column's mode (first row of the mode frame),
# not the literal string 'bank_mode'
banks = banks.fillna(bank_mode.iloc[0])
#code ends here
# --------------
# Code starts here
avg_loan_amount = pd.pivot_table(banks,index = ['Gender','Married','Self_Employed'],values ='LoanAmount',aggfunc = np.mean)
print(avg_loan_amount)
# code ends here
# --------------
# code starts here
cond1=banks[(banks['Self_Employed'] == 'Yes') & (banks['Loan_Status'] == 'Y')]
loan_approved_se = len(cond1)
print(loan_approved_se)
loan_approved_nse = len(banks[(banks['Self_Employed'] == 'No') & (banks['Loan_Status'] == 'Y')])
print(loan_approved_nse)
percentage_se = (loan_approved_se / 614 ) * 100
percentage_nse = (loan_approved_nse /614) * 100
print(percentage_se)
print(percentage_nse)
# code ends here
# --------------
# code starts here
count = 0
loan_term = banks['Loan_Amount_Term'].apply(lambda x: int(x) / 12)
for i in loan_term:
    if i >= 25:
        count = count + 1
print(count)
big_loan_term = count
# code ends here
# --------------
# code starts here
loan_groupby = banks.groupby('Loan_Status')
loan_groupby = loan_groupby[['ApplicantIncome','Credit_History']]
mean_values = loan_groupby.mean()
print(mean_values)
# code ends here
|
StarcoderdataPython
|
3229962
|
from botocore.exceptions import ClientError
from mock import MagicMock, patch, call
from sceptre_s3_packager.s3_packager import KeyResolver, UploadHook
from sceptre.stack import Stack
class TestKeyResolver(object):
def setup_method(self, method):
self.key_resolver = KeyResolver()
self.key_resolver.argument = './tests/testdata'
def test_simple(self):
key = self.key_resolver.resolve()
assert key == 'sceptre/55cdcd252b548216c5b4a0088de166b8'
class TestUploadHook(object):
def setup_method(self, method):
self.upload_hook = UploadHook()
self.upload_hook.argument = './tests/testdata'
self.upload_hook.stack = MagicMock(spec=Stack)
self.upload_hook.stack.region = 'eu-central-1'
self.upload_hook.stack_stack_name = 'my-stack'
self.upload_hook.stack.profile = None
self.upload_hook.stack.external_name = None
self.upload_hook.stack.sceptre_user_data = {
'Code': {
'S3Bucket': 'my-bucket'
}
}
@patch('sceptre.connection_manager.ConnectionManager.call')
def test_with_upload(self, mock_call):
mock_call.side_effect = [ClientError({
'Error': {
'Code': '404'
}
}, 'dummy'), None]
self.upload_hook.run()
mock_call.assert_has_calls([
call(
service='s3',
command='head_object',
kwargs={
'Bucket': 'my-bucket',
'Key': 'sceptre/55cdcd252b548216c5b4a0088de166b8'
},
),
call(
service='s3',
command='put_object',
kwargs={
'Bucket': 'my-bucket',
'Key': 'sceptre/55cdcd252b548216c5b4a0088de166b8',
'Body': b'PK\x03\x04\x14\x00\x00\x00\x00\x00\x00\x00!L\xbd\xbf\x0cg\x1e\x00\x00\x00\x1e\x00\x00\x00\x0b\x00\x00\x00my_file.txtContent for automated testing\nPK\x01\x02\x14\x03\x14\x00\x00\x00\x00\x00\x00\x00!L\xbd\xbf\x0cg\x1e\x00\x00\x00\x1e\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa4\x81\x00\x00\x00\x00my_file.txtPK\x05\x06\x00\x00\x00\x00\x01\x00\x01\x009\x00\x00\x00G\x00\x00\x00\x00\x00' # noqa: E501
},
)
])
@patch('sceptre.connection_manager.ConnectionManager.call')
def test_without_upload(self, mock_call):
self.upload_hook.run()
mock_call.assert_has_calls([
call(
service='s3',
command='head_object',
kwargs={
'Bucket': 'my-bucket',
'Key': 'sceptre/55cdcd252b548216c5b4a0088de166b8'
},
),
])
|
StarcoderdataPython
|
8027269
|
import numpy as np
import pandas as pd
from pandas import DataFrame
from sklearn.cluster import DBSCAN
from sklearn import preprocessing
import traceback
from datetime import datetime
def LTV_class_probability(value_series):
value_list = value_series.values.tolist()
class_1_prob = value_list.count(1)
class_2_prob = value_list.count(2)*2
class_3_prob = value_list.count(3)*5
total_prob = class_1_prob + class_2_prob + class_3_prob + 1e-10
output_dict = {
"Low_Class": round(class_1_prob/total_prob,2),
"Mid_Class": round(class_2_prob/total_prob,2),
"High_Class": round(class_3_prob/total_prob,2)
}
return output_dict
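# Worked example of the weighting above: one guest of each class gives weights
# 1, 2 and 5 (total 8), so the function returns (after rounding)
#   {'Low_Class': 0.12, 'Mid_Class': 0.25, 'High_Class': 0.62}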
def generated_final_cluster(low_class_prob, mid_class_prob, high_class_prob):
max_prob = max(low_class_prob, mid_class_prob, high_class_prob)
if high_class_prob >= max_prob:
return 3
elif mid_class_prob >= max_prob:
return 2
elif low_class_prob>= max_prob:
return 1
else:
return -1
def guest_type_cols(LTV_Class, Final_Cluster):
if LTV_Class >= 1:
return "Returning Guest"
elif Final_Cluster == -1:
return "Ignore 1st-Time Guest"
else:
return "1st-Time Guest"
def construct_ground_truth_file(folder_path: str):
"""
    Load datafile from directory and construct the ground-truth pandas DataFrame for processing
"""
print(f"Start Construct Ground Truth File")
df_long_time_guests = DataFrame()
df_short_time_guests = DataFrame()
df_ground_truth = DataFrame()
try:
df_long_time_guests = pd.read_csv(folder_path + "LTV_class_of_long_time_guests.csv")
df_short_time_guests = pd.read_csv(folder_path + "LTV_class_of_short_time_guests.csv")
df_returning_guest = pd.concat([df_long_time_guests, df_short_time_guests]).reset_index()
df_returning_guest["LTV_Class"].replace({"CLV_Low_Prob": "1", "CLV_Mid_Prob": "2", "CLV_High_Prob": "3"}, inplace=True)
df_ground_truth = df_returning_guest.drop(columns = ["index", "CLV_Low_Prob", "CLV_Mid_Prob", "CLV_High_Prob"])
df_ground_truth["GuestID"] = df_ground_truth["GuestID"].astype(int)
df_ground_truth["LTV_Class"] = df_ground_truth["LTV_Class"].astype(int)
except Exception as error_sum:
print("___")
print("Error summary: \n", error_sum)
error_log = traceback.format_exc()
print("Error Details: \n", str(error_log))
print("___")
print(f"Construct Ground-Truth File With Total {len(df_ground_truth)} observations. ({len(df_long_time_guests)} observations from segmentations and {len(df_short_time_guests)} observations from classification.)")
if len(df_ground_truth) > 0:
print(f"\t* LTV Low-Class Guest: {len(df_ground_truth[df_ground_truth['LTV_Class']==1])} observations")
print(f"\t* LTV Mid-Class Guest: {len(df_ground_truth[df_ground_truth['LTV_Class']==2])} observations")
print(f"\t* LTV High-Class Guest: {len(df_ground_truth[df_ground_truth['LTV_Class']==3])} observations")
print(f"__________________________")
return df_ground_truth, df_returning_guest
def construct_1st_time_reservation_file(folder_path: str):
"""
    Load datafile from directory and construct the 1st-time reservation pandas DataFrame for processing
"""
print(f"Start Construct 1st-time Reservation File")
df_1st_reservation_returning_guest = DataFrame()
df_1st_reservation_1st_time_guest = DataFrame()
df_1st_reservation_combine = DataFrame()
try:
df_1st_reservation_combine = pd.read_csv(folder_path + "1st_reservation_processed.csv")
df_1st_reservation_returning_guest = df_1st_reservation_combine[df_1st_reservation_combine["is1stVisit"]==False]
df_1st_reservation_1st_time_guest = df_1st_reservation_combine[df_1st_reservation_combine["is1stVisit"]==True]
# df_1st_reservation_combine = pd.concat([df_1st_reservation_returning_guest, df_1st_reservation_1st_time_guest]).reset_index().drop(columns = ['index'])
except Exception as error_sum:
print("___")
print("Error summary: \n", error_sum)
error_log = traceback.format_exc()
print("Error Details: \n", str(error_log))
print("___")
print(f"Construct 1st-time Reservation File With Total {len(df_1st_reservation_combine)} Observations.\n\
* Returning Guest: {len(df_1st_reservation_returning_guest)} observations\n\
* 1st-time Guest: {len(df_1st_reservation_1st_time_guest)} observations")
print(f"__________________________")
return df_1st_reservation_combine
def DBSCAN_clustering(df_input: DataFrame):
"""
Run the DBSCAN clustering algorithm to return dataframe with cluster group
Parameters
----------
Input
df_input : DataFrame
Output:
DataFrame with cluster group by guest ID
"""
print(f"Start Apply DBSCAN Clustering Algorithm")
start_time = datetime.now()
try:
        X = np.array(df_input.drop(['GuestID', 'LTV_Class', 'is1stVisit'], axis=1).astype(float))
X = preprocessing.StandardScaler().fit_transform(X)
clf = DBSCAN(min_samples=2, eps=100, algorithm='kd_tree', n_jobs=-1)
clf.fit(X)
labels = clf.labels_
        # attach the cluster labels directly (avoids slow per-row chained assignment)
        df_input['cluster_group'] = labels
except Exception as error_sum:
print("___")
print("Error summary: \n", error_sum)
error_log = traceback.format_exc()
print("Error Details: \n", str(error_log))
print("___")
end_time = datetime.now()
process_time = str(end_time - start_time)
print(f"DBSCAN Clustering Algorithm Time Consuming: {process_time} with total {len(df_input)} observations")
print(f"__________________________")
return df_input
def probability_applied(df_input: DataFrame):
"""
Run the Probability clustering algorithm to return dataframe with cluster group
Parameters
----------
Input
df_input : DataFrame
Output: DataFrame with cluster group by guest ID
"""
print(f"Start Apply Probability Algorithm")
df_output = DataFrame()
try:
df_simplify_input = df_input[['GuestID', 'LTV_Class', 'cluster_group']].copy()
df_filter = df_input[df_input['cluster_group']!=-1][['GuestID', 'LTV_Class', 'cluster_group']].sort_values(by=['cluster_group'])
df_filter['LTV_Class'] = df_filter['LTV_Class'].fillna(-1)
df_filter = df_filter[df_filter['LTV_Class']!=-1]
df_groupby_cluster_group = df_filter.groupby('cluster_group')['LTV_Class'].apply(LTV_class_probability)
df_groupby_cluster_group = df_groupby_cluster_group.reset_index()
df_groupby_cluster_group = df_groupby_cluster_group.rename(columns={'LTV_Class':'Final_Cluster', 'level_1': 'Returning_Cluster'})
df_groupby_cluster_group = df_groupby_cluster_group.pivot(index='cluster_group', columns='Returning_Cluster', values='Final_Cluster').reset_index()
df_output = df_simplify_input.merge(df_groupby_cluster_group, left_on="cluster_group", right_on="cluster_group", how="left")
df_output["Final_Cluster"] = df_output.apply(lambda df_output: generated_final_cluster(df_output["Low_Class"], df_output["Mid_Class"], df_output["High_Class"]), axis=1)
df_output["Guest_Type"] = df_output.apply(lambda df_output: guest_type_cols(df_output["LTV_Class"], df_output["Final_Cluster"]), axis=1)
df_output['LTV_Class'] = df_output['LTV_Class'].fillna(df_output['Final_Cluster'])
df_output = df_output.drop(columns = ['cluster_group', 'Final_Cluster'])
except Exception as error_sum:
print("___")
print("Error summary: \n", error_sum)
error_log = traceback.format_exc()
print("Error Details: \n", str(error_log))
print("___")
print(f"Probability Algorithm Completed")
print(f"__________________________")
if len(df_output) > 0:
Counter = df_output['Guest_Type'].value_counts()
for guest_type in ['Returning Guest', '1st-Time Guest', 'Ignore 1st-Time Guest']:
if guest_type not in Counter.index:
Counter.loc[guest_type] = 0
print(f"Data File Output: {len(df_output)} observations.\n\
\t* Returning Guest: {Counter.loc['Returning Guest']} observations.\n\
\t* 1st-Time Guest: {Counter.loc['1st-Time Guest'] + Counter.loc['Ignore 1st-Time Guest']} observations:\n\
\t\t + Potential Guest: {Counter.loc['1st-Time Guest']} observations\n\
\t\t + Ignore Guest: {Counter.loc['Ignore 1st-Time Guest']} observations")
return df_output
def Potential_model(folder_path: str):
df_ground_truth, df_returning_guest = construct_ground_truth_file(folder_path)
df_1st_reservation_combine = construct_1st_time_reservation_file(folder_path)
df_combine = df_1st_reservation_combine.merge(df_ground_truth, left_on="GuestID", right_on="GuestID", how="left")
df_combine = DBSCAN_clustering(df_combine.copy())
df_combine.to_csv(folder_path + "DBSCAN_output.csv", index=False)
df_potential_model = probability_applied(df_combine.copy())
df_potential_model.to_csv(folder_path + "potential_model_output.csv", index=False)
df_potential_model = df_potential_model.rename(columns={'High_Class':'CLV_High_Prob', 'Low_Class': 'CLV_Low_Prob', 'Mid_Class': 'CLV_Mid_Prob'})
df_potential_model = df_potential_model[df_potential_model['Guest_Type'] != 'Returning Guest']
df_returning_guest['Guest_Type'] = "Returning Guest"
df_all_guest_merge = pd.concat([df_returning_guest.drop(columns=["index"]), df_potential_model]).reset_index().drop(columns=["index"])
df_input_guest_id = pd.read_csv(str(folder_path) + "data_input_with_guest_id.csv")
df_final_output = df_input_guest_id.merge(df_all_guest_merge, left_on="GuestID", right_on="GuestID", how="left")
df_final_output = df_final_output[df_final_output["Guest_Type"] != "Ignore 1st-Time Guest"]
df_final_output = df_final_output[["LastName","FirstName","Email","GuestID","LTV_Class","CLV_Low_Prob","CLV_Mid_Prob","CLV_High_Prob","Guest_Type"]].drop_duplicates(subset=["LastName","FirstName","Email"])
try:
print(f"Return The DataFrame Output")
print(df_final_output.head(10))
print(f"__________________________")
except:
pass
df_final_output[["LastName","FirstName","Email","GuestID","LTV_Class","CLV_Low_Prob","CLV_Mid_Prob","CLV_High_Prob","Guest_Type"]].to_json(folder_path + "final_output.json", orient='records')
|
StarcoderdataPython
|
393670
|
from .robust_kalman import RobustKalman
from . import utils
|
StarcoderdataPython
|
3296498
|
#!/usr/bin/env python3
import argparse
import json
import requests
import sys
from datetime import datetime
SUPPORTED_BUILDS = {
6002: 'https://support.microsoft.com/en-us/help/4343218', # 2008 SP2
7601: 'https://support.microsoft.com/en-us/help/4009469', # 7 / 2008R2 SP1
9200: 'https://support.microsoft.com/en-us/help/4009471', # 2012
9600: 'https://support.microsoft.com/en-us/help/4009470', # 8.1 / 2012R2
10240: 'https://support.microsoft.com/en-us/help/4000823', # Windows 10 1507 "RTM" "Threshold 1"
10586: 'https://support.microsoft.com/en-us/help/4000824', # Windows 10 1511 "November Update" "Threshold 2"
14393: 'https://support.microsoft.com/en-us/help/4000825', # Windows 10 1607 "Anniversary Update" "Redstone 1" / Server 2016
15063: 'https://support.microsoft.com/en-us/help/4018124', # Windows 10 1703 "Creators Update" "Redstone 2"
16299: 'https://support.microsoft.com/en-us/help/4043454', # Windows 10 1709 "Fall Creators Update" "Redstone 3"
17134: 'https://support.microsoft.com/en-us/help/4099479', # Windows 10 1803 "Redstone 4"
17763: 'https://support.microsoft.com/en-us/help/4464619', # Windows 10 1809 "Redstone 5" / Server 2019
}
BEGIN_MARKER = '"minorVersions":'
END_MARKER = ']\n'
DATE_FORMAT = '%Y-%m-%dT%H:%M:%S'
# Updates types and whether they are cumulative or not
UPDATE_TYPES = {
'': False, # legacy discontinued non-cumulative updates
'security-only update': False,
'monthly rollup': True,
'os build monthly rollup': True,
'preview of monthly rollup': True,
}
def fetch_security_updates(url):
html = requests.get(url).text
html = html.replace('\r\n', '\n')
json_begin = html.find(BEGIN_MARKER)
if json_begin == -1:
sys.stderr.write('Unable to find marker {} in {}\n'.format(
BEGIN_MARKER, url))
sys.exit(1)
json_begin += len(BEGIN_MARKER)
json_end = html.find(END_MARKER, json_begin)
if json_end == -1:
sys.stderr.write('Unable to find marker {} in {}\n'.format(
END_MARKER, url))
sys.exit(1)
json_end += len(END_MARKER)
updates_json = html[json_begin:json_end]
updates_json = json.loads(updates_json)
updates = []
for update in updates_json:
if not set(('releaseVersion','id','releaseDate')).issubset(set(update.keys())):
sys.stderr.write('Can\'t handle updates without id/releaseVersion/releaseDate\n')
sys.exit(1)
update_type = update['releaseVersion'].lower().strip()
if 'os build' in update_type: # new >= 10.0 updates type name format, they are all cumulative
update_type = 'monthly rollup'
if update_type not in UPDATE_TYPES:
sys.stderr.write('Update with unknown releaseVersion "{}"\n'.format(
update['releaseVersion']))
sys.stderr.write('\n' + str(update) + '\n')
sys.exit(1)
is_cumulative = UPDATE_TYPES[update_type]
date = datetime.strptime(update['releaseDate'], DATE_FORMAT).date()
updates.append((date, is_cumulative, update['id']))
updates.sort(key=lambda x: x[0])
return updates
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--csv', type=argparse.FileType('w'), default=None)
parser.add_argument('--sql', type=argparse.FileType('w'), default=None)
args = parser.parse_args()
if args.sql is None and args.csv is None:
args.csv = sys.stdout
if args.csv is not None:
args.csv.write('build_number\tis_cumulative\tpublish_date\tkb_id\n')
for build, url in SUPPORTED_BUILDS.items():
updates = fetch_security_updates(url)
for (date, is_cumulative, kb) in updates:
args.csv.write('{}\t{}\t{}/{}/{}\t{}\n'.format(build,
("1" if is_cumulative else "0"),
date.year, date.month, date.day, kb))
if args.sql is not None:
args.sql.write('\n')
args.sql.write('''CREATE TABLE [kb_list](
[build] [int] NOT NULL,
[cumulative] [bit] NOT NULL,
[id] [varchar](255) NOT NULL,
[date] [date] NOT NULL)\n''')
args.sql.write('INSERT INTO [kb_list] VALUES ')
sql = []
for build, url in SUPPORTED_BUILDS.items():
updates = fetch_security_updates(url)
for (date, is_cumulative, kb) in updates:
sql.append("({},{},'KB{}','{}-{}-{}')".format(
build, (1 if is_cumulative else 0), kb,
date.year, date.month, date.day))
args.sql.write(',\n '.join(sql) + ';')
|
StarcoderdataPython
|
11356498
|
"""Test suite for phlsys_textconvert."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import string
import unittest
import phlsys_textconvert
class Test(unittest.TestCase):
def _check_unicode_to_ascii(self, src, dst):
value = phlsys_textconvert.lossy_unicode_to_ascii(src)
self.assertEqual(value, dst)
self.assertIsInstance(value, type(dst))
def test_empty(self):
self._check_unicode_to_ascii(u"", "")
def test_ascii_printable(self):
self._check_unicode_to_ascii(
unicode(string.printable),
str(string.printable))
def test_trailing_leading_space(self):
self._check_unicode_to_ascii(u"trailing ", "trailing ")
self._check_unicode_to_ascii(u" leading", " leading")
self._check_unicode_to_ascii(u"trailing\t\t", "trailing\t\t")
self._check_unicode_to_ascii(u"\t\tleading", "\t\tleading")
def test_newlines(self):
self._check_unicode_to_ascii(u"new\nline", "new\nline")
self._check_unicode_to_ascii(u"windows\r\nline", "windows\r\nline")
self._check_unicode_to_ascii(u"\nline", "\nline")
self._check_unicode_to_ascii(u"\r\nline", "\r\nline")
self._check_unicode_to_ascii(u"new\n", "new\n")
self._check_unicode_to_ascii(u"windows\r\n", "windows\r\n")
def test_nuls(self):
self._check_unicode_to_ascii(u"nul\0middle", "nul\0middle")
self._check_unicode_to_ascii(u"nul-end\0", "nul-end\0")
self._check_unicode_to_ascii(u"\0nul-start", "\0nul-start")
def test_ellipses(self):
self._check_unicode_to_ascii(u"time passed\u2026", "time passed...")
def test_hyphenation_point(self):
self._check_unicode_to_ascii(u"hy\u2027phen\u2027ate", "hy?phen?ate")
def test_dashes(self):
self._check_unicode_to_ascii(u"\u2010", "-")
self._check_unicode_to_ascii(u"\u2011", "-")
self._check_unicode_to_ascii(u"\u2013", "-")
self._check_unicode_to_ascii(u"\u2013", "-")
self._check_unicode_to_ascii(u"\u2014", "-")
self._check_unicode_to_ascii(u"\u2015", "-")
self._check_unicode_to_ascii(u"\u2212", "-")
def test_quotes(self):
self._check_unicode_to_ascii(u"\u00b4", "'")
self._check_unicode_to_ascii(u"\u2018", "'")
self._check_unicode_to_ascii(u"\u2019", "'")
self._check_unicode_to_ascii(u"\u201c", '"')
self._check_unicode_to_ascii(u"\u201d", '"')
def test_bullets(self):
self._check_unicode_to_ascii(u"\u00b7", "*")
self._check_unicode_to_ascii(u"\u2022", "*")
self._check_unicode_to_ascii(u"\u2023", ">")
self._check_unicode_to_ascii(u"\u2024", "*")
self._check_unicode_to_ascii(u"\u2043", "-")
self._check_unicode_to_ascii(u"\u25b8", ">")
self._check_unicode_to_ascii(u"\u25e6", "o")
def test_A_Breathing(self):
# test we can convert unicode to unicode
phlsys_textconvert.to_unicode(unicode('hello'))
# test we can convert str to unicode
self.assertIsInstance(
phlsys_textconvert.to_unicode('hello'),
unicode)
# test invalid characters get replaced by the replacement character
self.assertEqual(
phlsys_textconvert.to_unicode('\xFF'),
u'\uFFFD')
# test 'horizontal ellipses' as UTF8 get replaced
self.assertEqual(
phlsys_textconvert.to_unicode('\xe2\x80\xa6'),
u'\uFFFD\uFFFD\uFFFD')
# test we can convert ascii to ascii
phlsys_textconvert.ensure_ascii('hello')
# test str stays str
self.assertIsInstance(
phlsys_textconvert.ensure_ascii('hello'),
str)
# test invalid characters get replaced by '?'
self.assertEqual(
phlsys_textconvert.ensure_ascii('\xFF'),
'?')
# test 'horizontal ellipses' as UTF8 get replaced
self.assertEqual(
phlsys_textconvert.ensure_ascii('\xe2\x80\xa6'),
'???')
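# Minimal runner so the suite can be executed directly; assumes
# phlsys_textconvert is importable from the working directory.
if __name__ == '__main__':
    unittest.main()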
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
StarcoderdataPython
|
3250768
|
<reponame>jonesholger/lbann
#!/usr/bin/python
import common
|
StarcoderdataPython
|
11232827
|
<reponame>jjacob/DailyPythonScripts<gh_stars>0
'''
Created on Nov 22, 2011
@author: <NAME>
Email: <EMAIL>
important features:
- read MC and data histograms and combine them
- set styles and colors
- allow switches for log-scale, cumulative histograms, underflow/overflow bins, error sources
'''
import tools.PlottingUtilities as plotting
import FILES
import ROOTFileReader as reader
import QCDRateEstimation
def plot(histpath, qcdShapeFrom, qcdShapeForSystematics, qcdRateEstimate, rebin=1, suffixes=[]):
inputFiles = FILES.files
#get histograms
    if len(suffixes) > 0:
        for suffix in suffixes:
            hist = histpath + '_' + suffix
            # read the histograms for this suffix path, not the bare histpath
            histograms = reader.getHistogramDictionary(hist, inputFiles)
    else:
        histograms = reader.getHistogramDictionary(histpath, inputFiles)
if __name__ == "__main__":
inputFiles = FILES.files
estimateQCD = QCDRateEstimation.estimateQCDWithRelIso
plot(histpath='TTbarEplusJetsPlusMetAnalysis/Ref selection/MET/patMETsPFlow/Angle_lepton_MET',
qcdShapeFrom ='TTbarEplusJetsPlusMetAnalysis/Ref selection/QCDConversions/MET/patMETsPFlow/Angle_lepton_MET',
qcdShapeForSystematics = 'TTbarEplusJetsPlusMetAnalysis/Ref selection/QCD non iso e+jets/MET/patMETsPFlow/Angle_lepton_MET',
qcdRateEstimate=estimateQCD,
rebin=1,
suffixes=['0btag', '1btag', '2orMoreBtags'])
|
StarcoderdataPython
|
4832023
|
<reponame>arnoyu-hub/COMP0016miemie<gh_stars>0
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
from pandas.arrays import BooleanArray
from pandas.core.arrays.boolean import coerce_to_array
def test_boolean_array_constructor():
values = np.array([True, False, True, False], dtype="bool")
mask = np.array([False, False, False, True], dtype="bool")
result = BooleanArray(values, mask)
expected = pd.array([True, False, True, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
with pytest.raises(TypeError, match="values should be boolean numpy array"):
BooleanArray(values.tolist(), mask)
with pytest.raises(TypeError, match="mask should be boolean numpy array"):
BooleanArray(values, mask.tolist())
with pytest.raises(TypeError, match="values should be boolean numpy array"):
BooleanArray(values.astype(int), mask)
with pytest.raises(TypeError, match="mask should be boolean numpy array"):
BooleanArray(values, None)
with pytest.raises(ValueError, match="values must be a 1D array"):
BooleanArray(values.reshape(1, -1), mask)
with pytest.raises(ValueError, match="mask must be a 1D array"):
BooleanArray(values, mask.reshape(1, -1))
def test_boolean_array_constructor_copy():
values = np.array([True, False, True, False], dtype="bool")
mask = np.array([False, False, False, True], dtype="bool")
result = BooleanArray(values, mask)
assert result._data is values
assert result._mask is mask
result = BooleanArray(values, mask, copy=True)
assert result._data is not values
assert result._mask is not mask
def test_to_boolean_array():
expected = BooleanArray(
np.array([True, False, True]), np.array([False, False, False])
)
result = pd.array([True, False, True], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
result = pd.array(np.array([True, False, True]), dtype="boolean")
tm.assert_extension_array_equal(result, expected)
result = pd.array(np.array([True, False, True], dtype=object), dtype="boolean")
tm.assert_extension_array_equal(result, expected)
# with missing values
expected = BooleanArray(
np.array([True, False, True]), np.array([False, False, True])
)
result = pd.array([True, False, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
result = pd.array(np.array([True, False, None], dtype=object), dtype="boolean")
tm.assert_extension_array_equal(result, expected)
def test_to_boolean_array_all_none():
expected = BooleanArray(np.array([True, True, True]), np.array([True, True, True]))
result = pd.array([None, None, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
result = pd.array(np.array([None, None, None], dtype=object), dtype="boolean")
tm.assert_extension_array_equal(result, expected)
@pytest.mark.parametrize(
"a, b",
[
([True, False, None, np.nan, pd.NA], [True, False, None, None, None]),
([True, np.nan], [True, None]),
([True, pd.NA], [True, None]),
([np.nan, np.nan], [None, None]),
(np.array([np.nan, np.nan], dtype=float), [None, None]),
],
)
def test_to_boolean_array_missing_indicators(a, b):
result = pd.array(a, dtype="boolean")
expected = pd.array(b, dtype="boolean")
tm.assert_extension_array_equal(result, expected)
@pytest.mark.parametrize(
"values",
[
["foo", "bar"],
["1", "2"],
# "foo",
[1, 2],
[1.0, 2.0],
pd.date_range("20130101", periods=2),
np.array(["foo"]),
np.array([1, 2]),
np.array([1.0, 2.0]),
[np.nan, {"a": 1}],
],
)
def test_to_boolean_array_error(values):
# error in converting existing arrays to BooleanArray
msg = "Need to pass bool-like value"
with pytest.raises(TypeError, match=msg):
pd.array(values, dtype="boolean")
def test_to_boolean_array_from_integer_array():
result = pd.array(np.array([1, 0, 1, 0]), dtype="boolean")
expected = pd.array([True, False, True, False], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
# with missing values
result = pd.array(np.array([1, 0, 1, None]), dtype="boolean")
expected = pd.array([True, False, True, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
def test_to_boolean_array_from_float_array():
result = pd.array(np.array([1.0, 0.0, 1.0, 0.0]), dtype="boolean")
expected = pd.array([True, False, True, False], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
# with missing values
result = pd.array(np.array([1.0, 0.0, 1.0, np.nan]), dtype="boolean")
expected = pd.array([True, False, True, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
def test_to_boolean_array_integer_like():
# integers of 0's and 1's
result = pd.array([1, 0, 1, 0], dtype="boolean")
expected = pd.array([True, False, True, False], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
# with missing values
result = pd.array([1, 0, 1, None], dtype="boolean")
expected = pd.array([True, False, True, None], dtype="boolean")
tm.assert_extension_array_equal(result, expected)
def test_coerce_to_array():
# TODO this is currently not public API
values = np.array([True, False, True, False], dtype="bool")
mask = np.array([False, False, False, True], dtype="bool")
result = BooleanArray(*coerce_to_array(values, mask=mask))
expected = BooleanArray(values, mask)
tm.assert_extension_array_equal(result, expected)
assert result._data is values
assert result._mask is mask
result = BooleanArray(*coerce_to_array(values, mask=mask, copy=True))
expected = BooleanArray(values, mask)
tm.assert_extension_array_equal(result, expected)
assert result._data is not values
assert result._mask is not mask
# mixed missing from values and mask
values = [True, False, None, False]
mask = np.array([False, False, False, True], dtype="bool")
result = BooleanArray(*coerce_to_array(values, mask=mask))
expected = BooleanArray(
np.array([True, False, True, True]), np.array([False, False, True, True])
)
tm.assert_extension_array_equal(result, expected)
result = BooleanArray(*coerce_to_array(np.array(values, dtype=object), mask=mask))
tm.assert_extension_array_equal(result, expected)
result = BooleanArray(*coerce_to_array(values, mask=mask.tolist()))
tm.assert_extension_array_equal(result, expected)
# raise errors for wrong dimension
values = np.array([True, False, True, False], dtype="bool")
mask = np.array([False, False, False, True], dtype="bool")
with pytest.raises(ValueError, match="values must be a 1D list-like"):
coerce_to_array(values.reshape(1, -1))
with pytest.raises(ValueError, match="mask must be a 1D list-like"):
coerce_to_array(values, mask=mask.reshape(1, -1))
def test_coerce_to_array_from_boolean_array():
# passing BooleanArray to coerce_to_array
values = np.array([True, False, True, False], dtype="bool")
mask = np.array([False, False, False, True], dtype="bool")
arr = BooleanArray(values, mask)
result = BooleanArray(*coerce_to_array(arr))
tm.assert_extension_array_equal(result, arr)
# no copy
assert result._data is arr._data
assert result._mask is arr._mask
result = BooleanArray(*coerce_to_array(arr), copy=True)
tm.assert_extension_array_equal(result, arr)
assert result._data is not arr._data
assert result._mask is not arr._mask
with pytest.raises(ValueError, match="cannot pass mask for BooleanArray input"):
coerce_to_array(arr, mask=mask)
def test_coerce_to_numpy_array():
# with missing values -> object dtype
arr = pd.array([True, False, None], dtype="boolean")
result = np.array(arr)
expected = np.array([True, False, pd.NA], dtype="object")
tm.assert_numpy_array_equal(result, expected)
# also with no missing values -> object dtype
arr = pd.array([True, False, True], dtype="boolean")
result = np.array(arr)
expected = np.array([True, False, True], dtype="object")
tm.assert_numpy_array_equal(result, expected)
# force bool dtype
result = np.array(arr, dtype="bool")
expected = np.array([True, False, True], dtype="bool")
tm.assert_numpy_array_equal(result, expected)
# with missing values will raise error
arr = pd.array([True, False, None], dtype="boolean")
msg = (
"cannot convert to 'bool'-dtype NumPy array with missing values. "
"Specify an appropriate 'na_value' for this dtype."
)
with pytest.raises(ValueError, match=msg):
np.array(arr, dtype="bool")
def test_to_boolean_array_from_strings():
result = BooleanArray._from_sequence_of_strings(
np.array(["True", "False", "1", "1.0", "0", "0.0", np.nan], dtype=object)
)
expected = BooleanArray(
np.array([True, False, True, True, False, False, False]),
np.array([False, False, False, False, False, False, True]),
)
tm.assert_extension_array_equal(result, expected)
def test_to_boolean_array_from_strings_invalid_string():
with pytest.raises(ValueError, match="cannot be cast"):
BooleanArray._from_sequence_of_strings(["donkey"])
@pytest.mark.parametrize("box", [True, False], ids=["series", "array"])
def test_to_numpy(box):
con = pd.Series if box else pd.array
# default (with or without missing values) -> object dtype
arr = con([True, False, True], dtype="boolean")
result = arr.to_numpy()
expected = np.array([True, False, True], dtype="object")
tm.assert_numpy_array_equal(result, expected)
arr = con([True, False, None], dtype="boolean")
result = arr.to_numpy()
expected = np.array([True, False, pd.NA], dtype="object")
tm.assert_numpy_array_equal(result, expected)
arr = con([True, False, None], dtype="boolean")
result = arr.to_numpy(dtype="str")
expected = np.array([True, False, pd.NA], dtype="<U5")
tm.assert_numpy_array_equal(result, expected)
# no missing values -> can convert to bool, otherwise raises
arr = con([True, False, True], dtype="boolean")
result = arr.to_numpy(dtype="bool")
expected = np.array([True, False, True], dtype="bool")
tm.assert_numpy_array_equal(result, expected)
arr = con([True, False, None], dtype="boolean")
with pytest.raises(ValueError, match="cannot convert to 'bool'-dtype"):
result = arr.to_numpy(dtype="bool")
# specify dtype and na_value
arr = con([True, False, None], dtype="boolean")
result = arr.to_numpy(dtype=object, na_value=None)
expected = np.array([True, False, None], dtype="object")
tm.assert_numpy_array_equal(result, expected)
result = arr.to_numpy(dtype=bool, na_value=False)
expected = np.array([True, False, False], dtype="bool")
tm.assert_numpy_array_equal(result, expected)
result = arr.to_numpy(dtype="int64", na_value=-99)
expected = np.array([1, 0, -99], dtype="int64")
tm.assert_numpy_array_equal(result, expected)
result = arr.to_numpy(dtype="float64", na_value=np.nan)
expected = np.array([1, 0, np.nan], dtype="float64")
tm.assert_numpy_array_equal(result, expected)
# converting to int or float without specifying na_value raises
with pytest.raises(ValueError, match="cannot convert to 'int64'-dtype"):
arr.to_numpy(dtype="int64")
with pytest.raises(ValueError, match="cannot convert to 'float64'-dtype"):
arr.to_numpy(dtype="float64")
def test_to_numpy_copy():
# to_numpy can be zero-copy if no missing values
arr = pd.array([True, False, True], dtype="boolean")
result = arr.to_numpy(dtype=bool)
result[0] = False
tm.assert_extension_array_equal(
arr, pd.array([False, False, True], dtype="boolean")
)
arr = pd.array([True, False, True], dtype="boolean")
result = arr.to_numpy(dtype=bool, copy=True)
result[0] = False
tm.assert_extension_array_equal(arr, pd.array([True, False, True], dtype="boolean"))
# FIXME: don't leave commented out
# TODO when BooleanArray coerces to object dtype numpy array, need to do conversion
# manually in the indexing code
# def test_indexing_boolean_mask():
# arr = pd.array([1, 2, 3, 4], dtype="Int64")
# mask = pd.array([True, False, True, False], dtype="boolean")
# result = arr[mask]
# expected = pd.array([1, 3], dtype="Int64")
# tm.assert_extension_array_equal(result, expected)
# # missing values -> error
# mask = pd.array([True, False, True, None], dtype="boolean")
# with pytest.raises(IndexError):
# result = arr[mask]
|
StarcoderdataPython
|
4866093
|
import FWCore.ParameterSet.Config as cms
from ElectroWeakAnalysis.ZMuMu.ZMuMuCategoriesSequences_cff import *
import copy
#### vertex refit for loose cut
goodZToMuMuVtxedAtLeast1HLTLoose = cms.EDProducer(
"KalmanVertexFitCompositeCandProducer",
src = cms.InputTag("goodZToMuMuAtLeast1HLTLoose")
)
goodZToMuMuPathLoose.__iadd__(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuPathLoose.setLabel("goodZToMuMuLoose")
goodZToMuMuVtxed2HLTLoose = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuVtxed2HLTLoose.src = cms.InputTag("goodZToMuMu2HLTLoose")
goodZToMuMu2HLTPathLoose.__iadd__(goodZToMuMuVtxed2HLTLoose)
goodZToMuMu2HLTPathLoose.setLabel("goodZToMuMu2HLTLoose")
goodZToMuMuVtxed1HLTLoose = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuVtxed1HLTLoose.src = cms.InputTag("goodZToMuMu1HLTLoose")
goodZToMuMu1HLTPathLoose.__iadd__(goodZToMuMuVtxed1HLTLoose)
goodZToMuMu1HLTPathLoose.setLabel("goodZToMuMu1HLTLoose")
goodZToMuMuVtxedBB2HLTLoose = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuVtxedBB2HLTLoose.src = cms.InputTag("goodZToMuMuBB2HLTLoose")
goodZToMuMuBB2HLTPathLoose.__iadd__(goodZToMuMuVtxedBB2HLTLoose)
goodZToMuMuBB2HLTPathLoose.setLabel("goodZToMuMuBB2HLTLoose")
goodZToMuMuVtxedAB1HLTLoose = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuVtxedAB1HLTLoose.src = cms.InputTag("goodZToMuMuAB1HLTLoose")
goodZToMuMuAB1HLTPathLoose.__iadd__(goodZToMuMuVtxedAB1HLTLoose)
goodZToMuMuAB1HLTPathLoose.setLabel("goodZToMuMuAB1HLTLoose")
## oneNonIsolatedZToMuMuVtxed= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLT)
## oneNonIsolatedZToMuMuVtxed.src= cms.InputTag("oneNonIsolatedZToMuMuAtLeast1HLT")
## oneNonIsolatedZToMuMuPath.__iadd__(oneNonIsolatedZToMuMuVtxed)
## oneNonIsolatedZToMuMuPath.setLabel("oneNonIsolatedZToMuMu")
## twoNonIsolatedZToMuMuVtxed = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLT)
## twoNonIsolatedZToMuMuVtxed.src = cms.InputTag("twoNonIsolatedZToMuMuAtLeast1HLT")
## twoNonIsolatedZToMuMuPath.__iadd__(twoNonIsolatedZToMuMuVtxed)
## twoNonIsolatedZToMuMuPath.setLabel("twoNonIsolatedZToMuMu")
## goodZToMuMuSameCharge2HLTVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
## goodZToMuMuSameCharge2HLTVtxedLoose.src = cms.InputTag("goodZToMuMuSameCharge2HLTLoose")
## goodZToMuMuSameCharge2HLTPathLoose.__iadd__(goodZToMuMuSameCharge2HLTVtxedLoose)
## goodZToMuMuSameCharge2HLTPathLoose.setLabel("goodZToMuMuSameCharge2HLTLoose")
## goodZToMuMuSameCharge1HLTVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
## goodZToMuMuSameCharge1HLTVtxedLoose.src = cms.InputTag("goodZToMuMuSameCharge1HLTLoose")
## goodZToMuMuSameCharge1HLTPathLoose.__iadd__(goodZToMuMuSameCharge1HLTVtxedLoose)
## goodZToMuMuSameCharge1HLTPathLoose.setLabel("goodZToMuMuSameCharge1HLTLoose")
goodZToMuMuSameChargeVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuSameChargeVtxedLoose.src = cms.InputTag("goodZToMuMuSameChargeAtLeast1HLTLoose")
goodZToMuMuSameChargePathLoose.__iadd__(goodZToMuMuSameChargeVtxedLoose)
goodZToMuMuSameChargePathLoose.setLabel("goodZToMuMuSameChargeLoose")
goodZToMuMuOneStandAloneVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuOneStandAloneVtxedLoose.src = cms.InputTag("goodZToMuMuOneStandAloneMuonFirstHLTLoose")
goodZToMuMuOneStandAloneMuonPathLoose.__iadd__(goodZToMuMuOneStandAloneVtxedLoose)
goodZToMuMuOneStandAloneMuonPathLoose.setLabel("goodZToMuMuOneStandAloneMuonLoose")
goodZToMuMuOneTrackVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuOneTrackVtxedLoose.src = cms.InputTag("goodZToMuMuOneTrackFirstHLTLoose")
goodZToMuMuOneTrackPathLoose.__iadd__(goodZToMuMuOneTrackVtxedLoose)
goodZToMuMuOneTrackPathLoose.setLabel("goodZToMuMuOneTrackLoose")
goodZToMuMuOneTrackerMuonVtxedLoose= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTLoose)
goodZToMuMuOneTrackerMuonVtxedLoose.src = cms.InputTag("goodZToMuMuOneTrackerMuonFirstHLTLoose")
goodZToMuMuOneTrackerMuonPathLoose.__iadd__(goodZToMuMuOneTrackerMuonVtxedLoose)
goodZToMuMuOneTrackerMuonPathLoose.setLabel("goodZToMuMuOneTrackerMuonLoose")
### ntuples....
goodZToMuMuVtxedNtupleLoose = cms.EDProducer(
"CandViewNtpProducer",
src = cms.InputTag("goodZToMuMuVtxedLoose"),
variables = cms.VPSet(
cms.PSet(
tag = cms.untracked.string("mass"),
quantity = cms.untracked.string("mass")
),
cms.PSet(
tag = cms.untracked.string("vertexNdof"),
quantity = cms.untracked.string("vertexNdof")
),
cms.PSet(
tag = cms.untracked.string("vertexNormalizedChi2"),
quantity = cms.untracked.string("vertexNormalizedChi2")
),
)
)
goodZToMuMuVtxed2HLTNtupleLoose = copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxed2HLTNtupleLoose.src= cms.InputTag("goodZToMuMuVtxed2HLTLoose")
goodZToMuMu2HLTPathLoose.__iadd__(goodZToMuMuVtxed2HLTNtupleLoose)
goodZToMuMu2HLTPathLoose.setLabel("goodZToMuMu2HLTLoose")
goodZToMuMuVtxed1HLTNtupleLoose = copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxed1HLTNtupleLoose.src= cms.InputTag("goodZToMuMuVtxed1HLTLoose")
goodZToMuMu1HLTPathLoose.__iadd__(goodZToMuMuVtxed1HLTNtupleLoose)
goodZToMuMu1HLTPathLoose.setLabel("goodZToMuMu1HLTLoose")
goodZToMuMuVtxedBB2HLTNtupleLoose = copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedBB2HLTNtupleLoose.src= cms.InputTag("goodZToMuMuVtxedBB2HLTLoose")
goodZToMuMuBB2HLTPathLoose.__iadd__(goodZToMuMuVtxedBB2HLTNtupleLoose)
goodZToMuMuBB2HLTPathLoose.setLabel("goodZToMuMuBB2HLTLoose")
goodZToMuMuVtxedAB1HLTNtupleLoose = copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedAB1HLTNtupleLoose.src= cms.InputTag("goodZToMuMuVtxedAB1HLTLoose")
goodZToMuMuAB1HLTPathLoose.__iadd__(goodZToMuMuVtxedAB1HLTNtupleLoose)
goodZToMuMuAB1HLTPathLoose.setLabel("goodZToMuMuAB1HLTLoose")
## oneNonIsolatedZToMuMuVtxedNtuple = copy.deepcopy(goodZToMuMuVtxedNtuple)
## oneNonIsolatedZToMuMuVtxedNtuple.src = cms.InputTag("oneNonIsolatedZToMuMuVtxed")
## oneNonIsolatedZToMuMuPath.__iadd__(oneNonIsolatedZToMuMuVtxedNtuple)
## oneNonIsolatedZToMuMuPath.setLabel("oneNonIsolatedZToMuMu")
## twoNonIsolatedZToMuMuVtxedNtuple = copy.deepcopy(goodZToMuMuVtxedNtuple)
## twoNonIsolatedZToMuMuVtxedNtuple.src = cms.InputTag("twoNonIsolatedZToMuMuVtxed")
## twoNonIsolatedZToMuMuPath.__iadd__(twoNonIsolatedZToMuMuVtxedNtuple)
## twoNonIsolatedZToMuMuPath.setLabel("twoNonIsolatedZToMuMu")
## goodZToMuMuVtxedSameCharge2HLTNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
## goodZToMuMuVtxedSameCharge2HLTNtupleLoose.src = cms.InputTag("goodZToMuMuVtxedSameCharge2HLTLoose")
## goodZToMuMuSameCharge2HLTPathLoose.__iadd__(goodZToMuMuVtxedSameCharge2HLTNtupleLoose)
## goodZToMuMuSameCharge2HLTPathLoose.setLabel("goodZToMuMuVtxedSameCharge2HLTLoose")
## goodZToMuMuVtxedSameCharge1HLTNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
## goodZToMuMuVtxedSameCharge1HLTNtupleLoose.src = cms.InputTag("goodZToMuMuVtxedSameCharge1HLTLoose")
## goodZToMuMuSameCharge1HLTPathLoose.__iadd__(goodZToMuMuVtxedSameCharge1HLTNtupleLoose)
## goodZToMuMuSameCharge1HLTPathLoose.setLabel("goodZToMuMuSameCharge1HLTLoose")
goodZToMuMuVtxedSameChargeNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedSameChargeNtupleLoose.src = cms.InputTag("goodZToMuMuSameChargeAtLeast1HLTLoose")
goodZToMuMuSameChargePathLoose.__iadd__(goodZToMuMuVtxedSameChargeNtupleLoose)
goodZToMuMuSameChargePathLoose.setLabel("goodZToMuMuSameChargeLoose")
goodZToMuMuVtxedOneStandAloneNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedOneStandAloneNtupleLoose.src = cms.InputTag("goodZToMuMuOneStandAloneVtxedLoose")
goodZToMuMuOneStandAloneMuonPathLoose.__iadd__(goodZToMuMuVtxedOneStandAloneNtupleLoose)
goodZToMuMuOneStandAloneMuonPathLoose.setLabel("goodZToMuMuOneStandAloneMuonLoose")
goodZToMuMuVtxedOneTrackNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedOneTrackNtupleLoose.src =cms.InputTag("goodZToMuMuOneTrackVtxedLoose")
goodZToMuMuOneTrackPathLoose.__iadd__(goodZToMuMuVtxedOneTrackNtupleLoose)
goodZToMuMuOneTrackPathLoose.setLabel("goodZToMuMuOneTrackLoose")
goodZToMuMuVtxedOneTrackerMuonNtupleLoose= copy.deepcopy(goodZToMuMuVtxedNtupleLoose)
goodZToMuMuVtxedOneTrackerMuonNtupleLoose.src =cms.InputTag("goodZToMuMuOneTrackerMuonVtxedLoose")
goodZToMuMuOneTrackerMuonPathLoose.__iadd__(goodZToMuMuVtxedOneTrackerMuonNtupleLoose)
goodZToMuMuOneTrackerMuonPathLoose.setLabel("goodZToMuMuOneTrackerMuonLoose")
vtxedNtuplesOut = cms.OutputModule(
"PoolOutputModule",
fileName = cms.untracked.string('VtxedNtupleLoose_test.root'),
outputCommands = cms.untracked.vstring(
"drop *",
# "keep *_goodZToMuMuOneStandAloneMuonNtuple_*_*",
"keep *_goodZToMuMuVtxedNtupleLoose_*_*",
"keep *_goodZToMuMuVtxed1HLTNtupleLoose_*_*",
"keep *_goodZToMuMuVtxed2HLTNtupleLoose_*_*",
"keep *_goodZToMuMuVtxedAB1HLTNtupleLoose_*_*",
"keep *_goodZToMuMuVtxedBB2HLTNtupleLoose_*_*",
# "keep *_goodZToMuMuVtxedSameCharge2HLTNtupleLoose_*_*",
"keep *_goodZToMuMuVtxedSameChargeNtupleLoose_*_*",
# "keep *_nonIsolatedZToMuMuVtxedNtuple_*_*",
# "keep *_oneNonIsolatedZToMuMuVtxedNtuple_*_*",
# "keep *_twoNonIsolatedZToMuMuVtxedNtuple_*_*",
"keep *_goodZToMuMuVtxedOneStandAloneNtupleLoose_*_*",
"keep *_goodZToMuMuVtxedOneTrackNtupleLoose_*_*",
"keep *_goodZToMuMuVtxedOneTrackerMuonNtupleLoose_*_*",
# "keep *_goodZToMuMu2HLTVtxedNtuple_*_*",
),
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring(
"goodZToMuMuPathLoose",
"goodZToMuMu1HLTPathLoose",
"goodZToMuMu2HLTPathLoose",
"goodZToMuMuAB1HLTPathLoose",
"goodZToMuMuBB2HLTPathLoose",
# "goodZToMuMuSameCharge2HLTPathLoose",
"goodZToMuMuSameChargePathLoose",
# "nonIsolatedZToMuMuPath",
# "oneNonIsolatedZToMuMuPath",
# "twoNonIsolatedZToMuMuPath",
"goodZToMuMuOneTrackPathLoose",
"goodZToMuMuOneTrackerMuonPathLoose",
"goodZToMuMuOneStandAloneMuonPathLoose",
)
)
)
vtxedNtuplesOut.setLabel("vtxedNtuplesOut")
VtxedNtuplesOut.__iadd__(vtxedNtuplesOut)
VtxedNtuplesOut.setLabel("VtxedNtuplesOut")
## ## vertex refit for tight cut
## goodZToMuMuVtxedAtLeast1HLTTight = cms.EDProducer(
## "KalmanVertexFitCompositeCandProducer",
## src = cms.InputTag("goodZToMuMuAtLeast1HLTTight")
## )
## goodZToMuMuVtxed2HLTTight = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuVtxed2HLTTight.src = cms.InputTag("goodZToMuMu2HLTTight")
## goodZToMuMu2HLTPathTight.__iadd__(goodZToMuMuVtxed2HLTTight)
## goodZToMuMu2HLTPathTight.setLabel("goodZToMuMu2HLTTight")
## goodZToMuMuVtxed1HLTTight = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuVtxed1HLTTight.src = cms.InputTag("goodZToMuMu1HLTTight")
## goodZToMuMu1HLTPathTight.__iadd__(goodZToMuMuVtxed1HLTTight)
## goodZToMuMu1HLTPathTight.setLabel("goodZToMuMu1HLTTight")
## oneNonIsolatedZToMuMuVtxedTight= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## oneNonIsolatedZToMuMuVtxedTight.src= cms.InputTag("oneNonIsolatedZToMuMuAtLeast1HLTTight")
## oneNonIsolatedZToMuMuPathTight.__iadd__(oneNonIsolatedZToMuMuVtxedTight)
## oneNonIsolatedZToMuMuPathTight.setLabel("oneNonIsolatedZToMuMuTight")
## twoNonIsolatedZToMuMuVtxedTight = copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## twoNonIsolatedZToMuMuVtxedTight.src = cms.InputTag("twoNonIsolatedZToMuMuAtLeast1HLTTight")
## twoNonIsolatedZToMuMuPathTight.__iadd__(twoNonIsolatedZToMuMuVtxedTight)
## twoNonIsolatedZToMuMuPathTight.setLabel("twoNonIsolatedZToMuMuTight")
## goodZToMuMuSameCharge2HLTVtxedTight= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuSameCharge2HLTVtxedTight.src = cms.InputTag("goodZToMuMuSameCharge2HLTTight")
## goodZToMuMuSameCharge2HLTPathTight.__iadd__(goodZToMuMuSameCharge2HLTVtxedTight)
## goodZToMuMuSameCharge2HLTPathTight.setLabel("goodZToMuMuSameCharge2HLTTight")
## goodZToMuMuSameCharge1HLTVtxedTight= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuSameCharge1HLTVtxedTight.src = cms.InputTag("goodZToMuMuSameCharge1HLTTight")
## goodZToMuMuSameCharge1HLTPathTight.__iadd__(goodZToMuMuSameCharge1HLTVtxedTight)
## goodZToMuMuSameCharge1HLTPathTight.setLabel("goodZToMuMuSameCharge1HLTTight")
## goodZToMuMuOneStandAloneVtxedTight= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuOneStandAloneVtxedTight.src = cms.InputTag("goodZToMuMuOneStandAloneMuonFirstHLTTight")
## goodZToMuMuOneStandAloneMuonPathTight.__iadd__(goodZToMuMuOneStandAloneVtxedTight)
## goodZToMuMuOneStandAloneMuonPathTight.setLabel("goodZToMuMuOneStandAloneMuonTight")
## goodZToMuMuOneTrackVtxedTight= copy.deepcopy(goodZToMuMuVtxedAtLeast1HLTTight)
## goodZToMuMuOneTrackVtxedTight.src = cms.InputTag("goodZToMuMuOneTrackFirstHLTTight")
## goodZToMuMuOneTrackPathTight.__iadd__(goodZToMuMuOneTrackVtxedTight)
## goodZToMuMuOneTrackPathTight.setLabel("goodZToMuMuOneTrackTight")
## ### ntuples....
## goodZToMuMuVtxedNtupleTight = cms.EDProducer(
## "CandViewNtpProducer",
## src = cms.InputTag("goodZToMuMuVtxedTight"),
## variables = cms.VPSet(
## cms.PSet(
## tag = cms.untracked.string("mass"),
## quantity = cms.untracked.string("mass")
## ),
## cms.PSet(
## tag = cms.untracked.string("vertexNdof"),
## quantity = cms.untracked.string("vertexNdof")
## ),
## cms.PSet(
## tag = cms.untracked.string("vertexNormalizedChi2"),
## quantity = cms.untracked.string("vertexNormalizedChi2")
## ),
## )
## )
## goodZToMuMuVtxed2HLTNtupleTight = copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxed2HLTNtupleTight.src= cms.InputTag("goodZToMuMuVtxed2HLTTight")
## goodZToMuMu2HLTPathTight.__iadd__(goodZToMuMuVtxed2HLTTightNtupleTight)
## goodZToMuMu2HLTPathTight.setLabel("goodZToMuMu2HLTTight")
## goodZToMuMuVtxed1HLTNtupleTight = copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxed1HLTNtupleTight.src= cms.InputTag("goodZToMuMuVtxed1HLTTight")
## goodZToMuMu1HLTPathTight.__iadd__(goodZToMuMuVtxed1HLTNtupleTight)
## goodZToMuMu1HLTPathTight.setLabel("goodZToMuMu1HLTTight")
## oneNonIsolatedZToMuMuVtxedNtupleTight = copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## oneNonIsolatedZToMuMuVtxedNtupleTight.src = cms.InputTag("oneNonIsolatedZToMuMuVtxedTight")
## oneNonIsolatedZToMuMuPathTight.__iadd__(oneNonIsolatedZToMuMuVtxedNtupleTight)
## oneNonIsolatedZToMuMuPathTight.setLabel("oneNonIsolatedZToMuMuTight")
## twoNonIsolatedZToMuMuVtxedNtupleTight = copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## twoNonIsolatedZToMuMuVtxedNtupleTight.src = cms.InputTag("twoNonIsolatedZToMuMuVtxed")
## twoNonIsolatedZToMuMuPathTight.__iadd__(twoNonIsolatedZToMuMuVtxedNtupleTight)
## twoNonIsolatedZToMuMuPathTight.setLabel("twoNonIsolatedZToMuMuTight")
## goodZToMuMuVtxedSameCharge2HLTNtupleTight= copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxedSameCharge2HLTNtupleTight.src = cms.InputTag("goodZToMuMuVtxedSameCharge2HLTTight")
## goodZToMuMuSameCharge2HLTPathTight.__iadd__(goodZToMuMuVtxedSameCharge2HLTNtupleTight)
## goodZToMuMuSameCharge2HLTPathTight.setLabel("goodZToMuMuVtxedSameCharge2HLTTight")
## goodZToMuMuVtxedSameCharge1HLTNtupleTight= copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxedSameCharge1HLTNtupleTight.src = cms.InputTag("goodZToMuMuVtxedSameCharge1HLTTight")
## goodZToMuMuSameCharge1HLTPathTight.__iadd__(goodZToMuMuVtxedSameCharge1HLTNtupleTight)
## goodZToMuMuSameCharge1HLTPathTight.setLabel("goodZToMuMuSameCharge1HLTTight")
## goodZToMuMuVtxedOneStandAloneNtupleTight= copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxedOneStandAloneNtupleTight.src = cms.InputTag("goodZToMuMuOneStandAloneVtxedTight")
## goodZToMuMuOneStandAloneMuonPathTight.__iadd__(goodZToMuMuVtxedOneStandAloneNtupleTight)
## goodZToMuMuOneStandAloneMuonPathTight.setLabel("goodZToMuMuOneStandAloneMuonTight")
## goodZToMuMuVtxedOneTrackNtupleTight= copy.deepcopy(goodZToMuMuVtxedNtupleTight)
## goodZToMuMuVtxedOneTrackNtupleTight.src =cms.InputTag("goodZToMuMuOneTrackVtxed")
## goodZToMuMuOneTrackPathTight.__iadd__(goodZToMuMuVtxedOneTrackNtupleTight)
## goodZToMuMuOneTrackPathTight.setLabel("goodZToMuMuOneTrackTight")
## vtxedNtuplesOutTight = cms.OutputModule(
## "PoolOutputModule",
## fileName = cms.untracked.string('VtxedNtupleTight_test.root'),
## outputCommands = cms.untracked.vstring(
## "drop *",
## # "keep *_goodZToMuMuOneStandAloneMuonNtuple_*_*",
## "keep *_goodZToMuMuVtxedNtupleTight_*_*",
## "keep *_goodZToMuMuVtxed1HLTNtupleTight_*_*",
## "keep *_goodZToMuMuVtxed2HLTNtupleTight_*_*",
## "keep *_goodZToMuMuVtxedSameCharge2HLTNtupleTight_*_*",
## "keep *_goodZToMuMuVtxedSameCharge1HLTNtupleTight_*_*",
## "keep *_nonIsolatedZToMuMuVtxedNtupleTight_*_*",
## "keep *_oneNonIsolatedZToMuMuVtxedNtupleTight_*_*",
## "keep *_twoNonIsolatedZToMuMuVtxedNtupleTight_*_*",
## "keep *_goodZToMuMuVtxedOneStandAloneNtupleTight_*_*",
## "keep *_goodZToMuMuVtxedOneTrackNtupleTight_*_*",
## # "keep *_goodZToMuMu2HLTVtxedNtuple_*_*",
## ),
## SelectEvents = cms.untracked.PSet(
## SelectEvents = cms.vstring(
## "goodZToMuMuPathTight",
## "goodZToMuMu1HLTPathTight",
## "goodZToMuMu2HLTPathTight",
## "goodZToMuMuSameCharge2HLTPathTight",
## "goodZToMuMuSameCharge1HLTPathTight",
## "nonIsolatedZToMuMuPathTight",
## "oneNonIsolatedZToMuMuPathTight",
## "twoNonIsolatedZToMuMuPathTight",
## "goodZToMuMuOneTrackPathTight",
## "goodZToMuMuOneStandAloneMuonPathTight",
## )
## )
## )
## vtxedNtuplesOutTight.setLabel("vtxedNtuplesOutTight")
## VtxedNtuplesOutTight.__iadd__(vtxedNtuplesOutTight)
## VtxedNtuplesOutTight.setLabel("VtxedNtuplesOutTight")
|
StarcoderdataPython
|
11343162
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'BaseModel'
db.create_table('website_basemodel', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('last_modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('website', ['BaseModel'])
# Adding model 'Facility'
db.create_table('website_facility', (
('basemodel_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['website.BaseModel'], unique=True, primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('main_schedule', self.gf('django.db.models.fields.related.ForeignKey')(related_name='facility_main', to=orm['website.Schedule'])),
))
db.send_create_signal('website', ['Facility'])
# Adding M2M table for field special_schedules on 'Facility'
db.create_table('website_facility_special_schedules', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('facility', models.ForeignKey(orm['website.facility'], null=False)),
('schedule', models.ForeignKey(orm['website.schedule'], null=False))
))
db.create_unique('website_facility_special_schedules', ['facility_id', 'schedule_id'])
# Adding model 'Schedule'
db.create_table('website_schedule', (
('basemodel_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['website.BaseModel'], unique=True, primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('valid_start', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('valid_end', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
))
db.send_create_signal('website', ['Schedule'])
# Adding model 'OpenTime'
db.create_table('website_opentime', (
('basemodel_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['website.BaseModel'], unique=True, primary_key=True)),
('schedule', self.gf('django.db.models.fields.related.ForeignKey')(related_name='open_times', to=orm['website.Schedule'])),
('start_day', self.gf('django.db.models.fields.IntegerField')()),
('start_time', self.gf('django.db.models.fields.TimeField')()),
('end_day', self.gf('django.db.models.fields.IntegerField')()),
('end_time', self.gf('django.db.models.fields.TimeField')()),
))
db.send_create_signal('website', ['OpenTime'])
def backwards(self, orm):
# Deleting model 'BaseModel'
db.delete_table('website_basemodel')
# Deleting model 'Facility'
db.delete_table('website_facility')
# Removing M2M table for field special_schedules on 'Facility'
db.delete_table('website_facility_special_schedules')
# Deleting model 'Schedule'
db.delete_table('website_schedule')
# Deleting model 'OpenTime'
db.delete_table('website_opentime')
models = {
'website.basemodel': {
'Meta': {'object_name': 'BaseModel'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'website.opentime': {
'Meta': {'object_name': 'OpenTime', '_ormbases': ['website.BaseModel']},
'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['website.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'end_day': ('django.db.models.fields.IntegerField', [], {}),
'end_time': ('django.db.models.fields.TimeField', [], {}),
'schedule': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'open_times'", 'to': "orm['website.Schedule']"}),
'start_day': ('django.db.models.fields.IntegerField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
'website.facility': {
'Meta': {'object_name': 'Facility', '_ormbases': ['website.BaseModel']},
'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['website.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'main_schedule': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'facility_main'", 'to': "orm['website.Schedule']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'special_schedules': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'facility_special'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['website.Schedule']"})
},
'website.schedule': {
'Meta': {'object_name': 'Schedule', '_ormbases': ['website.BaseModel']},
'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['website.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'valid_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'valid_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['website']
|
StarcoderdataPython
|
8102827
|
from easydict import EasyDict as edict
# init
__C_SHHB = edict()
cfg_data = __C_SHHB
__C_SHHB.STD_SIZE = (768,1024)
__C_SHHB.TRAIN_SIZE = (576,768)
__C_SHHB.DATA_PATH = 'ProcessedData/shanghaitech_part_B'
__C_SHHB.MEAN_STD = ([0.452016860247, 0.447249650955, 0.431981861591],[0.23242045939, 0.224925786257, 0.221840232611])
__C_SHHB.LABEL_FACTOR = 1
__C_SHHB.LOG_PARA = 100.
__C_SHHB.RESUME_MODEL = ''#model path
__C_SHHB.TRAIN_BATCH_SIZE = 6 #imgs
__C_SHHB.VAL_BATCH_SIZE = 6 #
__C_SHHB.NUM_WORKERS = 0
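# A minimal usage sketch (the import path below is hypothetical and depends on
# where this settings module sits in the repository):
#
# from datasets.SHHB.setting import cfg_data
# mean, std = cfg_data.MEAN_STD
# train_h, train_w = cfg_data.TRAIN_SIZE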
|
StarcoderdataPython
|
8049173
|
<filename>tricycle_kinematic.py
"""
Example tricycle_kinematic.py
Author: <NAME> <<EMAIL>>
GitHub: https://github.com/botprof/agv-examples
"""
# %%
# SIMULATION SETUP
import numpy as np
import matplotlib.pyplot as plt
from mobotpy.models import Tricycle
from mobotpy.integration import rk_four
# Set the simulation time [s] and the sample period [s]
SIM_TIME = 15.0
T = 0.1
# Create an array of time values [s]
t = np.arange(0.0, SIM_TIME, T)
N = np.size(t)
# Set the wheelbase and track of the vehicle [m]
ELL_W = 2.50
ELL_T = 1.75
# %%
# MODEL DEFINITION
def tricycle_f(x, u):
"""Tricycle vehicle kinematic model."""
f = np.zeros(4)
f[0] = u[0] * np.cos(x[2])
f[1] = u[0] * np.sin(x[2])
f[2] = u[0] * 1.0 / ELL_W * np.tan(x[3])
f[3] = u[1]
return f
# %%
# RUN SIMULATION
# Initialize arrays that will be populated with our inputs and states
x = np.zeros((4, N))
u = np.zeros((2, N))
# Set the initial pose [m, m, rad, rad], velocities [m/s, rad/s]
x[0, 0] = 0.0
x[1, 0] = 0.0
x[2, 0] = np.pi / 2.0
x[3, 0] = 0.0
u[0, 0] = 5.0
u[1, 0] = 0
# Run the simulation
for k in range(1, N):
x[:, k] = rk_four(tricycle_f, x[:, k - 1], u[:, k - 1], T)
u[0, k] = 5.0
u[1, k] = 0.25 * np.sin(2.0 * t[k])
# %%
# MAKE SOME PLOTS
# Change some plot settings (optional)
plt.rc("text", usetex=True)
plt.rc("text.latex", preamble=r"\usepackage{cmbright,amsmath,bm}")
plt.rc("savefig", format="pdf")
plt.rc("savefig", bbox="tight")
# Plot the states as a function of time
fig1 = plt.figure(1)
fig1.set_figheight(6.4)
ax1a = plt.subplot(611)
plt.plot(t, x[0, :])
plt.grid(color="0.95")
plt.ylabel(r"$x$ [m]")
plt.setp(ax1a, xticklabels=[])
ax1b = plt.subplot(612)
plt.plot(t, x[1, :])
plt.grid(color="0.95")
plt.ylabel(r"$y$ [m]")
plt.setp(ax1b, xticklabels=[])
ax1c = plt.subplot(613)
plt.plot(t, x[2, :] * 180.0 / np.pi)
plt.grid(color="0.95")
plt.ylabel(r"$\theta$ [deg]")
plt.setp(ax1c, xticklabels=[])
ax1c = plt.subplot(614)
plt.plot(t, x[3, :] * 180.0 / np.pi)
plt.grid(color="0.95")
plt.ylabel(r"$\phi$ [deg]")
plt.setp(ax1c, xticklabels=[])
ax1c = plt.subplot(615)
plt.step(t, u[0, :], "C1", where="post")
plt.grid(color="0.95")
plt.ylabel(r"$v_1$ [m/s]")
plt.setp(ax1c, xticklabels=[])
ax1d = plt.subplot(616)
plt.step(t, u[1, :], "C1", where="post")
plt.grid(color="0.95")
plt.ylabel(r"$v_2$ [deg/s]")
plt.xlabel(r"$t$ [s]")
# Save the plot
plt.savefig("../agv-book/figs/ch3/tricycle_kinematic_fig1.pdf")
# Let's now use the class Tricycle for plotting
vehicle = Tricycle(ELL_W, ELL_T)
# Plot the position of the vehicle in the plane
fig2 = plt.figure(2)
plt.plot(x[0, :], x[1, :])
plt.axis("equal")
X_L, Y_L, X_R, Y_R, X_F, Y_F, X_B, Y_B = vehicle.draw(
x[0, 0], x[1, 0], x[2, 0], x[3, 0]
)
plt.fill(X_L, Y_L, "k")
plt.fill(X_R, Y_R, "k")
plt.fill(X_F, Y_F, "k")
plt.fill(X_B, Y_B, "C2", alpha=0.5, label="Start")
X_L, Y_L, X_R, Y_R, X_F, Y_F, X_B, Y_B = vehicle.draw(
x[0, N - 1], x[1, N - 1], x[2, N - 1], x[3, N - 1]
)
plt.fill(X_L, Y_L, "k")
plt.fill(X_R, Y_R, "k")
plt.fill(X_F, Y_F, "k")
plt.fill(X_B, Y_B, "C3", alpha=0.5, label="End")
plt.xlabel(r"$x$ [m]")
plt.ylabel(r"$y$ [m]")
plt.legend()
# Save the plot
plt.savefig("../agv-book/figs/ch3/tricycle_kinematic_fig2.pdf")
# Show all the plots to the screen
plt.show()
# %%
# MAKE AN ANIMATION
# Create and save the animation
ani = vehicle.animate(x, T, True, "../agv-book/gifs/ch3/tricycle_kinematic.gif")
# Show the movie to the screen
plt.show()
# # Show animation in HTML output if you are using IPython or Jupyter notebooks
# plt.rc('animation', html='jshtml')
# display(ani)
# plt.close()
|
StarcoderdataPython
|
1905582
|
from gather_texts import getTexts
from ngrams import classify_text
from server import classifyOffWorld
print("Getting text from MongoDB...")
texts = getTexts() #[0: 100]
print("Finished")
print("Concatenating text into a single file..")
texts_list = []
for text in texts:
texts_list.append( text['feed'] )
print("List created...")
part_list = 250 # int(len(texts_list)/5.0)
big_text = " ".join(texts_list[0:part_list])
big_text2 = " ".join(texts_list[part_list: 2*part_list])
print("Finished")
print("Creating classifications...")
owgrams1, owgrams2 = classifyOffWorld(big_text2)
grams1, grams2 = classify_text(big_text)
# find top 10 of 1-grams and 2-grams
total_grams1 = {}
for gram1 in owgrams1:
total_grams1[ gram1[0] ] = gram1[1]
total_grams2 = {}
for gram2 in owgrams2:
total_grams2[ gram2[0] ] = gram2[1]
for gram1 in grams1:
key = gram1[0]
if not key in total_grams1.keys():
total_grams1[key] = gram1[1]
else:
total_grams1[key] += gram1[1]
for gram2 in grams2:
key = gram2[0]
if not key in total_grams2.keys():
total_grams2[key] = gram2[1]
else:
total_grams2[key] += gram2[1]
grams1 = sorted(total_grams1.items(), key = lambda kv: kv[1])
grams1.reverse()
grams1 = (gram1 for gram1 in grams1[0:10])
grams2 = sorted(total_grams2.items(), key = lambda kv: kv[1])
grams2.reverse()
grams2 = (gram2 for gram2 in grams2[0:10])
print("Finished")
print("Writing classifications to file...")
f = open("classifications.txt", "w")
for gram1 in grams1:
f.write(gram1[0] + "\n")
for gram2 in grams2:
f.write(gram2[0] + "\n")
f.close()
print("Finished")
|
StarcoderdataPython
|
4832870
|
<reponame>facebookresearch/uimnet<filename>uimnet/modules/spectral_normalization/spectral_embedding.py
#!/usr/bin/env python3
#
# # Copyright (c) 2021 Facebook, inc. and its affiliates. All Rights Reserved
#
#
import torch
import torch.nn as nn
import torch.nn.functional as F
from uimnet.modules.spectral_normalization.base import SN
# Embedding layer with spectral norm
# We use num_embeddings as the dim instead of embedding_dim here
# for convenience sake
class SNEmbedding(nn.Embedding, SN):
def __init__(self, num_embeddings, embedding_dim, padding_idx=None,
max_norm=None, norm_type=2, scale_grad_by_freq=False,
sparse=False, _weight=None,
num_svs=1, num_itrs=1, eps=1e-12, sn_coef=1):
nn.Embedding.__init__(self, num_embeddings, embedding_dim, padding_idx,
max_norm, norm_type, scale_grad_by_freq,
sparse, _weight)
SN.__init__(self, num_svs, num_itrs, num_embeddings, eps=eps, sn_coef=sn_coef)
def forward(self, x):
return F.embedding(x, self.W_())
if __name__ == '__main__':
pass
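# A minimal usage sketch, assuming the SN mixin exposes the spectrally
# normalized weight through self.W_() exactly as forward() consumes it above:
#
# emb = SNEmbedding(num_embeddings=1000, embedding_dim=128)
# tokens = torch.randint(0, 1000, (4, 16))
# vectors = emb(tokens)  # shape (4, 16, 128)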
|
StarcoderdataPython
|
195295
|
from django.apps import AppConfig
class CustomThemeDemoAppConfig(AppConfig):
name = 'django_cradmin.demo.custom_theme_demo'
verbose_name = "Django CRadmin custom theme demo"
def ready(self):
from django_cradmin.apps.cradmin_kss_styleguide import styleguide_registry
styleguide = styleguide_registry.CradminStyleGuide(
unique_id='django_cradmin_theme_example',
label='Django CRadmin example theme',
appname='custom_theme_demo',
sourcefolder='styles/cradmin_theme_example',
sourcefile='styleguide.scss',
)
styleguide_registry.Registry.get_instance().add(styleguide)
|
StarcoderdataPython
|
1847166
|
"""
Factories for generating edXML for testing XModule import
"""
import inspect
from tempfile import mkdtemp
from factory import Factory, Sequence, lazy_attribute, post_generation
from fs.osfs import OSFS
from lxml import etree
from xblock.mixins import HierarchyMixin
from xmodule.modulestore import only_xmodules
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.x_module import XModuleMixin
class XmlImportData:
"""
Class to capture all of the data needed to actually run an XML import,
so that the Factories have something to generate
"""
def __init__(self, xml_node, xml=None, course_id=None,
default_class=None, policy=None,
filesystem=None, parent=None,
xblock_mixins=(), xblock_select=None):
self._xml_node = xml_node
self._xml_string = xml
self.course_id = course_id
self.default_class = default_class
self.filesystem = filesystem
self.xblock_mixins = xblock_mixins
self.xblock_select = xblock_select
self.parent = parent
if policy is None:
self.policy = {}
else:
self.policy = policy
@property
def xml_string(self):
"""Return the stringified version of the generated xml"""
if self._xml_string is not None:
return self._xml_string
return etree.tostring(self._xml_node)
def __repr__(self):
return "XmlImportData{!r}".format((
self._xml_node, self._xml_string, self.course_id,
self.default_class, self.policy,
self.filesystem, self.parent, self.xblock_mixins,
self.xblock_select,
))
# Extract all argument names used to construct XmlImportData objects,
# so that the factory doesn't treat them as XML attributes
XML_IMPORT_ARGS = inspect.getargspec(XmlImportData.__init__).args # lint-amnesty, pylint: disable=deprecated-method
class XmlImportFactory(Factory):
"""
Factory for generating XmlImportData's, which can hold all the data needed
to run an XModule XML import
"""
class Meta:
model = XmlImportData
filesystem = OSFS(mkdtemp())
xblock_mixins = (InheritanceMixin, XModuleMixin, HierarchyMixin)
xblock_select = only_xmodules
url_name = Sequence(str)
attribs = {}
policy = {}
inline_xml = True
tag = 'unknown'
course_id = 'edX/xml_test_course/101'
@classmethod
def _adjust_kwargs(cls, **kwargs):
"""
Adjust the kwargs to be passed to the generated class.
Any kwargs that match :fun:`XmlImportData.__init__` will be passed
through. Any other unknown `kwargs` will be treated as XML attributes
:param tag: xml tag for the generated :class:`Element` node
:param text: (Optional) specifies the text of the generated :class:`Element`.
:param policy: (Optional) specifies data for the policy json file for this node
:type policy: dict
:param attribs: (Optional) specify attributes for the XML node
:type attribs: dict
"""
tag = kwargs.pop('tag', 'unknown')
kwargs['policy'] = {'{tag}/{url_name}'.format(tag=tag, url_name=kwargs['url_name']): kwargs['policy']}
kwargs['xml_node'].text = kwargs.pop('text', None)
kwargs['xml_node'].attrib.update(kwargs.pop('attribs', {}))
# Make sure that the xml_module doesn't try and open a file to find the contents
# of this node.
inline_xml = kwargs.pop('inline_xml')
if inline_xml:
kwargs['xml_node'].set('not_a_pointer', 'true')
for key in list(kwargs.keys()):
if key not in XML_IMPORT_ARGS:
kwargs['xml_node'].set(key, kwargs.pop(key))
if not inline_xml:
kwargs['xml_node'].write(
kwargs['filesystem'].open(
'{}/{}.xml'.format(kwargs['tag'], kwargs['url_name'])
),
encoding='utf-8'
)
return kwargs
@lazy_attribute
def xml_node(self):
"""An :class:`xml.etree.Element`"""
return etree.Element(self.tag)
@post_generation
def parent(self, _create, extracted, **_):
"""Hook to merge this xml into a parent xml node"""
if extracted is None:
return
extracted._xml_node.append(self._xml_node) # pylint: disable=no-member, protected-access
extracted.policy.update(self.policy)
class CourseFactory(XmlImportFactory):
"""Factory for <course> nodes"""
tag = 'course'
name = '101'
static_asset_path = 'xml_test_course'
class ChapterFactory(XmlImportFactory):
"""Factory for <chapter> nodes"""
tag = 'chapter'
class SequenceFactory(XmlImportFactory):
"""Factory for <sequential> nodes"""
tag = 'sequential'
class VerticalFactory(XmlImportFactory):
"""Factory for <vertical> nodes"""
tag = 'vertical'
class ProblemFactory(XmlImportFactory):
"""Factory for <problem> nodes"""
tag = 'problem'
text = '<h1>Empty Problem!</h1>'
class HtmlFactory(XmlImportFactory):
"""Factory for <html> nodes"""
tag = 'html'
|
StarcoderdataPython
|
11231919
|
<filename>cloudrail/knowledge/rules/aws/context_aware/disallow_resources_in_default_vpc_rule.py
from typing import List, Dict
from cloudrail.knowledge.context.aws.networking_config.network_entity import NetworkEntity
from cloudrail.knowledge.context.aws.aws_environment_context import AwsEnvironmentContext
from cloudrail.knowledge.rules.aws.aws_base_rule import AwsBaseRule
from cloudrail.knowledge.rules.base_rule import Issue
from cloudrail.knowledge.rules.rule_parameters.base_paramerter import ParameterType
class DisallowResourcesInDefaultVpcRule(AwsBaseRule):
def execute(self, env_context: AwsEnvironmentContext, parameters: Dict[ParameterType, any]) -> List[Issue]:
issues: List[Issue] = []
network_entity_list: List[NetworkEntity] = env_context.get_all_network_entities()
for entity in network_entity_list:
if entity.network_resource.vpc is not None and entity.network_resource.vpc.is_default: # some resources can be out of vpc
issues.append(Issue(self._format_evidence(entity.get_friendly_name()), entity, entity))
return issues
def get_id(self) -> str:
return "disallow_default_vpc"
@staticmethod
def _format_evidence(entity: str) -> str:
return f"~Default VPC~. `{entity}` is defined within the default VPC."
def should_run_rule(self, environment_context: AwsEnvironmentContext) -> bool:
return bool(environment_context.get_all_network_entities())
|
StarcoderdataPython
|
9679520
|
<filename>main.py
import pandas as pd
from constants import *
from formatting import format_player_data
# main execution function to read player list and generate fantasy teams.
def main():
data = pd.read_csv('data/fav_players.csv')
# Format
data = format_player_data(data)
print(data[:5])
# separate into postions
defenders = pd.concat([data[data[POSITION1] == 'Defender'], data[data[POSITION2] == 'Defender']])
forwards = pd.concat([data[data[POSITION1] == 'Forward'], data[data[POSITION2] == 'Forward']])
midfielders = pd.concat([data[data[POSITION1] == 'Midfielder'], data[data[POSITION2] == 'Midfielder']])
rucks = pd.concat([data[data[POSITION1] == 'Ruck'], data[data[POSITION2] == 'Ruck']])
print(rucks)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
1619748
|
#!/usr/bin/env python
# coding: utf-8
# demo
"""
Author: <NAME>
Email: <EMAIL>
Create_Date: 2019/05/21
"""
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
torch.backends.cudnn.deterministic = True
torch.manual_seed(123)
import os, argparse, sys
import numpy as np
import glob
import matplotlib.pyplot as plt
plt.switch_backend('agg')
import warnings
warnings.filterwarnings("ignore")
from PIL import Image
sys.path.append('models')
import DepthNet
# =======================
# demo
# =======================
def demo(net, args):
data_dir = args.data_dir
img_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
for im in os.listdir(data_dir):
im_dir = os.path.join(data_dir, im)
print('Processing img: {}'.format(im_dir))
# Read image
img = Image.open(im_dir).convert('RGB')
ori_width, ori_height = img.size
int_width = args.img_size[0]
int_height = args.img_size[1]
img = img.resize((int_width, int_height), Image.ANTIALIAS)
tensor_img = img_transform(img)
        # forward (Variable/volatile is deprecated; the caller already wraps
        # this call in torch.no_grad(), so a plain tensor input is enough)
        input_img = tensor_img.cuda().unsqueeze(0)
        output = net(input_img)
# Normalization and save results
depth = output.squeeze().cpu().data.numpy()
min_d, max_d = depth.min(), depth.max()
depth_norm = (depth - min_d) / (max_d - min_d) * 255
depth_norm = depth_norm.astype(np.uint8)
image_pil = Image.fromarray(depth_norm)
output_dir = os.path.join(args.result_dir, im)
image_pil = image_pil.resize((ori_width, ori_height), Image.BILINEAR)
plt.imsave(output_dir, np.asarray(image_pil), cmap='inferno')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='MRDP Testing/Evaluation')
    parser.add_argument('--img_size', default=[448, 448], nargs=2, type=int, help='Image size of network input (width height)')
parser.add_argument('--data_dir', default='examples', type=str, help='Data path')
parser.add_argument('--result_dir', default='demo_results', type=str, help='Directory for saving results, default: demo_results')
parser.add_argument('--gpu_id', default=0, type=int, help='GPU id, default:0')
args = parser.parse_args()
args.checkpoint = 'model.pth.tar'
if not os.path.exists(args.result_dir):
os.makedirs(args.result_dir)
    gpu_id = args.gpu_id
    torch.cuda.set_device(gpu_id)  # torch.cuda.device(gpu_id) alone is a no-op statement
net = DepthNet.DepthNet()
net = torch.nn.DataParallel(net, device_ids=[0]).cuda()
checkpoint = torch.load(args.checkpoint)
net.load_state_dict(checkpoint['state_dict'])
net.eval()
print('Begin to test ...')
with torch.no_grad():
demo(net, args)
print('Finished!')
|
StarcoderdataPython
|
5173181
|
#!/usr/bin/python2.5
"""
Package for general database editor web interface.
"""
|
StarcoderdataPython
|
317543
|
<gh_stars>0
import requests
# petition get
def generate_request_get(url, params={}):
response = requests.get(url, params=params)
if response.status_code == 200:
return response.json()
# petition put
def generate_request_put(url, data):
response = requests.put(url, data=data)
if response.status_code == 200:
return response.json()
# petition delete
def generate_request_delete(url):
response = requests.delete(url)
if response.status_code == 200:
return response.json()
# petition post
def generate_request_post(url, data):
response = requests.post(url, data=data)
if response.status_code == 200:
return response.json()
# get json clients
def get_clients(params={}):
response = generate_request_get('http://127.0.0.1:8000/api/v1/client', params)
if response:
clients = response.get('results')
return clients
return ""
# delete request credit
def delete_request(pk=None):
response = generate_request_delete(f'http://127.0.0.1:8000/api/v1/request/{pk}')
if response:
request = response.get('message')
return request
return ""
# put request credit
def put_request(pk=None, data={}):
response = generate_request_put(f'http://127.0.0.1:8000/api/v1/request/{pk}', data)
if response:
request = response.get('results')
return request
return ""
# post request credit
def post_request(pk=None, data={}):
response = generate_request_post(f'http://127.0.0.1:8000/api/v1/request/{pk}', data)
if response:
request = response.get('results')
return request
return ""
|
StarcoderdataPython
|
11295642
|
from django.urls import include, path
urlpatterns = [
path('profiles/', include('tango_user.urls')),
path('video/', include('video.urls')),
]
|
StarcoderdataPython
|
3292299
|
<gh_stars>1000+
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-28 23:15
from hanlp.components.ner_tf import TransformerNamedEntityRecognizerTF
from hanlp.datasets.ner.msra import MSRA_NER_CHAR_LEVEL_TRAIN, MSRA_NER_CHAR_LEVEL_DEV, MSRA_NER_CHAR_LEVEL_TEST
from tests import cdroot
cdroot()
recognizer = TransformerNamedEntityRecognizerTF()
save_dir = 'data/model/ner/ner_albert_base_zh_msra_sparse_categorical_crossentropy'
recognizer.fit(MSRA_NER_CHAR_LEVEL_TRAIN, MSRA_NER_CHAR_LEVEL_DEV, save_dir, transformer='albert_base_zh',
learning_rate=5e-5,
metrics='f1')
recognizer.load(save_dir)
print(recognizer.predict(list('上海华安工业(集团)公司董事长谭旭光和秘书张晚霞来到美国纽约现代艺术博物馆参观。')))
recognizer.evaluate(MSRA_NER_CHAR_LEVEL_TEST, save_dir=save_dir)
print(f'Model saved in {save_dir}')
|
StarcoderdataPython
|
1787113
|
<filename>finalists/jun2tong/utils/train_ni.py
import numpy as np
import torch
from torch.utils import data
from .common import check_ext_mem, check_ram_usage
from .wrapper import CustomTensorDataset
def train_net(optimizer, scheduler, model, criterion, data_loader, reg_coef,
train_ep, device="cpu"):
cur_ep = 0
stats = {"ram": [], "disk": []}
for ep in range(train_ep):
stats['disk'].append(check_ext_mem("cl_ext_mem"))
stats['ram'].append(check_ram_usage())
model.train()
print("training ep: ", ep)
correct_cnt, ave_loss = 0, 0
it = 0
for x_mb, y_mb, p_logits_mb in data_loader:
x_mb = x_mb.to(device)
y_mb = y_mb.to(device)
all_out = model(x_mb)[0]
loss = criterion["cls"](all_out, y_mb)
dist_loss = 0
if not isinstance(p_logits_mb, list):
p_logits_mb = p_logits_mb.to(device)
dist_loss = criterion["dist"](torch.log_softmax(all_out, dim=1),
torch.softmax(p_logits_mb, dim=1))
# dist_loss = criterion["dist"](all_out, p_logits_mb)
all_loss = loss + reg_coef*dist_loss
else:
all_loss = loss
ave_loss += all_loss.item()
optimizer.zero_grad()
all_loss.backward()
optimizer.step()
pred_label = torch.argmax(all_out, dim=1)
correct_cnt += (pred_label.detach() == y_mb).sum().cpu()
acc = correct_cnt.item() / ((it + 1) * y_mb.size(0))
ave_loss /= ((it + 1) * y_mb.size(0))
if it % 100 == 0:
# print(f'==>>> it: {it}, avg. loss: {ave_loss: .6f}, running train acc: {acc: .3f}')
print(f'==>>> it: {it}, dist. loss: {dist_loss: .6f}, cls loss: {loss: .6f}, running train acc: {acc: .3f}')
it += 1
cur_ep += 1
scheduler.step()
return stats
def nn_mean_classify(model, test_set, mb_size, Mus, preproc=None, use_cuda="cpu", verbose=True):
model.eval()
acc_x_task = []
stats = {'accs': [], 'acc': []}
preds = []
for (x, y), t in test_set:
if preproc:
x = preproc(x)
ds = CustomTensorDataset(x, torch.from_numpy(y).type(torch.LongTensor))
dataloader = torch.utils.data.DataLoader(ds, batch_size=mb_size, shuffle=False, num_workers=4)
model = model.to(use_cuda)
correct_cnt, ave_loss = 0, 0
with torch.no_grad():
for x_mb, y_mb in dataloader:
x_mb = x_mb.to(use_cuda)
feas = model.features(x_mb)
dists = torch.zeros(feas.shape[0], len(Mus))
for i in range(len(Mus)):
dists[:, i] = torch.sqrt(torch.pow(feas.sub(Mus[i].to(use_cuda)), 2).sum(dim=1))
# pred_label = torch.argmin(dists, dim=1)
pred_label = torch.topk(dists, 1, dim=1, largest=False)[1]
correct_cnt += sum(pred_label.view(-1).numpy() == np.array(y_mb))
preds += list(pred_label)
acc = correct_cnt / len(ds)
if verbose:
print(f'TEST Acc. Task {t}==>>> acc: {acc:.3f}')
acc_x_task.append(acc)
stats['accs'].append(acc)
stats['acc'].append(np.mean(acc_x_task))
return stats, preds
|
StarcoderdataPython
|
9618254
|
from random import randint
from time import sleep
def sorteia(lista):
    print('Drawing 5 values')
for cont in range(0, 5):
n = randint(0, 10)
lista.append(n)
print(f' {n} ', end='', flush=True)
sleep(0.3)
    print('DONE!')
def somapar():
soma = 0
for v in número:
if v % 2 == 0:
soma += v
print(soma)
número = list()
sorteia(número)
print(número, end='')
print('. The sum of the even numbers is ', end='')
somapar()
|
StarcoderdataPython
|
11307550
|
from melodically.harmony import midi_to_std, get_root, harmonic_affinities, modes_dict
class HarmonicState:
"""
    This class makes it possible to monitor and update an internal harmonic
    state, influenced by external note input
"""
def __init__(self, buffer_size=16):
# contains the input notes in std notation
self.noteBuffer = []
# max size of the buffer
self.bufferSize = buffer_size
# mode dictionary used to represent a modal scale
# root: root note of the scale
# mode_signature_index: index of the interval scheme of a modal scale
# mode_index: index of the mode inside a certain interval scheme
self.currentMode = {'root': 'C', 'mode_signature_index': 0, 'mode_index': 0}
def push_notes(self, new_notes):
"""
Pushes new note inside the buffer.
If the buffer overflows, the older notes are discarded.
:param new_notes: list of new notes
"""
self.noteBuffer = self.noteBuffer + new_notes
while len(self.noteBuffer) > self.bufferSize:
self.noteBuffer.pop(0) # removing old notes
def update_scale(self):
"""
Updates the currentMode attribute based on the notes in the buffer,
applying the harmonic_affinities function to them.
:return: currentMode
"""
notes_std = [n for n in self.noteBuffer]
if notes_std:
root = get_root(notes_std)
modes_affinities = harmonic_affinities(root, notes_std)
mode_signature_index = modes_affinities.index(max(modes_affinities))
            tmp = modes_affinities[mode_signature_index]  # affinities within the best-matching interval scheme
mode_index = tmp.index(max(tmp))
self.currentMode['root'] = root
self.currentMode['mode_signature_index'] = mode_signature_index
self.currentMode['mode_index'] = mode_index
return self.currentMode
def get_mode_notes(self):
"""
        Gets the notes of the current modal scale.
:return: list of notes of the current modal scale
"""
self.update_scale()
tmp = modes_dict[self.currentMode['root']]
tmp = tmp[self.currentMode['mode_signature_index']]
return tmp[self.currentMode['mode_index']]
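# A short usage sketch (not part of the original file); the note names below
# are illustrative and assume melodically's standard notation accepts plain
# pitch classes such as 'C' and 'E'.
if __name__ == '__main__':
    state = HarmonicState(buffer_size=8)
    state.push_notes(['C', 'E', 'G', 'B'])
    print(state.update_scale())  # e.g. {'root': ..., 'mode_signature_index': ..., 'mode_index': ...}
    print(state.get_mode_notes())  # notes of the inferred modal scale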
|
StarcoderdataPython
|
1975353
|
from itertools import *
# Reads n, then n pairs (s, t); for each pair, prints how many of the
# s-values fall in the half-open interval (s, t], using a prefix-sum array.
(n,), *d = [[*map(int, o.split())] for o in open(0)]
S, T = zip(*d)
a = [0] * -~max(T)  # -~x == x + 1, so indices 0..max(T) are covered
for s in S:
    a[s] += 1  # histogram of the s-values
*a, = accumulate(a)  # prefix sums: a[i] == number of s-values <= i
for s, t in d:
    print(a[t] - a[s])  # count of s-values v with s < v <= t
|
StarcoderdataPython
|
11312976
|
#!/usr/bin/env python
# coding=utf-8
"""
Copyright (C) 2010-2013, <NAME> <<EMAIL>>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
from __future__ import absolute_import
import requests
import json
from celery import shared_task, Task
from sdk.constants import WXMP_CONFIG, WXMP_ACCESS_TOKEN
@shared_task
def get_basic_info(open_id):
access_token = Task.app.db.hget(WXMP_ACCESS_TOKEN, 'access_token')
if not access_token or not open_id:
print "Failed to get access_token or open_id when in get_basic_info()"
return None
url = "https://api.weixin.qq.com/cgi-bin/user/info?access_token={0}&openid={1}&lang=zh_CN".format(
access_token, open_id
)
try:
resp = requests.get(url)
resp = json.loads(resp.content)
    except Exception as e:
        print("Failed to get basic user info because of: {0}".format(e))
return None
if not isinstance(resp, dict):
print "Invalid response format when get basic user info from Weixin server"
return None
    if 'errcode' in resp and resp['errcode'] != 0:
        print("Error response when getting basic user info from Weixin server: {0}".format(resp['errmsg']))
return None
return resp
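# Example invocation (not part of the original module); it assumes a configured
# Celery app whose custom Task class exposes the Redis handle used above:
#     get_basic_info.delay('some-open-id')   # asynchronous, via a worker
#     info = get_basic_info('some-open-id')  # synchronous, e.g. for debugging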
|
StarcoderdataPython
|
3583193
|
<reponame>dmadea/Spectra-Manipulator<filename>spectramanipulator/dialogs/stylewidget_gui.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'spectramanipulator/dialogs\stylewidget_gui.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(419, 334)
self.gridLayout_2 = QtWidgets.QGridLayout(Form)
self.gridLayout_2.setObjectName("gridLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.combSymbol = QtWidgets.QComboBox(Form)
self.combSymbol.setObjectName("combSymbol")
self.gridLayout.addWidget(self.combSymbol, 6, 2, 1, 2)
self.label_7 = QtWidgets.QLabel(Form)
self.label_7.setObjectName("label_7")
self.gridLayout.addWidget(self.label_7, 8, 0, 1, 1)
self.label_6 = QtWidgets.QLabel(Form)
self.label_6.setObjectName("label_6")
self.gridLayout.addWidget(self.label_6, 7, 0, 1, 1)
self.btnSymBrushColor = QtWidgets.QPushButton(Form)
self.btnSymBrushColor.setAutoFillBackground(False)
self.btnSymBrushColor.setObjectName("btnSymBrushColor")
self.gridLayout.addWidget(self.btnSymBrushColor, 7, 2, 1, 1)
self.btnSymFillColor = QtWidgets.QPushButton(Form)
self.btnSymFillColor.setAutoFillBackground(False)
self.btnSymFillColor.setObjectName("btnSymFillColor")
self.gridLayout.addWidget(self.btnSymFillColor, 8, 2, 1, 1)
self.sbSymBrushAlpha = QtWidgets.QSpinBox(Form)
self.sbSymBrushAlpha.setMaximum(255)
self.sbSymBrushAlpha.setObjectName("sbSymBrushAlpha")
self.gridLayout.addWidget(self.sbSymBrushAlpha, 7, 3, 1, 1)
self.cbSymBrushDefault = QtWidgets.QCheckBox(Form)
self.cbSymBrushDefault.setObjectName("cbSymBrushDefault")
self.gridLayout.addWidget(self.cbSymBrushDefault, 7, 4, 1, 1)
self.cbSymFillDefault = QtWidgets.QCheckBox(Form)
self.cbSymFillDefault.setObjectName("cbSymFillDefault")
self.gridLayout.addWidget(self.cbSymFillDefault, 8, 4, 1, 1)
self.sbSymFillAlpha = QtWidgets.QSpinBox(Form)
self.sbSymFillAlpha.setMaximum(255)
self.sbSymFillAlpha.setObjectName("sbSymFillAlpha")
self.gridLayout.addWidget(self.sbSymFillAlpha, 8, 3, 1, 1)
self.label_5 = QtWidgets.QLabel(Form)
self.label_5.setObjectName("label_5")
self.gridLayout.addWidget(self.label_5, 6, 0, 1, 1)
self.label = QtWidgets.QLabel(Form)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 3, 0, 1, 1)
self.btnColor = QtWidgets.QPushButton(Form)
self.btnColor.setAutoFillBackground(False)
self.btnColor.setObjectName("btnColor")
self.gridLayout.addWidget(self.btnColor, 3, 2, 1, 1)
self.sbAlpha = QtWidgets.QSpinBox(Form)
self.sbAlpha.setMaximum(255)
self.sbAlpha.setObjectName("sbAlpha")
self.gridLayout.addWidget(self.sbAlpha, 3, 3, 1, 1)
self.cbColor = QtWidgets.QCheckBox(Form)
self.cbColor.setObjectName("cbColor")
self.gridLayout.addWidget(self.cbColor, 3, 4, 1, 1)
self.cbPlotLegend = QtWidgets.QCheckBox(Form)
self.cbPlotLegend.setObjectName("cbPlotLegend")
self.gridLayout.addWidget(self.cbPlotLegend, 10, 0, 1, 1)
self.cbLineWidth = QtWidgets.QCheckBox(Form)
self.cbLineWidth.setObjectName("cbLineWidth")
self.gridLayout.addWidget(self.cbLineWidth, 4, 4, 1, 1)
self.dsbLineWidth = QtWidgets.QDoubleSpinBox(Form)
self.dsbLineWidth.setDecimals(1)
self.dsbLineWidth.setSingleStep(0.1)
self.dsbLineWidth.setObjectName("dsbLineWidth")
self.gridLayout.addWidget(self.dsbLineWidth, 4, 2, 1, 2)
self.cbLineType = QtWidgets.QCheckBox(Form)
self.cbLineType.setObjectName("cbLineType")
self.gridLayout.addWidget(self.cbLineType, 5, 4, 1, 1)
self.label_3 = QtWidgets.QLabel(Form)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 5, 0, 1, 1)
self.label_4 = QtWidgets.QLabel(Form)
self.label_4.setObjectName("label_4")
self.gridLayout.addWidget(self.label_4, 4, 0, 1, 1)
self.label_2 = QtWidgets.QLabel(Form)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 2, 3, 1, 1)
self.combLineType = QtWidgets.QComboBox(Form)
self.combLineType.setObjectName("combLineType")
self.gridLayout.addWidget(self.combLineType, 5, 2, 1, 2)
self.label_8 = QtWidgets.QLabel(Form)
self.label_8.setObjectName("label_8")
self.gridLayout.addWidget(self.label_8, 9, 0, 1, 1)
self.dsbSymSize = QtWidgets.QDoubleSpinBox(Form)
self.dsbSymSize.setDecimals(1)
self.dsbSymSize.setSingleStep(0.1)
self.dsbSymSize.setObjectName("dsbSymSize")
self.gridLayout.addWidget(self.dsbSymSize, 9, 2, 1, 2)
self.verticalLayout.addLayout(self.gridLayout)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.btnSetDefauls = QtWidgets.QPushButton(Form)
self.btnSetDefauls.setAutoFillBackground(False)
self.btnSetDefauls.setObjectName("btnSetDefauls")
self.horizontalLayout.addWidget(self.btnSetDefauls)
self.btnCancel = QtWidgets.QPushButton(Form)
self.btnCancel.setAutoFillBackground(False)
self.btnCancel.setObjectName("btnCancel")
self.horizontalLayout.addWidget(self.btnCancel)
self.btnApply = QtWidgets.QPushButton(Form)
self.btnApply.setAutoFillBackground(False)
self.btnApply.setObjectName("btnApply")
self.horizontalLayout.addWidget(self.btnApply)
self.btnOK = QtWidgets.QPushButton(Form)
self.btnOK.setAutoFillBackground(False)
self.btnOK.setObjectName("btnOK")
self.horizontalLayout.addWidget(self.btnOK)
self.verticalLayout.addLayout(self.horizontalLayout)
self.gridLayout_2.addLayout(self.verticalLayout, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Form"))
self.label_7.setText(_translate("Form", "Symbol fill color"))
self.label_6.setText(_translate("Form", "Symbol brush color"))
self.btnSymBrushColor.setText(_translate("Form", "Pick color"))
self.btnSymFillColor.setText(_translate("Form", "Pick color"))
self.cbSymBrushDefault.setText(_translate("Form", "Set same as line"))
self.cbSymFillDefault.setText(_translate("Form", "Set same as line"))
self.label_5.setText(_translate("Form", "Symbol type"))
self.label.setText(_translate("Form", "Color"))
self.btnColor.setText(_translate("Form", "Pick color"))
self.cbColor.setText(_translate("Form", "Default"))
self.cbPlotLegend.setText(_translate("Form", "Plot legend"))
self.cbLineWidth.setText(_translate("Form", "Default"))
self.cbLineType.setText(_translate("Form", "Default"))
self.label_3.setText(_translate("Form", "Line type"))
self.label_4.setText(_translate("Form", "Line width"))
self.label_2.setText(_translate("Form", "Alpha"))
self.label_8.setText(_translate("Form", "Symbol size"))
self.btnSetDefauls.setText(_translate("Form", "Set defaults"))
self.btnCancel.setText(_translate("Form", "Cancel"))
self.btnApply.setText(_translate("Form", "Apply"))
self.btnOK.setText(_translate("Form", "OK"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Form = QtWidgets.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
|
StarcoderdataPython
|
3535813
|
<reponame>WangErFeiZi/Mei
from flask_wtf import FlaskForm as Form
from flask_login import current_user
from ..models import User
from wtforms import ValidationError, StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired, Length, Email, Regexp, EqualTo
class LoginForm(Form):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Keep me logged in')
    submit = SubmitField('Log in')
class ResetPasswordForm(Form):
    password = PasswordField('Enter a new password', validators=[DataRequired(), EqualTo('password2', message='The two passwords do not match.')])
    password2 = PasswordField('<PASSWORD>', validators=[DataRequired()])
    submit = SubmitField('Change')
class ResetPasswordEmailForm(Form):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    submit = SubmitField('Send')
class ChangePasswordForm(Form):
    old_password = PasswordField('Enter your current password', validators=[DataRequired()])
    password = PasswordField('Enter <PASSWORD>', validators=[DataRequired(), EqualTo('password2', message='The two passwords do not match.')])
    password2 = PasswordField('<PASSWORD>', validators=[DataRequired()])
    submit = SubmitField('Change')
    def validate_old_password(self, field):
        if not current_user.verify_password(field.data):
            raise ValidationError('Incorrect password, please try again.')
    def validate_password(self, field):
        if current_user.verify_password(field.data):
            raise ValidationError('The new password cannot be the same as the current one.')
class ChangeEmailForm(Form):
    email = StringField('New email address', validators=[DataRequired(), Length(1, 64), Email()])
    submit = SubmitField('Send')
    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first():
            raise ValidationError('This email address is already in use.')
class RegistrationForm(Form):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    username = StringField('Nickname', validators=[DataRequired(), Length(1, 64),
                           Regexp('^[\u4e00-\u9fa5A-Za-z][\u4e00-\u9fa5A-Za-z0-9_.]*$',
                                  0,
                                  'Usernames must have only Chinese characters, letters, '
                                  'numbers, dots or underscores.')])
    password = PasswordField('Password', validators=[DataRequired(), EqualTo('password2', message='The two passwords do not match.')])
    password2 = PasswordField('Enter the password again', validators=[DataRequired()])
    submit = SubmitField('Register')
    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first():
            raise ValidationError('This email address is already in use.')
    def validate_username(self, field):
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('This nickname is already in use.')
|
StarcoderdataPython
|
9720965
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from libtbx.program_template import ProgramTemplate
from mmtbx.validation import rama_z
from mmtbx.validation.ramalyze import ramalyze
from mmtbx.validation.ramalyze import res_type_labels
from cctbx.maptbx.box import shift_and_box_model
from libtbx.utils import Sorry, null_out
from libtbx import Auto
import os
# =============================================================================
class Program(ProgramTemplate):
description = '''
mmtbx.rama_z: Tool to calculate Rama-Z score. Validation of Ramachandran plot.
Usage examples:
mmtbx.rama_z model1.pdb
'''
datatypes = ['model', 'phil']
master_phil_str = """\
write_HSL_models = False
.type = bool
write_HSL_plot = False
.type = bool
write_HSL_general_only = True
.type = bool
write_whole_plot = False
.type = bool
write_whole_general_only = True
.type = bool
"""
# write everything:
# write_HSL_models=True write_HSL_plot=True write_HSL_general_only=False write_whole_plot=True write_whole_general_only=False
# write only general plots:
# write_HSL_plot=True write_whole_plot=False
#
# ---------------------------------------------------------------------------
def validate(self):
print('Validating inputs', file=self.logger)
self.data_manager.has_models()
m = self.data_manager.get_model()
print ('Inputs OK', file=self.logger)
# ---------------------------------------------------------------------------
def _write_plots_if_needed(self, model, label, type_of_plot='whole'):
write_plot = getattr(self.params, "write_%s_plot" % type_of_plot)
write_general_only = getattr(self.params, "write_%s_general_only" % type_of_plot)
if write_plot:
self.rama = ramalyze(model.get_hierarchy(), out=null_out())
self.plots = self.rama.get_plots(
show_labels=False,
point_style='.',
markersize=3,
markeredgecolor="red",
dpi=300,
markerfacecolor="yellow")
plots_to_write = range(6)
if write_general_only:
plots_to_write = [0]
for i in plots_to_write:
file_label = res_type_labels[i].replace("/", "_")
fn = "%s.png" % self.get_default_output_filename(
prefix='%s_%s_' % (self.inp_fn, label),
suffix=file_label,
serial=Auto)
if os.path.isfile(fn) and not self.params.output.overwrite:
raise Sorry("%s already exists and overwrite is set to False." % fn)
print("Saving:", fn, file=self.logger)
self.plots[i].save_image(fn, dpi=300)
def run(self):
models = []
for model_name in self.data_manager.get_model_names():
models.append(self.data_manager.get_model(model_name))
# model = self.data_manager.get_model()
self.inp_fn = os.path.basename(self.data_manager.get_default_model_name())[:-4]
self.rama_z = rama_z.rama_z(
models = models,
log = self.logger)
if len(models) == 1:
model = models[0]
cs = model.crystal_symmetry()
if cs is None:
model = shift_and_box_model(model)
self._write_plots_if_needed(model, label='whole', type_of_plot='whole')
helix_sel, sheet_sel, loop_sel = self.rama_z.get_ss_selections()
if model.get_hierarchy().models_size() != 1:
print ("Warning! Outputting partial models and plots are not supported \
for multi-model files", file=self.logger)
else:
for sel, label in [(helix_sel, "helix"),
(sheet_sel, "sheet"),
(loop_sel, "loop")]:
selected_model = model.select(sel)
if self.params.write_HSL_models:
pdb_str = selected_model.model_as_pdb()
fn = "%s" % self.get_default_output_filename(
prefix='%s_' % self.inp_fn,
suffix=label,
serial=Auto)
print("Writing out partial model: %s" % fn, file=self.logger)
self.data_manager.write_model_file(selected_model, filename=fn)
self._write_plots_if_needed(selected_model, label, type_of_plot='HSL')
result = self.get_results()
res_info = self.rama_z.get_detailed_values()
print ("Individual residues info:", file=self.logger)
print ("Residue name, type, SS, (phi, psi), Z", file=self.logger)
for i in res_info:
print ('%4s %10s %1s (%7.2f, %7.2f) %7.4f' % (
i[2], res_type_labels[i[1]], i[3], i[4], i[5], i[6]), file=self.logger)
print(result.as_string(prefix=""), file = self.logger)
# ---------------------------------------------------------------------------
def get_results(self):
return self.rama_z.get_result()
|
StarcoderdataPython
|
3485588
|
<filename>appengine/findit/dto/flake_try_job_report.py
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from dto.isolated_tests import IsolatedTests
from dto.try_job_report import TryJobReport
class FlakeTryJobReport(TryJobReport):
"""Represents output of a flake try job."""
# Maps the step to the isolate sha of the compiled binaries.
isolated_tests = IsolatedTests
|
StarcoderdataPython
|
5080863
|
def solution(A):
    if len(A) == 0:
        return 0
    # For each starting index, follow the chain head -> A[head] -> ... until it
    # loops back to the head, collecting the visited values.
    all_beans_list = []
    for index in range(len(A)):
        beans_list = []
        head = A[index]
        tail = A[head]
        beans_list.append(tail)
        while head != tail:
            tail = A[tail]
            beans_list.append(tail)
        all_beans_list.append(beans_list)
    # Length of the longest chain (compare by length, not lexicographic order).
    return len(max(all_beans_list, key=len))
if __name__ == '__main__':
A = [5,4,0,3,1,6,2]
result = solution(A)
print(result)
|
StarcoderdataPython
|
145952
|
<filename>old-code/move-sitelinks.py
import csv
from sys import *
reader = csv.DictReader(open('genes-wikipedia.tab', 'r'), delimiter='\t')
gqs = {}
for item in reader:
iturl = item.get('p')
qit = iturl[iturl.rfind('/')+1:]
lang = item.get('lang')
lemma = item.get('lemma')
d = gqs.get(qit)
if d is None:
d = {}
d[lang] = lemma
gqs[qit] = d
else:
d[lang] = lemma
reader = csv.DictReader(open('proteins-wikipedia.tab', 'r'), delimiter='\t')
pqs = {}
for item in reader:
iturl = item.get('p')
qit = iturl[iturl.rfind('/')+1:]
lang = item.get('lang')
lemma = item.get('lemma')
d = pqs.get(qit)
if d is None:
d = {}
d[lang] = lemma
pqs[qit] = d
else:
d[lang] = lemma
"""
its = set()
dits = set()
reader = csv.DictReader(open('t.tab', 'r'), delimiter='\t')
for item in reader:
iturl = item.get('g')
git = iturl[iturl.rfind('/')+1:]
if git in its:
dits.add(git)
else:
its.add(git)
reader = csv.DictReader(open('tt.tab', 'r'), delimiter='\t')
labs = {}
for item in reader:
iturl = item.get('p')
qit = iturl[iturl.rfind('/')+1:]
lab = item.get('pLabel')
uni = item.get('u')
labs[qit] = (lab,uni)
reader = csv.DictReader(open('t.tab', 'r'), delimiter='\t')
for item in reader:
iturl = item.get('g')
git = iturl[iturl.rfind('/')+1:]
if git not in dits:
continue
iturl = item.get('p')
pit = iturl[iturl.rfind('/')+1:]
pl = item.get('pLabel')
lem = item.get('lemma')
print('"","{}","{}","{}","{}"'.format(lem,labs.get(pit),git,pit))
"""
reader = csv.DictReader(open('ttt.csv', 'r'), delimiter=',')
for item in reader:
git = item.get('G')
pit = item.get('P')
lem = item.get('Name')
g = gqs.get(git)
p = pqs.get(pit)
l = []
if g is None:
continue
for glang in g.keys():
if p is None or p.get(glang) is None:
l.append((glang, g.get(glang)))
for link in l:
print('-{}|S{}wiki|""'.format(git, link[0]))
print('{}|Lde|"{}"'.format(pit,lem))
print('{}|Dde|"Protein in Homo sapiens"'.format(pit))
for link in l:
print('{}|S{}wiki|"{}"'.format(pit, link[0], link[1]))
|
StarcoderdataPython
|
12824081
|
<gh_stars>0
import string
import nltk
class Preprocesar:
def __init__(self, corpus):
self.corpus = corpus
def __call__(self, pad='<PAD>'):
"""
        Preprocesses the text to obtain token vectors from it,
        lower-casing words and removing punctuation.
        :param pad: value to use for padding, which is added to the vocabulary
        :return: a list of tokens per text and the generated word-to-index vocabulary
"""
nltk.download('punkt')
corpus_prep = []
vocab = []
for response in self.corpus:
response_tokenized = nltk.word_tokenize(response)
response_prep = []
            # Lower-case and drop punctuation
for word in response_tokenized:
word = word.lower()
if word not in string.punctuation:
response_prep.append(word)
vocab.append(word)
corpus_prep.append(response_prep)
vocab.append(pad)
vocab = {x: index for index, x in enumerate(set(vocab))}
return corpus_prep, vocab
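# A brief usage sketch (not part of the original file): tokenizes two short
# texts and prints the preprocessed corpus plus the generated vocabulary.
if __name__ == '__main__':
    prep = Preprocesar(["Hello, world!", "Hello again."])
    corpus_prep, vocab = prep()
    print(corpus_prep)  # e.g. [['hello', 'world'], ['hello', 'again']]
    print(vocab)  # word -> index mapping, including '<PAD>'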
|
StarcoderdataPython
|
6602331
|
<gh_stars>1-10
#!/usr/bin/python
import re
from optparse import OptionParser
import sys
def fextract(text, start=None, end=None):
"""Return the text between regular expressions start and end."""
if type(text) is list:
text = ''.join(text)
if start is not None:
text = re.split(start, text)[1]
if end is not None:
text = re.split(end, text)[0]
return text
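# Example (not part of the original script): extract the text between two markers.
#     >>> fextract('prefix START middle END suffix', start='START', end='END')
#     ' middle '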
parser=OptionParser()
parser.add_option('-f', '--file', dest='filename', help='Input data file', default=None)
parser.add_option('-o', '--outputfile', dest='output', help='Output data file', default=None)
parser.add_option('-s', '--start', dest='start', help='Start regexp', default=None)
parser.add_option('-e', '--end', dest='end', help='End regexp', default=None)
parser.usage = 'extract.py -s start -e end [options]'
options, args = parser.parse_args(sys.argv[1:])
#print options, args
##if len(args) ==0:
## parser.print_usage()
## sys.exit()
##else:
## start = args[0]
## end = args[0]
if options.filename is not None:
text = open(options.filename,'r').readlines()
else:
text=sys.stdin.readlines()
if options.output is None:
output = sys.stdout
else:
output = open(options.output, 'w')
out = fextract(text, options.start, options.end)
output.writelines(out)
|
StarcoderdataPython
|
3457405
|
<reponame>MosyMosy/VDT
from lab.tsne import plot
# from lab.affines import plot
# from lab.layers import plot
# from lab.learning_curve import plot
# from lab.tsne import plot
# from lab.visual_domain import plot
# from lab.autoencoder import transfer
|
StarcoderdataPython
|
9754602
|
import math
import scipy.sparse as sp
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_sparse
class SparseDropout(nn.Module):
def __init__(self, p):
super().__init__()
self.p = p
def forward(self, input):
value_dropped = F.dropout(input.storage.value(), self.p, self.training)
return torch_sparse.SparseTensor(
row=input.storage.row(), rowptr=input.storage.rowptr(), col=input.storage.col(),
value=value_dropped, sparse_sizes=input.sparse_sizes(), is_sorted=True)
class MixedDropout(nn.Module):
def __init__(self, p):
super().__init__()
self.dense_dropout = nn.Dropout(p)
self.sparse_dropout = SparseDropout(p)
def forward(self, input):
if isinstance(input, torch_sparse.SparseTensor):
return self.sparse_dropout(input)
else:
return self.dense_dropout(input)
class MixedLinear(nn.Module):
def __init__(self, in_features, out_features, bias=True):
super().__init__()
self.in_features = in_features
self.out_features = out_features
self.weight = nn.Parameter(torch.Tensor(in_features, out_features))
if bias:
self.bias = nn.Parameter(torch.Tensor(out_features))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
# Our fan_in is interpreted by PyTorch as fan_out (swapped dimensions)
nn.init.kaiming_uniform_(self.weight, mode='fan_out', a=math.sqrt(5))
if self.bias is not None:
_, fan_out = nn.init._calculate_fan_in_and_fan_out(self.weight)
bound = 1 / math.sqrt(fan_out)
nn.init.uniform_(self.bias, -bound, bound)
def forward(self, input):
if isinstance(input, torch_sparse.SparseTensor):
res = input.matmul(self.weight)
            if self.bias is not None:  # truth-testing a multi-element Parameter raises a RuntimeError
res += self.bias[None, :]
else:
            if self.bias is not None:
res = torch.addmm(self.bias, input, self.weight)
else:
res = input.matmul(self.weight)
return res
def extra_repr(self):
return 'in_features={}, out_features={}, bias={}'.format(
self.in_features, self.out_features, self.bias is not None)
def matrix_to_torch(X):
if sp.issparse(X):
return torch_sparse.SparseTensor.from_scipy(X)
else:
return torch.FloatTensor(X)
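# A small usage sketch (not part of the original module): the same MixedLinear
# layer accepts either a dense torch.Tensor or a torch_sparse.SparseTensor.
if __name__ == '__main__':
    lin = MixedLinear(4, 2)
    dense = torch.randn(3, 4)
    print(lin(dense).shape)  # torch.Size([3, 2])
    sparse = matrix_to_torch(sp.random(3, 4, density=0.5, format='csr', dtype='float32'))
    print(lin(sparse).shape)  # torch.Size([3, 2])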
|
StarcoderdataPython
|
8157755
|
<reponame>felix9064/PythonProject
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Topic: making an object support the context-manager protocol, i.e. compatible with the with statement
Desc : to make an object work with a with statement, implement the __enter__() and __exit__() methods;
       the example below defines a custom network connection
"""
from socket import socket, AF_INET, SOCK_STREAM
from functools import partial
class LazyConnection:
"""
    The key feature of this class is that it represents a network connection but
    does nothing at initialization (e.g. no connection is actually established);
    opening and closing the connection are handled automatically by the with statement
"""
def __init__(self, address, family=AF_INET, type=SOCK_STREAM):
self.address = address
self.family = family
self.type = type
self.sock = None
def __enter__(self):
if self.sock is not None:
raise RuntimeError('Already Connected')
self.sock = socket(self.family, self.type)
self.sock.connect(self.address)
return self.sock
def __exit__(self, exc_type, exc_val, exc_tb):
self.sock.close()
self.sock = None
if __name__ == '__main__':
conn = LazyConnection(('www.python.org', 80))
with conn as s:
        # conn.__enter__() runs here; the connection is established
s.send(b'GET /index.html HTTP/1.1\r\n')
s.send(b'Host: www.python.org\r\n')
s.send(b'\r\n')
resp = b''.join(iter(partial(s.recv, 8192), b''))
print(resp)
    # conn.__exit__() runs here; the connection is closed
|
StarcoderdataPython
|