Dataset schema (one record per source file; ⌀ marks nullable columns):

| column | dtype | range / classes |
| --- | --- | --- |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 (⌀) | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (⌀) | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (⌀) | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1 |
| author_id | string | length 1–132 |
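This is the per-file metadata layout of a large code-corpus dump, with repository provenance and GitHub-activity fields attached to each blob. As a minimal sketch of how records with this schema could be inspected through the Hugging Face `datasets` library (the dataset path below is an assumed placeholder, not the confirmed source of this dump):

from datasets import load_dataset

# Placeholder hub path; substitute the actual name of this dump.
ds = load_dataset("bigcode/the-stack", split="train", streaming=True)

for row in ds.take(3):  # stream a few records instead of downloading the corpus
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])

Sample rows follow, fields in schema order, pipe-separated, with `content` inlined: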
459c72af3cd511acf54b8b60834225780fea43e4 | 5a79600f6db7a8c65fa3182f822891d7fd68eeda | /tests/test_gpu_openacc.py | e1fc8aa0bbbf714cbefa9cc3d031f4e9e91790f1 | [
"MIT"
]
| permissive | alisiahkoohi/devito | 867fb05c89f24193951835227abdc271f42cc6e2 | f535a44dff12de2837eb6e3217a65ffb2d371cb8 | refs/heads/master | 2023-03-16T05:50:23.610576 | 2021-05-24T21:49:32 | 2021-05-24T22:21:40 | 128,473,180 | 0 | 0 | MIT | 2023-03-02T12:58:21 | 2018-04-06T21:41:54 | Python | UTF-8 | Python | false | false | 6,509 | py | import pytest
import numpy as np
from conftest import skipif
from devito import Grid, Function, TimeFunction, Eq, Operator, norm, solve
from devito.data import LEFT
from devito.ir.iet import FindNodes, Section, retrieve_iteration_tree
from examples.seismic import TimeAxis, RickerSource, Receiver
class TestCodeGeneration(object):
def test_basic(self):
grid = Grid(shape=(3, 3, 3))
u = TimeFunction(name='u', grid=grid)
op = Operator(Eq(u.forward, u + 1), platform='nvidiaX', language='openacc')
trees = retrieve_iteration_tree(op)
assert len(trees) == 1
assert trees[0][1].pragmas[0].value ==\
'acc parallel loop collapse(3) present(u)'
assert op.body[1].header[0].value ==\
('acc enter data copyin(u[0:u_vec->size[0]]'
'[0:u_vec->size[1]][0:u_vec->size[2]][0:u_vec->size[3]])')
assert str(op.body[1].footer[0]) == ''
assert op.body[1].footer[1].contents[0].value ==\
('acc exit data copyout(u[0:u_vec->size[0]]'
'[0:u_vec->size[1]][0:u_vec->size[2]][0:u_vec->size[3]])')
assert op.body[1].footer[1].contents[1].value ==\
('acc exit data delete(u[0:u_vec->size[0]]'
'[0:u_vec->size[1]][0:u_vec->size[2]][0:u_vec->size[3]])')
def test_streaming_postponed_deletion(self):
grid = Grid(shape=(10, 10, 10))
u = TimeFunction(name='u', grid=grid)
v = TimeFunction(name='v', grid=grid)
usave = TimeFunction(name='usave', grid=grid, save=10)
eqns = [Eq(u.forward, u + usave),
Eq(v.forward, v + u.forward.dx + usave)]
op = Operator(eqns, platform='nvidiaX', language='openacc',
opt=('streaming', 'orchestrate'))
sections = FindNodes(Section).visit(op)
assert len(sections) == 2
assert str(sections[1].body[0].body[0].footer[1]) ==\
('#pragma acc exit data delete(usave[time:1][0:usave_vec->size[1]]'
'[0:usave_vec->size[2]][0:usave_vec->size[3]])')
def test_streaming_with_host_loop(self):
grid = Grid(shape=(10, 10, 10))
f = Function(name='f', grid=grid)
u = TimeFunction(name='u', grid=grid, save=10)
eqns = [Eq(f, u),
Eq(u.forward, f + 1)]
op = Operator(eqns, platform='nvidiaX', language='openacc',
opt=('streaming', 'orchestrate'))
# Check generated code
assert len(op._func_table) == 2
assert 'init_device0' in op._func_table
assert 'prefetch_host_to_device0' in op._func_table
sections = FindNodes(Section).visit(op)
assert len(sections) == 2
s = sections[0].body[0].body[0]
assert str(s.body[3].footer[1]) == ('#pragma acc exit data delete'
'(u[time:1][0:u_vec->size[1]][0:u_vec'
'->size[2]][0:u_vec->size[3]])')
assert str(s.body[2]) == ('#pragma acc data present(u[time:1][0:u_vec->'
'size[1]][0:u_vec->size[2]][0:u_vec->size[3]])')
trees = retrieve_iteration_tree(op)
assert len(trees) == 3
assert 'present(f)' in str(trees[0][1].pragmas[0])
class TestOperator(object):
@skipif('nodevice')
def test_op_apply(self):
grid = Grid(shape=(3, 3, 3))
u = TimeFunction(name='u', grid=grid, dtype=np.int32)
op = Operator(Eq(u.forward, u + 1))
# Make sure we've indeed generated OpenACC code
assert 'acc parallel' in str(op)
time_steps = 1000
op.apply(time_M=time_steps)
assert np.all(np.array(u.data[0, :, :, :]) == time_steps)
@skipif('nodevice')
def test_iso_ac(self):
shape = (101, 101)
extent = (1000, 1000)
origin = (0., 0.)
v = np.empty(shape, dtype=np.float32)
v[:, :51] = 1.5
v[:, 51:] = 2.5
grid = Grid(shape=shape, extent=extent, origin=origin)
t0 = 0.
tn = 1000.
dt = 1.6
time_range = TimeAxis(start=t0, stop=tn, step=dt)
f0 = 0.010
src = RickerSource(name='src', grid=grid, f0=f0,
npoint=1, time_range=time_range)
domain_size = np.array(extent)
src.coordinates.data[0, :] = domain_size*.5
src.coordinates.data[0, -1] = 20.
rec = Receiver(name='rec', grid=grid, npoint=101, time_range=time_range)
rec.coordinates.data[:, 0] = np.linspace(0, domain_size[0], num=101)
rec.coordinates.data[:, 1] = 20.
u = TimeFunction(name="u", grid=grid, time_order=2, space_order=2)
m = Function(name='m', grid=grid)
m.data[:] = 1./(v*v)
pde = m * u.dt2 - u.laplace
stencil = Eq(u.forward, solve(pde, u.forward))
src_term = src.inject(field=u.forward, expr=src * dt**2 / m)
rec_term = rec.interpolate(expr=u.forward)
op = Operator([stencil] + src_term + rec_term)
# Make sure we've indeed generated OpenACC code
assert 'acc parallel' in str(op)
op(time=time_range.num-1, dt=dt)
assert np.isclose(norm(rec), 490.56, atol=1e-2, rtol=0)
class TestMPI(object):
@skipif('nodevice')
@pytest.mark.parallel(mode=2)
def test_basic(self):
grid = Grid(shape=(6, 6))
x, y = grid.dimensions
t = grid.stepping_dim
u = TimeFunction(name='u', grid=grid, space_order=2)
u.data[:] = 1.
expr = u[t, x, y-1] + u[t, x-1, y] + u[t, x, y] + u[t, x, y+1] + u[t, x+1, y]
op = Operator(Eq(u.forward, expr), platform='nvidiaX', language='openacc')
# Make sure we've indeed generated OpenACC+MPI code
assert 'acc parallel' in str(op)
assert len(op._func_table) == 4
op(time_M=1)
glb_pos_map = grid.distributor.glb_pos_map
if LEFT in glb_pos_map[x]:
assert np.all(u.data[0] == [[11., 16., 17., 17., 16., 11.],
[16., 23., 24., 24., 23., 16.],
[17., 24., 25., 25., 24., 17.]])
else:
assert np.all(u.data[0] == [[17., 24., 25., 25., 24., 17.],
[16., 23., 24., 24., 23., 16.],
[11., 16., 17., 17., 16., 11.]])
@skipif('nodevice')
@pytest.mark.parallel(mode=2)
def test_iso_ac(self):
TestOperator().test_iso_ac()
| [
"[email protected]"
]
| |
ac31523ba9787d027e63b488024b15c9e839e46c | f3bd271bf00325881fb5b2533b9ef7f7448a75ec | /classes/_point12.py | 3f67194c74253a3ea60ca8994c2d9259631a918f | []
| no_license | obaica/xcp2k | 7f99fc9d494859e16b9b0ea8e217b0493f4b2f59 | 6e15c2c95658f545102595dc1783f5e03a9e6916 | refs/heads/master | 2020-07-15T17:27:43.378835 | 2019-02-11T16:32:24 | 2019-02-11T16:32:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | from xcp2k.inputsection import InputSection
class _point12(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Type = None
self.Atoms = []
self.Weights = []
self.Xyz = None
self._name = "POINT"
self._keywords = {'Xyz': 'XYZ', 'Type': 'TYPE'}
self._repeated_keywords = {'Weights': 'WEIGHTS', 'Atoms': 'ATOMS'}
| [
"[email protected]"
]
| |
332064ba5922ff92b2319eb3b292136ddec583f8 | 3fcc7957ed103ead0db8d4e6020c52403559e63b | /1557.py | 04bb7afeb9d7032e1dee17c65612b5604da1c506 | []
| no_license | gabrielreiss/URI | db3082bd89832bb4f45d2375db376454c2ff8f27 | 01bc927d1eee8eb16a16de786e981faa494088e8 | refs/heads/master | 2022-04-22T08:25:28.855996 | 2020-04-15T14:58:47 | 2020-04-15T14:58:47 | 255,950,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | cont = False
while cont == False:
n = int(input())
if n == 0:
cont = True
else:
m = []
w, h = n, n
m = [[0 for x in range(w)] for y in range(h)]
for i in range(0, n):
for j in range(0, n):
m[i][j] = 2 ** (i+j)
T = len(str(m[n-1][n-1]))
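# T is the print width of the largest entry, 2^(2n-2); every cell below is
# left-padded with spaces until the matrix columns are right-aligned.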
for i in range(n):
for j in range(n):
m[i][j] = str(m[i][j])
while len(m[i][j]) < T:
m[i][j] = ' ' + m[i][j]
M = ' '.join(m[i])
print(M)
print()
| [
"[email protected]"
]
| |
6c050c0d77f4e5d5ec77c6bef6bca2540f25d9b6 | 461052f4a7197db023ad3deb864bf1784fdd7854 | /library/migrations/0003_auto_20200513_1625.py | 451151225554e0605b2693ef162763660f71eb46 | [
"MIT"
]
| permissive | ArRosid/training_drf | 1660a08272c09302b39adc8e19e3674a78863685 | 4369c8113a67bb3f18b6890210902f09d617569f | refs/heads/master | 2022-06-20T02:03:49.373355 | 2020-05-13T16:38:03 | 2020-05-13T16:38:03 | 263,639,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | # Generated by Django 3.0.6 on 2020-05-13 16:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('library', '0002_book_modified_by'),
]
operations = [
migrations.AddField(
model_name='book',
name='deleted_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='book_deleted_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='book',
name='is_deleted',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='book',
name='modified_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='book_modified_by', to=settings.AUTH_USER_MODEL),
),
]
| [
"[email protected]"
]
| |
6cfcbb1a68c162aaf5754e4f590f3db98c8850b8 | ba48780406fd3c04ff7efbd60c8c477a3aaa0f27 | /src2/cv1/try_aux_freq.py | f9d89826ad6a3bea6a4fc559e1a63429ef74ce5e | []
| no_license | umpot/quora | 635d37f1602981d63cc50b5a8070297dce59c19a | ac10e6cd4e396c8b5958371f8e537e671067fd38 | refs/heads/master | 2020-12-30T13:46:07.614164 | 2017-09-18T10:22:07 | 2017-09-18T10:22:07 | 91,250,042 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,817 | py | import pandas as pd
import numpy as np
import seaborn as sns
import re
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
sns.set(color_codes=True)
sns.set(style="whitegrid", color_codes=True)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 5000)
pd.set_option('display.max_rows', 5000)
pd.set_option('display.max_colwidth', 100)
TARGET = 'is_duplicate'
qid1, qid2 = 'qid1', 'qid2'
question1, question2 = 'question1', 'question2'
lemmas_q1, lemmas_q2 = 'lemmas_q1', 'lemmas_q2'
stems_q1, stems_q2 = 'stems_q1', 'stems_q2'
tokens_q1, tokens_q2 = 'tokens_q1', 'tokens_q2'
ner_q1, ner_q2='ner_q1', 'ner_q2'
postag_q1, postag_q2='postag_q1', 'postag_q2'
data_folder = '../../../data/'
fp_train = data_folder + 'train.csv'
fp_test = data_folder + 'test.csv'
lemmas_train_fp = os.path.join(data_folder,'nlp','lemmas_train.csv')
lemmas_test_fp = os.path.join(data_folder,'nlp','lemmas_test.csv')
tokens_train_fp = os.path.join(data_folder,'nlp','tokens_train.csv')
tokens_test_fp = os.path.join(data_folder,'nlp','tokens_test.csv')
postag_train_fp = os.path.join(data_folder,'nlp','postag_train.csv')
postag_test_fp = os.path.join(data_folder,'nlp','postag_test.csv')
ner_train_fp = os.path.join(data_folder,'nlp','ner_train.csv')
ner_test_fp = os.path.join(data_folder,'nlp','ner_test.csv')
stems_train_fp = os.path.join(data_folder,'nlp','stems_train.csv')
stems_test_fp = os.path.join(data_folder,'nlp','stems_test.csv')
tfidf_with_stops_train_fp = os.path.join(data_folder,'tfidf','old' ,'tokens_with_stop_words_tfidf_train.csv')
tfidf_with_stops_test_fp = os.path.join(data_folder,'tfidf','old','tokens_with_stop_words_tfidf_test.csv')
magic_train_fp=os.path.join(data_folder, 'magic', 'magic_train.csv')
magic_test_fp=os.path.join(data_folder, 'magic', 'magic_test.csv')
magic2_train_fp = os.path.join(data_folder, 'magic', 'magic2_train.csv')
magic2_test_fp = os.path.join(data_folder, 'magic', 'magic2_test.csv')
common_words_train_fp = os.path.join(data_folder, 'basic','common_words_train.csv')
length_train_fp = os.path.join(data_folder, 'basic','lens_train.csv')
common_words_test_fp = os.path.join(data_folder, 'basic','common_words_test.csv')
length_test_fp = os.path.join(data_folder, 'basic','lens_test.csv')
TRAIN_METRICS_FP = [
data_folder + 'distances/'+ 'train_metrics_bool_lemmas.csv',
data_folder + 'distances/'+'train_metrics_bool_stems.csv',
data_folder + 'distances/'+'train_metrics_bool_tokens.csv',
data_folder + 'distances/'+'train_metrics_fuzzy_lemmas.csv',
data_folder + 'distances/'+'train_metrics_fuzzy_stems.csv',
data_folder + 'distances/'+'train_metrics_fuzzy_tokens.csv',
data_folder + 'distances/'+'train_metrics_sequence_lemmas.csv',
data_folder + 'distances/'+'train_metrics_sequence_stems.csv',
data_folder + 'distances/'+'train_metrics_sequence_tokens.csv'
]
TEST_METRICS_FP = [
data_folder + 'distances/'+ 'test_metrics_bool_lemmas.csv',
data_folder + 'distances/'+'test_metrics_bool_stems.csv',
data_folder + 'distances/'+'test_metrics_bool_tokens.csv',
data_folder + 'distances/'+'test_metrics_fuzzy_lemmas.csv',
data_folder + 'distances/'+'test_metrics_fuzzy_stems.csv',
data_folder + 'distances/'+'test_metrics_fuzzy_tokens.csv',
data_folder + 'distances/'+'test_metrics_sequence_lemmas.csv',
data_folder + 'distances/'+'test_metrics_sequence_stems.csv',
data_folder + 'distances/'+'test_metrics_sequence_tokens.csv'
]
trash_cols = [
"w_share_ratio_2_std_idf_dirty_lower_no_stops",
"w_share_ratio_2_smooth_idf_dirty_upper",
"w_share_ratio_2_std_idf_tokens_lower_no_stops",
"abi_jaccard_distance",
"len_char_diff_log",
"len_word_diff_log",
"len_word_expt_stop_diff_log",
"stop_words_num_q1",
"stop_words_num_q2",
"lemmas_kulsinski",
"lemmas_dice",
"lemmas_jaccard",
"stems_kulsinski",
"stems_dice",
"stems_jaccard",
"tokens_dice",
"tokens_jaccard",
"lemmas_partial_token_set_ratio",
"stems_partial_token_set_ratio",
"tokens_partial_token_set_ratio",
"lemmas_distance.jaccard",
"stems_distance.jaccard",
"tokens_distance.jaccard",
"w_share_ratio_2_smooth_idf_dirty_lower_no_stops",
"w_share_ratio_2_std_idf_dirty_upper",
"w_share_ratio_2_smooth_idf_tokens_lower",
"w_share_ratio_2_std_idf_tokens_lower",
"w_share_ratio_2_smooth_idf_tokens_lower_no_stops"
]
def del_trash_cols(df):
for col in trash_cols:
if col in df:
del df[col]
def load_train():
return pd.read_csv(fp_train, index_col='id')
def load_test():
return pd.read_csv(fp_test, index_col='test_id')
def load__train_metrics():
dfs = [pd.read_csv(fp, index_col='id') for fp in TRAIN_METRICS_FP]
return pd.concat(dfs, axis=1)
def load__test_metrics():
dfs = [pd.read_csv(fp, index_col='test_id') for fp in TEST_METRICS_FP]
return pd.concat(dfs, axis=1)
def load_train_all():
return pd.concat([
load_train(),
load_train_lemmas(),
load_train_stems(),
load_train_tokens(),
load_train_lengths(),
load_train_common_words(),
load__train_metrics(),
load_train_tfidf()
], axis=1)
def load_train_nlp():
return pd.concat([
load_train(),
load_train_postag(),
load_train_lemmas(),
load_train_stems(),
load_train_tokens(),
load_train_ner()
], axis=1)
def load_test_nlp():
return pd.concat([
load_test(),
load_test_postag(),
load_test_lemmas(),
load_test_stems(),
load_test_tokens(),
load_test_ner()
], axis=1)
def load_test_all():
return pd.concat([
load_test(),
load_test_lemmas(),
load_test_stems(),
load_test_tokens(),
load_test_lengths(),
load_test_common_words(),
load__test_metrics(),
load_test_tfidf()
], axis=1)
def load_train_test():
return pd.read_csv(fp_train, index_col='id'), pd.read_csv(fp_test, index_col='test_id')
def load_train_lemmas():
df = pd.read_csv(lemmas_train_fp, index_col='id')
df = df.fillna('')
for col in [lemmas_q1, lemmas_q2]:
df[col]=df[col].apply(str)
return df
def load_test_lemmas():
df = pd.read_csv(lemmas_test_fp, index_col='test_id')
df = df.fillna('')
for col in [lemmas_q1, lemmas_q2]:
df[col]=df[col].apply(str)
return df
def load_train_tfidf():
df = pd.read_csv(tfidf_with_stops_train_fp, index_col='id')
return df
def load_test_tfidf():
df = pd.read_csv(tfidf_with_stops_test_fp, index_col='test_id')
return df
def load_train_tokens():
df = pd.read_csv(tokens_train_fp, index_col='id')
df = df.fillna('')
return df
def load_test_tokens():
df = pd.read_csv(tokens_test_fp, index_col='test_id')
df = df.fillna('')
return df
def load_train_postag():
df = pd.read_csv(postag_train_fp, index_col='id')
return df
def load_test_postag():
df = pd.read_csv(postag_test_fp, index_col='test_id')
return df
def load_train_ner():
df = pd.read_csv(ner_train_fp, index_col='id')
return df
def load_test_ner():
df = pd.read_csv(ner_test_fp, index_col='test_id')
return df
def load_train_magic():
df = pd.concat([
pd.read_csv(magic_train_fp, index_col='id')[['freq_question1', 'freq_question2']],
pd.read_csv(magic2_train_fp, index_col='id')],
axis=1
)
return df
def load_test_magic():
df = pd.concat([
pd.read_csv(magic_test_fp, index_col='test_id')[['freq_question1', 'freq_question2']],
pd.read_csv(magic2_test_fp, index_col='test_id')],
axis=1
)
return df
def load_train_stems():
df = pd.read_csv(stems_train_fp, index_col='id')
df = df[['question1_porter', 'question2_porter']]
df = df.rename(columns={'question1_porter': 'stems_q1', 'question2_porter': 'stems_q2'})
df = df.fillna('')
for col in [stems_q1, stems_q2]:
df[col]=df[col].apply(str)
return df
def load_test_stems():
df = pd.read_csv(stems_test_fp, index_col='test_id')
df = df[['question1_porter', 'question2_porter']]
df = df.rename(columns={'question1_porter': 'stems_q1', 'question2_porter': 'stems_q2'})
df = df.fillna('')
for col in [stems_q1, stems_q2]:
df[col]=df[col].apply(str)
return df
def load_train_common_words():
df = pd.read_csv(common_words_train_fp, index_col='id')
return df
def load_test_common_words():
df = pd.read_csv(common_words_test_fp, index_col='test_id')
return df
def load_train_lengths():
df = pd.read_csv(length_train_fp, index_col='id')
return df
def load_test_lengths():
df = pd.read_csv(length_test_fp, index_col='test_id')
return df
def shuffle_df(df, random_state=42):
np.random.seed(random_state)
return df.iloc[np.random.permutation(len(df))]
def explore_target_ratio(df):
return {
'pos':1.0*len(df[df[TARGET]==1])/len(df),
'neg':1.0*len(df[df[TARGET]==0])/len(df)
}
# df = load_train_all()
######################################################################################
######################################################################################
######################################################################################
######################################################################################
#WH
wh_fp_train=os.path.join(data_folder, 'wh', 'wh_train.csv')
wh_fp_test=os.path.join(data_folder, 'wh', 'wh_test.csv')
def load_wh_train():
df = pd.read_csv(wh_fp_train, index_col='id')
return df
def load_wh_test():
df = pd.read_csv(wh_fp_test, index_col='test_id')
return df
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
upper_keywords_fp_train=os.path.join(data_folder, 'keywords', 'train_upper.csv')
upper_keywords_test=os.path.join(data_folder, 'keywords', 'test_upper.csv')
def load_upper_keywords_train():
df = pd.read_csv(upper_keywords_fp_train, index_col='id')
return df
def load_upper_keywords_test():
df = pd.read_csv(upper_keywords_test, index_col='test_id')
return df
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
######################################################################################
one_upper_fp_train=os.path.join(data_folder, 'keywords', 'train_upper_freq_200.csv')
one_upper_fp_test=os.path.join(data_folder, 'keywords', 'test_upper_freq_200.csv')
def load_one_upper_train():
df = pd.read_csv(one_upper_fp_train, index_col='id')
return df
def load_one_upper_test():
df = pd.read_csv(one_upper_fp_test, index_col='test_id')
return df
######################################################################################
######################################################################################
######################################################################################
######################################################################################
import pandas as pd
import numpy as np
TARGET = 'is_duplicate'
INDEX_PREFIX= 100000000
#old
{'pos': 0.369197853026293,
'neg': 0.630802146973707}
#new
r1 = 0.174264424749
r0 = 0.825754788586
""""
p_old/(1+delta) = p_new
delta = (p_old/p_new)-1 = 1.1186071314214785
l = delta*N = 452241
"""
delta = 1.1186071314214785
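# Consistency check: appending l = delta*N duplicated negatives moves the
# negative share from 0.6308 to (0.6308 + 1.1186) / (1 + 1.1186) ~ 0.8258,
# i.e. the target ratio r0 above.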
def explore_target_ratio(df):
return {
'pos':1.0*len(df[df[TARGET]==1])/len(df),
'neg':1.0*len(df[df[TARGET]==0])/len(df)
}
def shuffle_df(df, random_state):
np.random.seed(random_state)
return df.iloc[np.random.permutation(len(df))]
def oversample_df(df, l, random_state):
df_pos = df[df[TARGET]==1]
df_neg = df[df[TARGET]==0]
df_neg_sampl = df_neg.sample(l, random_state=random_state, replace=True)
df=pd.concat([df_pos, df_neg, df_neg_sampl])
df = shuffle_df(df, random_state)
return df
def oversample(train_df, test_df, random_state=42):
l_train = int(delta * len(train_df))
l_test = int(delta * len(test_df))
return oversample_df(train_df, l_train, random_state), oversample_df(test_df, l_test, random_state)
############################################################3
############################################################3
############################################################3
train_avg_tokK_freq_fp=os.path.join(data_folder, 'top_k_freq', 'train_avg_K_tok_freq.csv')
test_avg_tokK_freq_fp=os.path.join(data_folder, 'top_k_freq', 'test_avg_K_tok_freq.csv')
def load_topNs_avg_tok_freq_train():
return pd.read_csv(train_avg_tokK_freq_fp, index_col='id')
def load_topNs_avg_tok_freq_test():
return pd.read_csv(test_avg_tokK_freq_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
abi_train_fp = os.path.join(data_folder, 'abishek', 'abi_train.csv')
abi_test_fp = os.path.join(data_folder, 'abishek', 'abi_test.csv')
def load_abi_train():
return pd.read_csv(abi_train_fp, index_col='id')
def load_abi_test():
return pd.read_csv(abi_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
max_k_cores_train_fp=os.path.join(data_folder,'magic' ,'max_k_cores_train.csv')
max_k_cores_test_fp=os.path.join(data_folder,'magic' ,'max_k_cores_test.csv')
def load_max_k_cores_train():
return pd.read_csv(max_k_cores_train_fp, index_col='id')
def load_max_k_cores_test():
return pd.read_csv(max_k_cores_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
glove_train_fp = os.path.join(data_folder, 'embeddings', 'glove_train.csv')
glove_test_fp = os.path.join(data_folder, 'embeddings', 'glove_test.csv')
def load_glove_metrics_train():
return pd.read_csv(glove_train_fp, index_col='id')
def load_glove_metrics_test():
return pd.read_csv(glove_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
lex_train_fp = os.path.join(data_folder, 'embeddings', 'lex_train.csv')
lex_test_fp = os.path.join(data_folder, 'embeddings', 'lex_test.csv')
def load_lex_metrics_train():
return pd.read_csv(lex_train_fp, index_col='id')
def load_lex_metrics_test():
return pd.read_csv(lex_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
word2vec_train_fp = os.path.join(data_folder, 'embeddings', 'word2vec_train.csv')
word2vec_test_fp = os.path.join(data_folder, 'embeddings', 'word2vec_test.csv')
def load_word2vec_metrics_train():
return pd.read_csv(word2vec_train_fp, index_col='id')
def load_word2vec_metrics_test():
return pd.read_csv(word2vec_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
embedings_list=['word2vec', 'glove', 'lex']
column_types = ['tokens', 'lemmas']
kur_pairs=[
('kur_q1vec_{}_{}'.format(col_type,emb), 'kur_q2vec_{}_{}'.format(col_type,emb))
for col_type in column_types for emb in embedings_list
]
skew_pairs=[
('skew_q1vec_{}_{}'.format(col_type,emb), 'skew_q2vec_{}_{}'.format(col_type,emb))
for col_type in column_types for emb in embedings_list
]
def add_kur_combinations(df):
for col1, col2 in kur_pairs+skew_pairs:
name = col1.replace('q1', '')
df['{}_abs_diff'.format(name)]=np.abs(df[col1]-df[col2])
df['{}_1div2_ratio'.format(name)]= df[col1]/df[col2]
df['{}_log_ratio'.format(name)]= np.abs(np.log(df[col1]/df[col2]))
df['{}_q1_ratio'.format(name)]=df[col1]/(df[col1]+df[col2])
df['{}_q2_ratio'.format(name)]=df[col2]/(df[col1]+df[col2])
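# Note: skew and kurtosis can differ in sign between q1 and q2, so the log
# ratio features yield NaN for sign-flipped pairs; XGBoost simply routes
# NaNs along its learned default branch (treated as missing values).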
############################################################3
############################################################3
############################################################3
aux_pairs_50_train_fp = os.path.join(data_folder, 'aux_pron', 'aux_pairs_50_train.csv')
aux_pairs_50_test_fp = os.path.join(data_folder, 'aux_pron', 'aux_pairs_50_test.csv')
def load_aux_pairs_50_train():
return pd.read_csv(aux_pairs_50_train_fp, index_col='id')
def load_aux_pairs_50_test():
return pd.read_csv(aux_pairs_50_test_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
train_pos_metrics_fp=os.path.join(data_folder, 'pos_metrics', 'train_pos_metrics.csv')
test_pos_metrics_fp=os.path.join(data_folder, 'pos_metrics', 'test_pos_metrics.csv')
def load_metrics_on_pos_train():
return pd.read_csv(train_pos_metrics_fp, index_col='id')
def load_metrics_on_pos_test():
return pd.read_csv(test_pos_metrics_fp, index_col='test_id')
############################################################3
############################################################3
############################################################3
import xgboost as xgb
import matplotlib.pyplot as plt
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import log_loss
import json
from time import sleep
import traceback
gc_host = '104.197.97.20'
local_host = '10.20.0.144'
user='ubik'
password='nfrf[eqyz'
def load_train_all_xgb():
train_df = pd.concat([
load_train(),
load_train_lengths(),
load_train_common_words(),
load__train_metrics(),
load_train_tfidf(),
load_train_magic(),
load_wh_train(),
load_one_upper_train(),
load_topNs_avg_tok_freq_train(),
load_abi_train(),
load_max_k_cores_train(),
load_word2vec_metrics_train(),
load_glove_metrics_train(),
load_lex_metrics_train(),
load_metrics_on_pos_train(),
load_aux_pairs_50_train()
# load_upper_keywords_train()
], axis=1)
cols_to_del = [qid1, qid2, question1, question2]
for col in cols_to_del:
del train_df[col]
return train_df
# def load_test_all_xgb():
# test_df = pd.concat([
# load_test_lengths(),
# load_test_common_words(),
# load__test_metrics(),
# load_train_tfidf(),
# load_test_magic(),
# load_wh_test(),
# load_one_upper_test(),
# load_topNs_avg_tok_freq_test(),
# # load_abi_test(),
# load_max_k_cores_test(),
# load_word2vec_metrics_test(),
# load_glove_metrics_test(),
# load_lex_metrics_test(),
# load_metrics_on_pos_train()
# ], axis=1)
#
#
# return test_df
def plot_errors(imp):
train_runs= [x['train'] for x in imp]
test_runs= [x['test'] for x in imp]
sz=len(train_runs[0])
x_axis=range(sz)
y_train = [np.mean([x[j] for x in train_runs]) for j in x_axis]
y_test = [np.mean([x[j] for x in test_runs]) for j in x_axis]
fig, ax = plt.subplots()
ax.plot(x_axis, y_train, label='train')
ax.plot(x_axis, y_test, label='test')
ax.legend()
plt.show()
def xgboost_per_tree_results(estimator):
results_on_test = estimator.evals_result()['validation_1']['logloss']
results_on_train = estimator.evals_result()['validation_0']['logloss']
return {
'train': results_on_train,
'test': results_on_test
}
def out_loss(loss):
print '====================================='
print '====================================='
print '====================================='
print loss
print '====================================='
print '====================================='
print '====================================='
def write_results(name,mongo_host, per_tree_res, losses, imp, features):
from pymongo import MongoClient
imp=[x.item() for x in imp]
features=list(features)
client = MongoClient(mongo_host, 27017)
client['admin'].authenticate(user, password)
db = client['xgb_cv']
collection = db[name]
try:
collection.insert_one({
'results': per_tree_res,
'losses': losses,
'importance':imp,
'features':features
})
except:
print 'error in mongo'
traceback.print_exc()
raise
# sleep(20)
def perform_xgb_cv(name, mongo_host):
df = load_train_all_xgb()
del_trash_cols(df)
add_kur_combinations(df)
folds =5
seed = 42
skf = StratifiedKFold(n_splits=folds, shuffle=True, random_state=seed)
losses = []
n_est=[]
counter = 0
for big_ind, small_ind in skf.split(np.zeros(len(df)), df[TARGET]):
big = df.iloc[big_ind]
small = df.iloc[small_ind]
print explore_target_ratio(big)
print explore_target_ratio(small)
big, small = oversample(big, small, seed)
print explore_target_ratio(big)
print explore_target_ratio(small)
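# Both the training and validation folds are re-balanced to the (probed)
# leaderboard class ratio r0/r1 above, so the validation logloss is measured
# at the distribution the submission is actually scored on.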
train_target = big[TARGET]
del big[TARGET]
train_arr = big
test_target = small[TARGET]
del small[TARGET]
test_arr = small
# estimator = xgb.XGBClassifier(n_estimators=10000,
# subsample=0.6,
# # colsample_bytree=0.8,
# max_depth=7,
# objective='binary:logistic',
# learning_rate=0.02,
# base_score=0.2)
estimator = xgb.XGBClassifier(n_estimators=10000,
subsample=0.8,
colsample_bytree=0.8,
max_depth=5,
objective='binary:logistic',
nthread=-1
)
print test_arr.columns.values
print len(train_arr)
print len(test_arr)
eval_set = [(train_arr, train_target), (test_arr, test_target)]
estimator.fit(
train_arr, train_target,
eval_set=eval_set,
eval_metric='logloss',
verbose=True,
early_stopping_rounds=150
)
proba = estimator.predict_proba(test_arr)
loss = log_loss(test_target, proba)
out_loss(loss)
losses.append({'loss':loss, 'best_score':estimator.best_score, 'best_iteration':estimator.best_iteration})
per_tree_res = xgboost_per_tree_results(estimator)
ii = estimator.feature_importances_
n_est.append(estimator.best_iteration)
# xgb.plot_importance(estimator)
# plot_errors(stats)
write_results(name, mongo_host, per_tree_res, losses, ii, train_arr.columns)
out_loss('avg = {}'.format(np.mean(losses)))
name='try_aux_freq'
perform_xgb_cv(name, gc_host)
print '============================'
print 'DONE!'
print '============================'
| [
"[email protected]"
]
| |
dd258e1388ef102e9d77f492101ef00bda3bda1f | 0dc67428c50acf9dea7c17da9c603169a05e201c | /customer/urls.py | b58afe08ab753951ca3b7f89accd78318da1be54 | []
| no_license | sherrywilly/Razorpay | fe9a48ca9a9dd1d7d59ad959535e7ae2e6045305 | 3fe96ff7d6e988b3c276950e0615c0a4eeb1da8e | refs/heads/master | 2023-07-02T11:37:55.951514 | 2021-07-29T04:36:19 | 2021-07-29T04:36:19 | 390,238,480 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 978 | py | from django.urls import path
from .views import completed, create_contacts, create_fund_account, create_payout, index,payment, refund,verifyPayment
from .webhooks import VerifyPayHook
urlpatterns = [
path('',index,name="index"),
path('payment/continue/',payment,name="pay"),
path('handlerequest/',verifyPayment,name="verify"),
path('payment/<payid>/refund/',refund,name="refund"),
path('payments',completed),
# path('payment/refund/',refund,name="refund"),
path('payouts/<int:pk>/add_contact/',create_contacts,name="create"),
path('payouts/<int:id>/add_bank/',create_fund_account,name="create_bank"),
path('payouts/<int:id>/pay/',create_payout,name="create"),
# path('payouts/<int:id>/pay/',create_payout,name="create"),
#####################!-------------- HOOK URLS ----------------##########################
path('hooks/verify/',VerifyPayHook.as_view()),
# path('hooks/verify/refund/',VerifyRefundHook.as_view())
]
| [
"[email protected]"
]
| |
d80456331b4a047786914c0b00ae1b4e517dc147 | 3f06e7ae747e935f7a2d1e1bae27a764c36a77d1 | /day23.py | 28136ee107dd5557680c2c853d2ec3f553c3faa0 | []
| no_license | mn113/adventofcode2016 | 94465f36c46e9aa21d879d82e043e1db8c55c9da | 3a93b23519acbfe326b8bf7c056f1747bbea036a | refs/heads/master | 2022-12-11T22:57:21.937221 | 2022-12-04T16:37:24 | 2022-12-04T16:37:24 | 75,545,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,572 | py | #! /usr/bin/env python
# Find result of long-running instruction set
import sys
import time
def intOrRegisterValue(x):
if x in 'abcd':
return registers[x]
else:
return int(x)
registers = {x:0 for x in 'abcd'}
registers['a'] = 7 # Part 1
registers['a'] = 12 # Part 2
print "Start:" + str(registers)
with open('day23_input.txt') as fp:
lines = fp.readlines()
l = len(lines)
i = 0
while i < l:
line = lines[i]
sys.stdout.write(str(registers) + ' : ' + str(i) + ' : ' + line)
sys.stdout.write('\r')
sys.stdout.flush()
#print registers
#print i, '/', l, ':', line
words = line.split()
if words[0] == 'tgl':
toggleDist = intOrRegisterValue(words[1])
# Convert to integer:
j = i + toggleDist
if j < 0 or j >= l:
# Out of range, start next loop immediately
i = i + 1
continue
elif lines[j][:3] == 'inc': # Toggle inc to dec
lines[j] = 'dec' + lines[j][3:]
elif lines[j][:3] == 'dec': # Toggle dec to inc
lines[j] = 'inc' + lines[j][3:]
elif lines[j][:3] == 'tgl': # Toggle tgl to inc
lines[j] = 'inc' + lines[j][3:]
elif lines[j][:3] == 'jnz': # Toggle jnz to cpy
lines[j] = 'cpy' + lines[j][3:]
else:
# cpy doesn't change when toggled
pass
print "Altered", j, lines[j]
elif words[0] == 'inc':
reg = words[1]
registers[reg] = registers[reg] + 1
elif words[0] == 'dec':
reg = words[1]
registers[reg] = registers[reg] - 1
elif words[0] == 'cpy':
src = words[1]
dest = words[2]
# Copy register?
registers[dest] = intOrRegisterValue(src)
elif words[0] == 'jnz':
# Test:
to_test = intOrRegisterValue(words[1])
jumpval = intOrRegisterValue(words[2])
if to_test != 0:
i = i + jumpval
continue # start next loop immediately
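# ADD and ZER are not standard assembunny instructions; they appear to be
# hand-written macros patched into the puzzle input so that the hot inc/dec
# loops (effectively a multiply plus a register clear) run in O(1) instead.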
elif words[0] == 'ADD':
times = 1
if len(words) > 3:
times = intOrRegisterValue(words[3])
registers[words[2]] = registers[words[2]] + times * registers[words[1]]
elif words[0] == 'ZER':
registers[words[1]] = 0
i = i + 1
time.sleep(0.0)
print "---"
print registers
| [
"[email protected]"
]
| |
43125388e7f13fb3f397da7be3da1133ae9fbb3d | 0b01cb61a4ae4ae236a354cbfa23064e9057e434 | /alipay/aop/api/response/KoubeiServindustryPortfolioDataCreateResponse.py | e5a08c2fa59d81fa90b88ce8d9d521a12247d995 | [
"Apache-2.0"
]
| permissive | hipacloud/alipay-sdk-python-all | e4aec2869bf1ea6f7c6fb97ac7cc724be44ecd13 | bdbffbc6d5c7a0a3dd9db69c99443f98aecf907d | refs/heads/master | 2022-11-14T11:12:24.441822 | 2020-07-14T03:12:15 | 2020-07-14T03:12:15 | 277,970,730 | 0 | 0 | Apache-2.0 | 2020-07-08T02:33:15 | 2020-07-08T02:33:14 | null | UTF-8 | Python | false | false | 778 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class KoubeiServindustryPortfolioDataCreateResponse(AlipayResponse):
def __init__(self):
super(KoubeiServindustryPortfolioDataCreateResponse, self).__init__()
self._portfolio_id = None
@property
def portfolio_id(self):
return self._portfolio_id
@portfolio_id.setter
def portfolio_id(self, value):
self._portfolio_id = value
def parse_response_content(self, response_content):
response = super(KoubeiServindustryPortfolioDataCreateResponse, self).parse_response_content(response_content)
if 'portfolio_id' in response:
self.portfolio_id = response['portfolio_id']
| [
"[email protected]"
]
| |
2e772f55c2c27f68f6544766d08317741c917946 | 0529196c4d0f8ac25afa8d657413d4fc1e6dd241 | /runnie0427/17675/17675.pypy3.py | 7063ffb72b39e59872c20046c6617499be8e6fde | []
| no_license | riyuna/boj | af9e1054737816ec64cbef5df4927c749808d04e | 06420dd38d4ac8e7faa9e26172b30c9a3d4e7f91 | refs/heads/master | 2023-03-17T17:47:37.198570 | 2021-03-09T06:11:41 | 2021-03-09T06:11:41 | 345,656,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,372 | py | [content omitted: not Python source but a saved Baekjoon Online Judge "404 Not Found" HTML error page (Korean), consisting only of site navigation, footer, and tracking scripts] | [
"[email protected]"
]
| |
b9e6149164b87a1472585e824d937adcc220d393 | c0cb1559188be071f1cd91d0adbad0ca850be0e1 | /problème dame dans un échéquier.py | 2dd481736794d0a2e70c1647a6960e629ebc9a32 | []
| no_license | mines-nancy-tcss5ac-2018/td1-cordel8u | d7299e661082d57ddf9ed2652c6ba345696d6641 | a766d874042bae2394aa84fded2ff683647d6ea5 | refs/heads/master | 2020-03-31T22:59:50.329544 | 2018-10-11T18:46:45 | 2018-10-11T18:46:45 | 152,639,007 | 0 | 0 | null | null | null | null | ISO-8859-1 | Python | false | false | 2,590 | py | from scipy import *
def case_libre(n,L,M):
#Takes as arguments an array M representing the chessboard
#and a list L giving the position of the newly placed queen;
#returns a new array in which the squares where it is
#impossible to place another queen are marked
S=array(zeros((n, n)))
for i in range (n):
for j in range (n):
S[i][j]=M[i][j]
for j in range(len(M)):
S [L[0]][j]=1
S [ j ][ L [1] ]=1
a=L[0]+1
b=L[1]+1
while a<n and b<n:
S[a][b]=1
a+=1
b+=1
a=L[0]+1
b=L[1]-1
while a<n and b>-1:
S[a][b]=1
a+=1
b-=1
a=L[0]-1
b=L[1]-1
while a>-1 and b>-1:
S[a][b]=1
a-=1
b-=1
a=L[0]-1
b=L[1]+1
while a>-1 and b<n:
S[a][b]=1
a-=1
b+=1
return(array(S))
def verif(M):
#vérifie si il reste des cases libre au placement d'une dame
z=False
for i in range (len(M)):
for j in range (len(M[i])):
if M[i][j]== 0:
z=True
return(z)
def indice(M):
#ressort l'indice d'une case libre au placement d'une dame
a=[-1,-1]
i=-1
while a==[-1,-1]:
i+=1
if 0 in M[i]:
K=list(M[i])
a=[i,K.index(0)]
return (a)
#M=array([[1,2,2],[1,4,0]])
#print(indice(M))
def iteration(d,n,L,N,compte):
#searches through all possible combinations and
#increments the counter each time it finds one;
#how this function works is hard to describe in writing, but I can
#explain its mechanism out loud with drawings
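#In short, it is a depth-first backtracking search: L is the explicit stack of
#queen positions and N the matching stack of board states; when a branch
#dead-ends, the last queen is popped and its square is marked as already
#tried (set to 1) in the previous board state.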
if d!=0 and verif(N[-1]):
L.append(indice(N[-1]))
N.append(case_libre(n,L[-1],N[-1]))
d-=1
return(iteration(d,n,L,N,compte))
if d==0:
compte+=1
a=L[-1]
del L[-1]
del N[-1]
N[-1][a[0]][a[1]]=1
d+=1
return(iteration(d,n,L,N,compte))
if d!=0 and not(verif(N[-1])):
if len(N)==1:
return(compte)
else:
a=L[-1]
del L[-1]
del N[-1]
N[-1][a[0]][a[1]]=1
d+=1
return(iteration(d,n,L,N,compte))
def solve(d,n):
compte=0
L=[]
N=[]
M=array(zeros((n, n)))
N.append(M)
return(iteration(d,n,L,N,compte))
print(solve(4,4))
| [
"[email protected]"
]
| |
b2262ac385c5fdf6442a2e8d4893d66427960a22 | b54d6a18bc5e86462c1f085386bc48065db5851c | /RandLinkVelDist.py | e43b4b1d5dc7d36505295ad323282e22a34e50c3 | []
| no_license | zoshs2/Percolation_Seoul | 5b5b8ebabe186fbc9e265fc190c3d0641e196517 | 69c0aa99d1f7a2fb9259681a1ed63794cbe5ea5c | refs/heads/main | 2023-07-28T20:50:13.393765 | 2021-09-28T13:25:31 | 2021-09-28T13:25:31 | 390,687,544 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,764 | py | import os
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from datetime import datetime
from statsmodels.nonparametric.kernel_regression import KernelReg
def RandLinkVelDist(date_dataset, sample=20, reg=False, time_step=5, savefig:'file_name'=False):
'''
Display the circadian velocity distribution of randomly-selected road samples.
'''
VEL_RESOLUTION = 5
timestep = int(time_step / VEL_RESOLUTION)
TIME = date_dataset.loc[0, ['PRCS_YEAR', 'PRCS_MON', 'PRCS_DAY', 'PRCS_HH', 'PRCS_MIN']].astype(np.int64).values
TIME = datetime(TIME[0], TIME[1], TIME[2], TIME[3], TIME[4])
filename_date = "s" + str(sample) + "_" + str(TIME.strftime("%Y%m%d"))
RandData = date_dataset[date_dataset['LINK_ID'].isin(np.random.choice(date_dataset['LINK_ID'].unique(), sample))].reset_index(drop=True)
    TimeIdx = RandData.groupby(['PRCS_HH', 'PRCS_MIN'])['PRCS_SPD'].mean().index # mean() is just used to get a groupby time ('Hour', 'Min') index.
time_xaxis = list(map(lambda x : str(format(x[0], '02d'))+':'+str(format(x[1], '02d')), TimeIdx))
time_xaxis = [datetime.strptime(i, '%H:%M') for i in time_xaxis]
RandIDs = RandData['LINK_ID'].unique()
fig = plt.figure(facecolor='w', figsize=(15, 8))
ax = plt.gca() # Get the Current Axes (GCA)
cmap = plt.get_cmap('gnuplot')
colors = [cmap(i) for i in np.linspace(0, 1, sample)]
for i, ID in enumerate(RandIDs):
RandOne = RandData[RandData['LINK_ID']==ID].sort_values(by=['PRCS_HH', 'PRCS_MIN'])
VelHist = RandOne['PRCS_SPD'].values
if reg is True:
VelShape = VelHist.shape[0]
kde = KernelReg(endog=VelHist, exog=np.arange(VelShape), var_type='c', bw=[5])
estimator = kde.fit(np.arange(VelShape))
estimator = np.reshape(estimator[0], VelShape)
plt.plot(time_xaxis, estimator, c=colors[i], label=str(ID))
continue
plt.plot(time_xaxis[::timestep], VelHist[::timestep], c=colors[i], label=str(ID))
fmt = mpl.dates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(fmt)
fig.autofmt_xdate()
ax.set_ylabel('Velocity (km/h)', fontsize=18)
ax.set_xlabel('Time', fontsize=18)
if savefig is not False:
filename = savefig + "_RandLinkVelDist_" + filename_date
if reg is True:
filename = "(Reg)" + filename
with open(filename+'.txt', 'w') as f:
for ID in RandIDs:
f.write("{}\n".format(ID))
print(filename, ".txt saved on ", os.getcwd())
print(filename, ".png saved on ", os.getcwd())
plt.savefig(filename + ".png")
plt.show()
return | [
"[email protected]"
]
| |
22c7dca88ffb0a39939a59965d2870f48a2959c2 | bef807fa68ce7b4c77ddf4f4a030f9a5b798e419 | /GEN-SIM/configs/BulkGravToWW_narrow_M-3000_13TeV-madgraph-herwigpp.py | 3f3ea6ebfefea9d8c1a228279ebfc87446360a65 | []
| no_license | clelange/DibosonSignalProductionHerwig | 97cf93963eb27f450e9ad95c549622a1b24d2cdd | f1477a61a709f5338d07b3c8a04131a8bc301783 | refs/heads/master | 2021-01-11T14:19:09.108870 | 2017-02-08T16:26:21 | 2017-02-08T16:26:21 | 81,343,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,886 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: Configuration/GenProduction/python/ThirteenTeV/Grav_Hadronizer_TuneEE5C_13TeV_madgraph_differentPDF_herwigpp_cff.py --filein dbs:/BulkGravToWW_narrow_M-3000_13TeV-madgraph/RunIIWinter15wmLHE-MCRUN2_71_V1-v1/LHE --fileout file:output.root --mc --eventcontent RAWSIM --customise SLHCUpgradeSimulations/Configuration/postLS1Customs.customisePostLS1,Configuration/DataProcessing/Utils.addMonitoring --datatier GEN-SIM --conditions MCRUN2_71_V1::All --beamspot Realistic50ns13TeVCollision --step GEN,SIM --magField 38T_PostLS1 --python_filename BulkGravToWW_narrow_M-3000_13TeV-madgraph-herwigpp_cfg.py --no_exec -n 29
import FWCore.ParameterSet.Config as cms
process = cms.Process('SIM')
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.Geometry.GeometrySimDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_PostLS1_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic50ns13TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(29)
)
# Input source
process.source = cms.Source("PoolSource",
secondaryFileNames = cms.untracked.vstring(),
fileNames = cms.untracked.vstring('/store/mc/RunIIWinter15wmLHE/BulkGravToWW_narrow_M-3000_13TeV-madgraph/LHE/MCRUN2_71_V1-v1/00000/2632A0E8-C404-E511-9CFE-0025905C431A.root'),
inputCommands = cms.untracked.vstring('keep *',
'drop LHEXMLStringProduct_*_*_*'),
dropDescendantsOfDroppedBranches = cms.untracked.bool(False)
)
process.options = cms.untracked.PSet(
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
version = cms.untracked.string('$Revision: 1.19 $'),
annotation = cms.untracked.string('Configuration/GenProduction/python/ThirteenTeV/Grav_Hadronizer_TuneEE5C_13TeV_madgraph_differentPDF_herwigpp_cff.py nevts:29'),
name = cms.untracked.string('Applications')
)
# Output definition
process.RAWSIMoutput = cms.OutputModule("PoolOutputModule",
splitLevel = cms.untracked.int32(0),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
outputCommands = process.RAWSIMEventContent.outputCommands,
fileName = cms.untracked.string('file:output.root'),
dataset = cms.untracked.PSet(
filterName = cms.untracked.string(''),
dataTier = cms.untracked.string('GEN-SIM')
),
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('generation_step')
)
)
# Additional output definition
# Other statements
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.generator = cms.EDFilter("ThePEGHadronizerFilter",
hwpp_cmsDefaults = cms.vstring('+hwpp_basicSetup',
'+hwpp_setParticlesStableForDetector'),
run = cms.string('LHC'),
repository = cms.string('HerwigDefaults.rpo'),
dataLocation = cms.string('${HERWIGPATH}'),
hwpp_setParticlesStableForDetector = cms.vstring('set /Herwig/Particles/mu-:Stable Stable',
'set /Herwig/Particles/mu+:Stable Stable',
'set /Herwig/Particles/Sigma-:Stable Stable',
'set /Herwig/Particles/Sigmabar+:Stable Stable',
'set /Herwig/Particles/Lambda0:Stable Stable',
'set /Herwig/Particles/Lambdabar0:Stable Stable',
'set /Herwig/Particles/Sigma+:Stable Stable',
'set /Herwig/Particles/Sigmabar-:Stable Stable',
'set /Herwig/Particles/Xi-:Stable Stable',
'set /Herwig/Particles/Xibar+:Stable Stable',
'set /Herwig/Particles/Xi0:Stable Stable',
'set /Herwig/Particles/Xibar0:Stable Stable',
'set /Herwig/Particles/Omega-:Stable Stable',
'set /Herwig/Particles/Omegabar+:Stable Stable',
'set /Herwig/Particles/pi+:Stable Stable',
'set /Herwig/Particles/pi-:Stable Stable',
'set /Herwig/Particles/K+:Stable Stable',
'set /Herwig/Particles/K-:Stable Stable',
'set /Herwig/Particles/K_S0:Stable Stable',
'set /Herwig/Particles/K_L0:Stable Stable'),
generatorModule = cms.string('/Herwig/Generators/LHCGenerator'),
eventHandlers = cms.string('/Herwig/EventHandlers'),
hwpp_basicSetup = cms.vstring('create ThePEG::RandomEngineGlue /Herwig/RandomGlue',
'set /Herwig/Generators/LHCGenerator:RandomNumberGenerator /Herwig/RandomGlue',
'set /Herwig/Generators/LHCGenerator:NumberOfEvents 10000000',
'set /Herwig/Generators/LHCGenerator:DebugLevel 1',
'set /Herwig/Generators/LHCGenerator:UseStdout 0',
'set /Herwig/Generators/LHCGenerator:PrintEvent 0',
'set /Herwig/Generators/LHCGenerator:MaxErrors 10000'),
herwigNewPhysics = cms.vstring('cd /Herwig/Particles',
'create ThePEG::ParticleData graviton',
'setup graviton 39 graviton 1000 0.0 0.0 0.0 0 0 5 0',
'cd /'),
hwpp_ue_EE5CEnergyExtrapol = cms.vstring('set /Herwig/UnderlyingEvent/MPIHandler:EnergyExtrapolation Power',
'set /Herwig/UnderlyingEvent/MPIHandler:ReferenceScale 7000.*GeV',
'set /Herwig/UnderlyingEvent/MPIHandler:Power 0.33',
'set /Herwig/UnderlyingEvent/MPIHandler:pTmin0 3.91*GeV'),
hwpp_ue_EE5C = cms.vstring('+hwpp_ue_EE5CEnergyExtrapol',
'set /Herwig/Hadronization/ColourReconnector:ColourReconnection Yes',
'set /Herwig/Hadronization/ColourReconnector:ReconnectionProbability 0.49',
'set /Herwig/Partons/RemnantDecayer:colourDisrupt 0.80',
'set /Herwig/UnderlyingEvent/MPIHandler:InvRadius 2.30',
'set /Herwig/UnderlyingEvent/MPIHandler:softInt Yes',
'set /Herwig/UnderlyingEvent/MPIHandler:twoComp Yes',
'set /Herwig/UnderlyingEvent/MPIHandler:DLmode 2'),
hwpp_pdf_CTEQ6LL_Hard_CUETHS1 = cms.vstring('+hwpp_pdf_CTEQ6L1_Hard_CUETHS1'),
hwpp_pdf_CTEQ6LL_Hard = cms.vstring('+hwpp_pdf_CTEQ6L1_Hard'),
hwpp_pdf_CTEQ6L1_Hard = cms.vstring('+hwpp_pdf_CTEQ6L1_Hard_Common',
'+hwpp_ue_EE5C'),
hwpp_pdf_CTEQ6L1_Common = cms.vstring('create ThePEG::LHAPDF /Herwig/Partons/cmsPDFSet ThePEGLHAPDF.so',
'set /Herwig/Partons/cmsPDFSet:PDFName cteq6ll.LHpdf',
'set /Herwig/Partons/cmsPDFSet:RemnantHandler /Herwig/Partons/HadronRemnants',
'set /Herwig/Particles/p+:PDF /Herwig/Partons/cmsPDFSet',
'set /Herwig/Particles/pbar-:PDF /Herwig/Partons/cmsPDFSet'),
hwpp_pdf_CTEQ6L1_CUETHS1 = cms.vstring('+hwpp_pdf_CTEQ6L1_Common',
'+hwpp_ue_CUETHS1'),
hwpp_pdf_CTEQ6L1 = cms.vstring('+hwpp_pdf_CTEQ6L1_Common',
'+hwpp_ue_EE5C'),
hwpp_pdf_CTEQ6LL_CUETHS1 = cms.vstring('+hwpp_pdf_CTEQ6L1_CUETHS1'),
hwpp_pdf_CTEQ6L1_Hard_Common = cms.vstring('create ThePEG::LHAPDF /Herwig/Partons/cmsHardPDFSet ThePEGLHAPDF.so',
'set /Herwig/Partons/cmsHardPDFSet:PDFName cteq6ll.LHpdf',
'set /Herwig/Partons/cmsHardPDFSet:RemnantHandler /Herwig/Partons/HadronRemnants'),
hwpp_pdf_CTEQ6L1_Hard_CUETHS1 = cms.vstring('+hwpp_pdf_CTEQ6L1_Hard_Common',
'+hwpp_ue_CUETHS1'),
hwpp_pdf_CTEQ6LL = cms.vstring('+hwpp_pdf_CTEQ6L1'),
hwpp_pdf_NNPDF30LO_Hard = cms.vstring('create ThePEG::LHAPDF /Herwig/Partons/cmsHardPDFSet ThePEGLHAPDF.so',
'set /Herwig/Partons/cmsHardPDFSet:PDFName NNPDF30_lo_as_0130.LHgrid',
'set /Herwig/Partons/cmsHardPDFSet:RemnantHandler /Herwig/Partons/HadronRemnants'),
hwpp_pdf_NNPDF30LO = cms.vstring('create ThePEG::LHAPDF /Herwig/Partons/cmsPDFSet ThePEGLHAPDF.so',
'set /Herwig/Partons/cmsPDFSet:PDFName NNPDF30_lo_as_0130.LHgrid',
'set /Herwig/Partons/cmsPDFSet:RemnantHandler /Herwig/Partons/HadronRemnants',
'set /Herwig/Particles/p+:PDF /Herwig/Partons/cmsPDFSet',
'set /Herwig/Particles/pbar-:PDF /Herwig/Partons/cmsPDFSet'),
hwpp_cm_13TeV = cms.vstring('set /Herwig/Generators/LHCGenerator:EventHandler:LuminosityFunction:Energy 13000.0',
'set /Herwig/Shower/Evolver:IntrinsicPtGaussian 2.2*GeV'),
hwpp_LHE_Powheg_Common = cms.vstring('+hwpp_LHE_Common',
'set /Herwig/Shower/Evolver:HardVetoMode Yes',
'set /Herwig/Shower/Evolver:HardVetoReadOption PrimaryCollision'),
hwpp_LHE_Powheg = cms.vstring('+hwpp_LHE_Powheg_Common',
'set /Herwig/EventHandlers/LHEReader:PDFA /Herwig/Partons/cmsPDFSet',
'set /Herwig/EventHandlers/LHEReader:PDFB /Herwig/Partons/cmsPDFSet'),
hwpp_LHE_MadGraph = cms.vstring('+hwpp_LHE_Common',
'set /Herwig/EventHandlers/LHEReader:PDFA /Herwig/Partons/cmsPDFSet',
'set /Herwig/EventHandlers/LHEReader:PDFB /Herwig/Partons/cmsPDFSet'),
hwpp_LHE_Common = cms.vstring('create ThePEG::Cuts /Herwig/Cuts/NoCuts',
'create ThePEG::LesHouchesInterface /Herwig/EventHandlers/LHEReader',
'set /Herwig/EventHandlers/LHEReader:Cuts /Herwig/Cuts/NoCuts',
'set /Herwig/EventHandlers/LHEReader:MomentumTreatment RescaleEnergy',
'set /Herwig/EventHandlers/LHEReader:WeightWarnings 0',
'set /Herwig/EventHandlers/LHEReader:InitPDFs 0',
'create ThePEG::LesHouchesEventHandler /Herwig/EventHandlers/LHEHandler',
'insert /Herwig/EventHandlers/LHEHandler:LesHouchesReaders 0 /Herwig/EventHandlers/LHEReader',
'set /Herwig/EventHandlers/LHEHandler:WeightOption VarNegWeight',
'set /Herwig/EventHandlers/LHEHandler:PartonExtractor /Herwig/Partons/QCDExtractor',
'set /Herwig/EventHandlers/LHEHandler:CascadeHandler /Herwig/Shower/ShowerHandler',
'set /Herwig/EventHandlers/LHEHandler:HadronizationHandler /Herwig/Hadronization/ClusterHadHandler',
'set /Herwig/EventHandlers/LHEHandler:DecayHandler /Herwig/Decays/DecayHandler',
'insert /Herwig/EventHandlers/LHEHandler:PreCascadeHandlers 0 /Herwig/NewPhysics/DecayHandler',
'set /Herwig/Generators/LHCGenerator:EventHandler /Herwig/EventHandlers/LHEHandler',
'set /Herwig/Shower/Evolver:MaxTry 100',
'set /Herwig/Shower/Evolver:HardVetoScaleSource Read',
'set /Herwig/Shower/KinematicsReconstructor:ReconstructionOption General',
'set /Herwig/Shower/KinematicsReconstructor:InitialInitialBoostOption LongTransBoost',
'+hwpp_MECorr_Common'),
hwpp_LHE_MadGraph_DifferentPDFs = cms.vstring('+hwpp_LHE_Common',
'set /Herwig/EventHandlers/LHEReader:PDFA /Herwig/Partons/cmsHardPDFSet',
'set /Herwig/EventHandlers/LHEReader:PDFB /Herwig/Partons/cmsHardPDFSet'),
hwpp_LHE_Powheg_DifferentPDFs = cms.vstring('+hwpp_LHE_Powheg_Common',
'set /Herwig/EventHandlers/LHEReader:PDFA /Herwig/Partons/cmsHardPDFSet',
'set /Herwig/EventHandlers/LHEReader:PDFB /Herwig/Partons/cmsHardPDFSet'),
hwpp_MECorr_On = cms.vstring('+hwpp_MECorr_Common',
'set /Herwig/Shower/Evolver:MECorrMode Yes'),
hwpp_MECorr_SoftOn = cms.vstring('+hwpp_MECorr_Common',
'set /Herwig/Shower/Evolver:MECorrMode Soft'),
hwpp_MECorr_Common = cms.vstring('set /Herwig/Shower/Evolver:MECorrMode No'),
hwpp_MECorr_HardOn = cms.vstring('+hwpp_MECorr_Common',
'set /Herwig/Shower/Evolver:MECorrMode Hard'),
hwpp_MECorr_Off = cms.vstring('+hwpp_MECorr_Common'),
configFiles = cms.vstring(),
crossSection = cms.untracked.double(-1),
parameterSets = cms.vstring('hwpp_cmsDefaults',
'herwigNewPhysics',
'hwpp_ue_EE5C',
'hwpp_cm_13TeV',
'hwpp_pdf_CTEQ6L1',
'hwpp_pdf_NNPDF30LO_Hard',
'hwpp_LHE_MadGraph_DifferentPDFs',
'hwpp_MECorr_Off'),
filterEfficiency = cms.untracked.double(1.0)
)
process.ProductionFilterSequence = cms.Sequence(process.generator)
# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RAWSIMoutput_step = cms.EndPath(process.RAWSIMoutput)
# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.endjob_step,process.RAWSIMoutput_step)
# filter all path with the production filter sequence
for path in process.paths:
getattr(process,path)._seq = process.ProductionFilterSequence * getattr(process,path)._seq
# customisation of the process.
# Automatic addition of the customisation function from Configuration.DataProcessing.Utils
from Configuration.DataProcessing.Utils import addMonitoring
#call to customisation function addMonitoring imported from Configuration.DataProcessing.Utils
process = addMonitoring(process)
# Automatic addition of the customisation function from SLHCUpgradeSimulations.Configuration.postLS1Customs
from SLHCUpgradeSimulations.Configuration.postLS1Customs import customisePostLS1
#call to customisation function customisePostLS1 imported from SLHCUpgradeSimulations.Configuration.postLS1Customs
process = customisePostLS1(process)
# End of customisation functions
| [
"[email protected]"
]
| |
c91a7b5364ed05d94b915ad3edca42e51af1ea75 | f11600b9a256bf6a2b584d127faddc27a0f0b474 | /normal/662.py | df18e5f15146532ef8f12376b46b3043f70c7355 | []
| no_license | longhao54/leetcode | 9c1f0ce4ca505ec33640dd9b334bae906acd2db5 | d156c6a13c89727f80ed6244cae40574395ecf34 | refs/heads/master | 2022-10-24T07:40:47.242861 | 2022-10-20T08:50:52 | 2022-10-20T08:50:52 | 196,952,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | class Solution:
    def widthOfBinaryTree(self, root: TreeNode) -> int:
        # BFS over the tree, tagging each node with its heap-style index;
        # the width of a level is then (max index - min index + 1).
        ans = 1
        dp = [(root, 1)]
        while dp:
            s, m = float('inf'), float('-inf')
            for _ in range(len(dp)):  # process exactly one level
                t, i = dp.pop(0)
                if t.left:
                    dp.append((t.left, i * 2))
                if t.right:
                    dp.append((t.right, i * 2 + 1))
                s = min(s, i)  # leftmost index seen on this level
                m = max(m, i)  # rightmost index seen on this level
            ans = max(m - s + 1, ans)
        return ans
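# Usage sketch, assuming the standard LeetCode TreeNode(val) with .left/.right
# (not defined in this file). Building the tree [1, 3, 2, 5, 3, None, 9]:
#
#     root = TreeNode(1)
#     root.left, root.right = TreeNode(3), TreeNode(2)
#     root.left.left, root.left.right = TreeNode(5), TreeNode(3)
#     root.right.right = TreeNode(9)
#     Solution().widthOfBinaryTree(root)  # -> 4 (level 3 spans indices 4..7)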
| [
"[email protected]"
]
| |
3fd3878a08b3f0b3f00dac287d62c71984f01380 | c1bd12405d244c5924a4b069286cd9baf2c63895 | /azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/application_gateway_available_waf_rule_sets_result_py3.py | 1d90cb1f1470bffbefbb643312ec48f97b2613b3 | [
"MIT"
]
| permissive | lmazuel/azure-sdk-for-python | 972708ad5902778004680b142874582a284a8a7c | b40e0e36cc00a82b7f8ca2fa599b1928240c98b5 | refs/heads/master | 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 | MIT | 2019-10-25T15:56:00 | 2014-06-27T19:40:56 | Python | UTF-8 | Python | false | false | 1,104 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayAvailableWafRuleSetsResult(Model):
"""Response for ApplicationGatewayAvailableWafRuleSets API service call.
:param value: The list of application gateway rule sets.
:type value:
list[~azure.mgmt.network.v2017_09_01.models.ApplicationGatewayFirewallRuleSet]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGatewayFirewallRuleSet]'},
}
def __init__(self, *, value=None, **kwargs) -> None:
super(ApplicationGatewayAvailableWafRuleSetsResult, self).__init__(**kwargs)
self.value = value
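# Minimal construction sketch (the rule-set list contents are illustrative):
#
#     result = ApplicationGatewayAvailableWafRuleSetsResult(
#         value=[],  # list of ApplicationGatewayFirewallRuleSet instances
#     )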
| [
"[email protected]"
]
| |
56052fc5690dc0fbd9529a96cbe1b602c35676a9 | dfc827bf144be6edf735a8b59b000d8216e4bb00 | /CODE/postprocessing/Thesis/GaussBump/SimpleRead.py | d19224edb9ae2717dba2faecbed8532dbb7825c9 | []
| no_license | jordanpitt3141/ALL | c5f55e2642d4c18b63b4226ddf7c8ca492c8163c | 3f35c9d8e422e9088fe096a267efda2031ba0123 | refs/heads/master | 2020-07-12T16:26:59.684440 | 2019-05-08T04:12:26 | 2019-05-08T04:12:26 | 94,275,573 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,952 | py | import csv
from numpy.linalg import norm
from scipy import *
import os
from pylab import plot, show, legend,xlim,ylim,savefig,title,xlabel,ylabel,clf, loglog
from numpy import ones
wdir = "/home/jp/Documents/PhD/project/data/ThesisRedo2019/DryForced/FEVM2NoRegTol/12/"
sdir = "/home/jp/Documents/PhD/project/master/FigureData/ThesisRedo/DryForced/FEVM2/Ex/"
if not os.path.exists(sdir):
os.makedirs(sdir)
ts = "10.0"
gap = 8
s = wdir + "outList"+ts+"s.txt"
with open(s,'r') as file1:
readfile = csv.reader(file1, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
b = []
w = []
h = []
u = []
G = []
x = []
j = -1
for row in readfile:
if (j >= 0):
#ASPECTRAT/constantmultiplier
x.append(float(row[0]))
h.append(float(row[1]))
G.append(float(row[2]))
u.append(float(row[3]))
b.append(float(row[4]))
w.append(float(row[5]))
j = j + 1
x = array(x[::gap])
b = array(b[::gap])
w = array(w[::gap])
h = array(h[::gap])
u = array(u[::gap])
G = array(G[::gap])
n = len(x)
s = sdir + "Stage"+ts+"s.dat"
with open(s,'w') as file1:
for i in range(n):
s ="%3.8f%5s%1.20f\n" %(x[i]," ",w[i])
file1.write(s)
s = sdir + "Bed"+ts+"s.dat"
with open(s,'w') as file1:
for i in range(n):
s ="%3.8f%5s%1.20f\n" %(x[i]," ",b[i])
file1.write(s)
s = sdir + "h"+ts+"s.dat"
with open(s,'w') as file1:
for i in range(n):
s ="%3.8f%5s%1.20f\n" %(x[i]," ",h[i])
file1.write(s)
s = sdir + "u"+ts+"s.dat"
with open(s,'w') as file1:
for i in range(n):
s ="%3.8f%5s%1.20f\n" %(x[i]," ",u[i])
file1.write(s)
s = sdir + "G"+ts+"s.dat"
with open(s,'w') as file1:
for i in range(n):
s ="%3.8f%5s%1.20f\n" %(x[i]," ",G[i])
file1.write(s)
| [
"[email protected]"
]
| |
cd388b1fa34c8b7c139387d2f9da86e2be08a184 | bf9c1aa7ac16d467921affa7381dae301e0a1308 | /apps/articles/urls.py | 4ba8201ad1d278b04f4c849955da1484c39b3dd6 | []
| no_license | clincher/ecigar | ec12223bbbcad383e30ea588babee0a89b15db9d | f534bee7ede5c3af882792616c440c7736193fd0 | refs/heads/master | 2020-12-24T15:14:18.688748 | 2016-12-26T00:29:44 | 2016-12-26T00:29:44 | 2,352,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView, DetailView
from models import Article
urlpatterns = patterns('',
# Products
url(r'^stat.html$',
ListView.as_view(model=Article),
name='article_list'
),
url(r'^stat(?P<slug>[0-9A-Za-z-_.//]+).html$',
DetailView.as_view(model=Article),
name='article_detail'
),
)
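# Reverse-resolution sketch (the slug value is hypothetical, and the final
# prefix depends on where this URLconf is included):
#
#     reverse('article_detail', kwargs={'slug': 'my-article'})
#     # -> '.../statmy-article.html'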
| [
"[email protected]"
]
| |
d706cbc2c581af29582c417ee42d30c6d487eef0 | ad715f9713dc5c6c570a5ac51a18b11932edf548 | /tensorflow/lite/testing/op_tests/scatter_nd.py | 8a365ae5b96365937c5c2c28468aa81e1870ed84 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
]
| permissive | rockzhuang/tensorflow | f1f31bc8edfa402b748c500efb97473c001bac95 | cb40c060b36c6a75edfefbc4e5fc7ee720273e13 | refs/heads/master | 2022-11-08T20:41:36.735747 | 2022-10-21T01:45:52 | 2022-10-21T01:45:52 | 161,580,587 | 27 | 11 | Apache-2.0 | 2019-01-23T11:00:44 | 2018-12-13T03:47:28 | C++ | UTF-8 | Python | false | false | 2,856 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for scatter_nd."""
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_scatter_nd_tests(options):
"""Make a set of tests to do scatter_nd."""
test_parameters = [{
"indices_dtype": [tf.int32],
"indices_shape": [[4, 1]],
"indices_value": [[[4], [3], [1], [7]]],
"updates_dtype": [tf.int32, tf.int64, tf.float32, tf.bool],
"updates_shape": [[4]],
"shape_dtype": [tf.int32],
"shape_shape": [[1]],
"shape_value": [[8]]
}, {
"indices_dtype": [tf.int32],
"indices_shape": [[4, 2]],
"indices_value": [[[0, 0], [1, 0], [0, 2], [1, 2]]],
"updates_dtype": [tf.int32, tf.int64, tf.float32, tf.bool],
"updates_shape": [[4, 5]],
"shape_dtype": [tf.int32],
"shape_shape": [[3]],
"shape_value": [[2, 3, 5]]
}]
def build_graph(parameters):
"""Build the scatter_nd op testing graph."""
indices = tf.compat.v1.placeholder(
dtype=parameters["indices_dtype"],
name="indices",
shape=parameters["indices_shape"])
updates = tf.compat.v1.placeholder(
dtype=parameters["updates_dtype"],
name="updates",
shape=parameters["updates_shape"])
shape = tf.compat.v1.placeholder(
dtype=parameters["shape_dtype"],
name="shape",
shape=parameters["shape_shape"])
out = tf.scatter_nd(indices, updates, shape)
return [indices, updates, shape], [out]
def build_inputs(parameters, sess, inputs, outputs):
indices = np.array(parameters["indices_value"])
updates = create_tensor_data(parameters["updates_dtype"],
parameters["updates_shape"])
shape = np.array(parameters["shape_value"])
return [indices, updates, shape], sess.run(
outputs, feed_dict=dict(zip(inputs, [indices, updates, shape])))
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
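# Semantics sketch for the first parameter set (the update values are assumed,
# since they are generated randomly by create_tensor_data): with
# indices=[[4], [3], [1], [7]], updates=[9, 10, 11, 12] and shape=[8],
# tf.scatter_nd returns [0, 11, 0, 10, 9, 0, 0, 12].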
| [
"[email protected]"
]
| |
6c708d71414961bfd27dd63946aaa70d181350d5 | 6dc463ce97fc275787cfdef563317f3f7e4f5fcf | /radio_table_widget_app/widgets.py | 557a613116b686330885748746143cf0bdc904d1 | []
| no_license | chapkovski/table_radio_widget | 7ea7506d801213cb24a832096fbf88ab7eb89c92 | 320a2b2f5462c6abe8bd0a355b1b4ac8defe3adf | refs/heads/master | 2020-03-22T09:29:23.298900 | 2018-07-06T17:24:23 | 2018-07-06T17:24:23 | 139,840,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | from django.forms import RadioSelect
class TableRadio(RadioSelect):
template_name = 'widgets/multiple_input.html'
option_template_name = 'widgets/input_option.html'
    def __init__(self, top_row=None, bottom_row=None, attrs=None, choices=()):
self.top_row = top_row
self.bottom_row = bottom_row
return super().__init__(attrs, choices)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['top_row'] = self.top_row
context['bottom_row'] = self.bottom_row
context['col_width'] = 100 / len(self.choices)
return context
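# Minimal usage sketch (the form and field names are hypothetical):
#
#     from django import forms
#
#     class RatingForm(forms.Form):
#         rating = forms.ChoiceField(
#             choices=[(str(i), str(i)) for i in range(1, 6)],
#             widget=TableRadio(top_row='Low', bottom_row='High'),
#         )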
| [
"[email protected]"
]
| |
8e22d1ea23f7ca524327b2070d521659d9c3922e | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /3gziWsCxqGwGGZmr5_11.py | 6392e84ea4d1c88aaa1154379e9f4d945b640bab | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 763 | py | """
Given two integers as arguments, create a function that finds the largest
prime within the range of the two integers.
### Examples
fat_prime(2, 10) ➞ 7
# range [2, 3, 4, 5, 6, 7, 8, 9, 10] and the largest prime is 7.
fat_prime(10, 2) ➞ 7
# [10, 9, 8, 7, 6, 5, 4, 3, 2] and the largest prime is 7.
fat_prime(4, 24) ➞ 23
# range [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] the largest prime is 23.
### Notes
All numbers will be positive integers.
"""
def fat_prime(a, b):
y = min(a,b)
z = max(a,b)
for i in range(z,y,-1):
if is_prime(i):
return i
return None
def is_prime(n):
  if n < 2:
    return False
  for i in range(2, n):
    if n % i == 0:
      return False
  return True
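# Quick self-check, using the expected values from the examples above:
if __name__ == "__main__":
  assert fat_prime(2, 10) == 7
  assert fat_prime(10, 2) == 7
  assert fat_prime(4, 24) == 23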
| [
"[email protected]"
]
| |
20a405147dc239db1af8b180b78f4310c43f38b0 | ae66ad38a7b19c01f1099d671dd127716a5d4c34 | /accounts/migrations/0025_auto_20180511_1233.py | e1d9e06c37affbd3e572a0f042dd681de84ec054 | []
| no_license | selbieh/django-freelacer-website | 6fd1eb009e9b30738bfa59fa78f530144b273231 | 0971a7fc3dc7e63a1909bb6adf3a84d7d9083324 | refs/heads/master | 2022-11-22T19:07:48.470928 | 2019-11-24T12:24:26 | 2019-11-24T12:24:26 | 172,359,908 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | # Generated by Django 2.0.4 on 2018-05-11 10:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0024_auto_20180511_1229'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='pic',
field=models.ImageField(upload_to='profile/profile_pic'),
),
migrations.AlterField(
model_name='userprofile',
name='resume',
field=models.FileField(upload_to='profile/resume'),
),
]
| [
"[email protected]"
]
| |
02572ac0d7a899647d2e88f1a95a0f55337c7e01 | fc1c1e88a191b47f745625688d33555901fd8e9a | /meraki/models/protocol_4_enum.py | a5a84ca844f12ecbee618d6942e1886545423e86 | [
"MIT",
"Python-2.0"
]
| permissive | RaulCatalano/meraki-python-sdk | 9161673cfd715d147e0a6ddb556d9c9913e06580 | 9894089eb013318243ae48869cc5130eb37f80c0 | refs/heads/master | 2022-04-02T08:36:03.907147 | 2020-02-03T19:24:04 | 2020-02-03T19:24:04 | 416,889,849 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | # -*- coding: utf-8 -*-
"""
meraki
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class Protocol4Enum(object):
"""Implementation of the 'Protocol4' enum.
The protocol of the incoming packet. Can be one of "ANY", "TCP" or "UDP".
Default value is "ANY"
Attributes:
ANY: TODO: type description here.
TCP: TODO: type description here.
UDP: TODO: type description here.
"""
ANY = 'ANY'
TCP = 'TCP'
UDP = 'UDP'
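# Usage sketch: the members are plain string constants, e.g.
#     Protocol4Enum.TCP  # -> 'TCP'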
| [
"[email protected]"
]
| |
651b60b515fe7843967505febf81ecf3864711a5 | a7f39db24ce26ab0f02650ffd97007222aa536c5 | /so.guishiwen.org_shiwen.py | 1c43a5299d50000c95b371c9ff9420f1d01ebc75 | []
| no_license | hpifu/py-ancient | 6f51067c4c6ef1adb8241994e03dccb29e35b501 | a845e86057432a39f8239263aa7bf0e97c3f4c76 | refs/heads/master | 2022-12-13T00:40:09.735917 | 2019-10-27T05:21:52 | 2019-10-27T05:21:52 | 204,660,319 | 0 | 0 | null | 2022-07-06T20:16:22 | 2019-08-27T08:48:26 | Python | UTF-8 | Python | false | false | 1,333 | py | #!/usr/bin/env python3
import requests
from pyquery import PyQuery as pq
www = "https://so.gushiwen.org/shiwen"
def getPage(url):
res = requests.get(
url,
headers={
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36",
}
)
return res.text
def analyst(text):
d = pq(text)
shis = []
sons = d("div.sons")
for son in sons.items():
name = son("p b").text()
if not name:
continue
infos = list(son("p.source a").items())
dynasty = infos[0].text()
author = infos[1].text()
content = son("div.contson").text()
tags = son("div.tag a").text()
shis.append({
"name": name,
"dynasty": dynasty,
"author": author,
"tags": tags,
"content": content,
})
next = d("div.pagesright a.amore").attr("href")
return shis, next
def main():
print(analyst(getPage(www+"/default_4A111111111111A1.aspx")))
if __name__ == '__main__':
main()
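# analyst() returns (shis, next): shis is a list of dicts with the keys
# name/dynasty/author/tags/content, and next is the href of the next results
# page taken from the "amore" pager link (None when that link is absent).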
| [
"[email protected]"
]
| |
db32253902147d6de63a312faa4dc7a41e150337 | c016088a3bdb255d4f5253185d27b5a4c75feb1b | /11_testing_your_code/11_3/employee.py | 919260baf759a0a8360fcd951f4ce7399a8e2888 | [
"MIT"
]
| permissive | simonhoch/python_basics | b0b7c37ff647b653bb4c16a116e5521fc6b438b6 | 4ecf12c074e641e3cdeb0a6690846eb9133f96af | refs/heads/master | 2021-04-03T10:11:10.660454 | 2018-03-13T20:04:46 | 2018-03-13T20:26:25 | 125,107,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | class Employee ():
"""Simple attempt to describe an employee"""
def __init__(self, first_name, last_name, annual_salary):
"""Initialization of the employee class"""
self.first_name = first_name.title()
self.last_name = last_name.title()
self.annual_salary = annual_salary
def give_a_raise(self, salary_raise=5000):
"""Add a raise for an employee"""
self.annual_salary += salary_raise
def edit_informations(self):
"""Edit information of a salary"""
print(self.first_name + ', ' + self.last_name + ', salary: '
+ str(self.annual_salary))
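# Minimal usage sketch (the values are illustrative):
#
#     emp = Employee('ada', 'lovelace', 50000)
#     emp.give_a_raise()       # default raise of 5000
#     emp.edit_informations()  # prints: Ada, Lovelace, salary: 55000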
| [
"[email protected]"
]
| |
c08a06098466014eebcd6ca0f27fc1259e9c1c1a | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /directconnect_write_f/virtual-interface_delete.py | 80e3a4f24ad845af820b46f860e9d930d4b0b178 | []
| no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
"""
associate-virtual-interface : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/directconnect/associate-virtual-interface.html
describe-virtual-interfaces : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/directconnect/describe-virtual-interfaces.html
"""
write_parameter("directconnect", "delete-virtual-interface") | [
"[email protected]"
]
| |
6e2d152884470b76fab1e4f8be0c8476ae6e0fb1 | f29e8c30b9f7b66cb66bfb634608adec74a4aee0 | /012desafio - ler preco, retorna desconto.py | f26b56ea286e0bc9a308999fc0202fc8be53aad4 | []
| no_license | brunoparodi/Curso-GUANABARA | acafe1390ccd2ba5648ca30f73f54b95a6c57201 | 16b7a293a54f1a471fa07830bc66709a88fceb79 | refs/heads/master | 2020-04-24T09:16:12.095977 | 2019-02-21T11:17:01 | 2019-02-21T11:17:01 | 171,857,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | preco = float(input('Informe o preço do produto: R$'))
desconto = preco * (5 / 100)
print('The price with the discount is: R${:.2f}.'.format(preco - desconto))
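# Example: for a price of R$100.00 the 5% discount is R$5.00, so this
# prints "The price with the discount is: R$95.00."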
| [
"[email protected]"
]
| |
faaabc87d530eda66341796909e94a28dc6d25c5 | b306aab9dcea2dd83dda700bc9f7b9f1a32cff3a | /CAIL2020/cocr/det_infer.py | a89f3a16932ef293bd9a8018db8f313597098ffd | [
"Apache-2.0"
]
| permissive | Tulpen/CAIL | d6ca9981c7ea2603ae61675ba330a9614cd9398d | c4cfa98ab4ecedbce34a7a5a186830486047540c | refs/heads/master | 2023-04-23T20:07:56.774530 | 2021-04-16T13:18:36 | 2021-04-16T13:18:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,839 | py | import argparse
import os
import sys
import pathlib
import torch
from torch import nn
from torchvision import transforms
from torchocr.networks import build_model
from torchocr.datasets.det_modules import ResizeShortSize
from torchocr.postprocess import build_post_process
import cv2
from matplotlib import pyplot as plt
from torchocr.utils import draw_ocr_box_txt, draw_bbox
class DetInfer:
def __init__(self, model_path):
ckpt = torch.load(model_path, map_location='cpu')
cfg = ckpt['cfg']
self.model = build_model(cfg.model)
state_dict = {}
for k, v in ckpt['state_dict'].items():
state_dict[k.replace('module.', '')] = v
self.model.load_state_dict(state_dict)
self.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
self.model.to(self.device)
self.model.eval()
self.resize = ResizeShortSize(736, False)
self.post_proess = build_post_process(cfg.post_process)
self.transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=cfg.dataset.train.dataset.mean, std=cfg.dataset.train.dataset.std)
])
def predict(self, img, is_output_polygon=False):
        # Preprocessing must match what was used during training
data = {'img': img, 'shape': [img.shape[:2]], 'text_polys': []}
data = self.resize(data)
tensor = self.transform(data['img'])
tensor = tensor.unsqueeze(dim=0)
tensor = tensor.to(self.device)
out = self.model(tensor)
box_list, score_list = self.post_proess(out, data['shape'], is_output_polygon=is_output_polygon)
box_list, score_list = box_list[0], score_list[0]
if len(box_list) > 0:
idx = [x.sum() > 0 for x in box_list]
box_list = [box_list[i] for i, v in enumerate(idx) if v]
score_list = [score_list[i] for i, v in enumerate(idx) if v]
else:
box_list, score_list = [], []
return box_list, score_list
def init_args():
import argparse
parser = argparse.ArgumentParser(description='PytorchOCR infer')
parser.add_argument('--model_path', required=False, type=str, help='rec model path', default=r'F:\CAIL\CAIL2020\cocr\model\db_ResNet50_vd_icdar2015withconfig.pth')
parser.add_argument('--img_path', required=False, type=str, help='img path for predict', default=r'F:\CAIL\CAIL2020\cocr\data\icdar2015\detection\test\imgs\img_2.jpg')
args = parser.parse_args()
return args
def resize(img, scale_percent=60):
    # scale_percent: percent of the original size
    width = int(img.shape[1] * scale_percent / 100)
    height = int(img.shape[0] * scale_percent / 100)
    dim = (width, height)
    # resize image
    resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
    return resized
if __name__ == '__main__':
    # ===> Parse the config file arguments
parser = argparse.ArgumentParser(description='train')
parser.add_argument('--config', type=str, default='config/det.json',
help='train config file path')
parser.add_argument('-m','--model_path', required=False, type=str, help='rec model path', default=r'F:\CAIL\CAIL2020\cocr\model\det-model.bin')
parser.add_argument('-i','--img_path', required=False, type=str, help='img path for predict', default=r'F:\CAIL\CAIL2020\cocr\data\t2\architecture (1).jpg')
args = parser.parse_args()
# for i in range(1,11):
img = cv2.imread(args.img_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
if img.shape[0] > 1500:
img = resize(img, img.shape[0]*100./1024)
model = DetInfer(args.model_path)
box_list, score_list = model.predict(img, is_output_polygon=True)
img = draw_ocr_box_txt(img, box_list)
img = draw_bbox(img, box_list)
plt.imshow(img)
plt.show()
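    # Optionally persist the annotated image (the output path is hypothetical);
    # cv2.imwrite expects BGR, so convert back from RGB first:
    #     cv2.imwrite('det_result.jpg', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))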
| [
"[email protected]"
]
| |
4293b3acde9fd16c7d98f4e36d670978acca31a3 | f73f5f5d0770f731b5e76da39131ff36c9fde11e | /django_libs/tests/models_tests.py | 7c46c414daad0a285198d5f569b7e8cfa6ef2ad1 | [
"MIT"
]
| permissive | SurferTank/django-libs | fcede8d7dff4ea58c728d05ff0030a3ce892a08e | 6ad3f7cf5f9a7a4848557d73af4a93054b34e27f | refs/heads/master | 2021-02-09T01:28:32.153104 | 2020-10-26T03:11:23 | 2020-10-26T03:11:23 | 244,222,230 | 0 | 0 | MIT | 2020-03-01T20:55:22 | 2020-03-01T20:55:21 | null | UTF-8 | Python | false | false | 519 | py | """Tests for the models of the ``django_libs`` app."""
from django.test import TestCase
from ..models import ColorField
from ..widgets import ColorPickerWidget
class ColorFieldTestCase(TestCase):
"""Tests for the ``ColorField`` model."""
longMessage = True
def test_functions(self):
color_field = ColorField()
color_field.formfield
self.assertIsInstance(
color_field.formfield().widget, ColorPickerWidget, msg=(
'Should add the color field widget.'))
| [
"[email protected]"
]
| |
4467952bc3588edaf21b854d2cb536eb9a03be12 | 03034837c5f10d19fcc4dc51388f056ec43fd1d2 | /pro21.py | a5fa31a3792fe95d280dc9afd594c679ff30987a | []
| no_license | shaukhk01/project01 | e95c19844757c631f7ffbdd910b20316f49a945b | 79cfe784612fdbb4816c9fc3fc7222c845a3268f | refs/heads/master | 2020-06-26T07:20:49.844532 | 2019-08-20T06:06:40 | 2019-08-20T06:06:40 | 199,569,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | import re
def main():
matched = re.finditer('a{2}','abcdeaakkkaa')
for m in matched:
print(m.start(),'--',m.group())
main()
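# Expected output for the pattern 'a{2}' on 'abcdeaakkkaa':
#     5 -- aa
#     10 -- aa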
| [
"[email protected]"
]
| |
175b16aa461473aa8fbeb39f96459c4ddc826859 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02937/s405333986.py | 77f923e46db0a03de4ebb7d4023a7c9648601069 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | s=list(input())
t=list(input())
ns=len(s)
nt=len(t)
t_set=list(set(t))
s_set=list(set(s))
for i in range(len(t_set)):
if t_set[i] not in s_set:
print(-1)
exit()
from collections import defaultdict, deque
from bisect import bisect_right
ds=defaultdict(list)
for i in range(ns):
ds[s[i]].append(i)
components=[0]*26
for i in range(26):
components[i]=len(ds[chr(i+97)])
lt=[-1]*nt
ord('a')
last=-1
for i in range(nt):
j=bisect_right(ds[t[i]],last)
if j==components[ord(t[i])-97]:
lt[i]=ds[t[i]][0]
else:
lt[i]=ds[t[i]][j]
last=lt[i]
kuriage=0
for i in range(1,nt):
if lt[i]<=lt[i-1]:
kuriage+=1
print(kuriage*ns+lt[-1]+1) | [
"[email protected]"
]
| |
4ad57e5623a534930577b2344a2f132f793c8bb5 | 7dc295d045982180f89e2bca204148c715dcdd8c | /using_context/using_redirection.py | b3b04bf12790a109758b99bd39c6bd769572946a | []
| no_license | onionmccabbage/AdvancedPythonMay2021 | 0c582e2502672c5d0974a46da1f689ac44c41728 | 5194fb191f8d01521d54d6867084ae6845a3726c | refs/heads/main | 2023-04-20T07:32:29.119310 | 2021-05-13T14:53:01 | 2021-05-13T14:53:01 | 366,090,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,117 | py | # we can write a class to redirect the context (simple solution)
import sys # sys is in control of inputs and outputs
class Redirect:
'''
Provide an easy way to redirect the standard output
(which defaults to printing to the console)
'''
def __init__(self, new_stdout):
self.new_stdout = new_stdout
# we override __enter__ and __exit__
def __enter__(self):
'''implement a redirection'''
#store the current stdout
self.save_stdout = sys.stdout
#set a new stdout
sys.stdout = self.new_stdout # we have redefined a member of sys!!!!
def __exit__(self, exc_type, exc_value, exc_traceback):
'''restore the original stdout'''
sys.stdout = self.save_stdout
if __name__ == '__main__':
# print(sys.stdout)
    # make use of our redirection class
with open('mylog.txt', 'a') as fobj: # open a file access object
with Redirect(fobj):
print('this gets printed to our log file') # look - no file reference
print('this will print to the console') # back to stdout default
| [
"[email protected]"
]
| |
ba5bf4df83c1a5e401c6ac4d470108fae419940f | 25bb4e760769cc483a20f27b6312698891dce034 | /python/Closures and Decorators/decorators-2-name-directory-English.py | 529c7f5310dd4be88cfeca5669d97fbd3c92bd2b | []
| no_license | rangaeeeee/codes-hackerrank | e13d22adff1ef74974e34251d9bfac6cfd36f2b0 | ce7fdf7f336c10164fd2f779d4ed3713849d7c2b | refs/heads/master | 2021-01-19T17:07:28.451983 | 2017-09-01T18:05:33 | 2017-09-01T18:05:33 | 101,049,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | from operator import itemgetter
def person_lister(f):
def inner(people):
people=sorted(people,key=lambda x: int(x[2]))
return [f(p) for p in people]
return inner | [
"[email protected]"
]
| |
b6a695509f4c932fce5594d2924313a6581f08bd | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_ORGS/NPM/node/deps/v8/tools/release/test_scripts.py | 7cf5d141301c575c3186e2488597b510374b586d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"SunPro"
]
| permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 34,860 | py | #!/usr/bin/env python
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# for py2/py3 compatibility
from __future__ import print_function
import os
import shutil
import tempfile
import traceback
import unittest
import auto_push
from auto_push import LastReleaseBailout
import auto_roll
import common_includes
from common_includes import *
import create_release
from create_release import *
import merge_to_branch
from merge_to_branch import MergeToBranch
from auto_tag import AutoTag
import roll_merge
from roll_merge import RollMerge
TEST_CONFIG = {
"DEFAULT_CWD": None,
"BRANCHNAME": "test-prepare-push",
"PERSISTFILE_BASENAME": "/tmp/test-create-releases-tempfile",
"PATCH_FILE": "/tmp/test-v8-create-releases-tempfile-tempfile-patch",
"COMMITMSG_FILE": "/tmp/test-v8-create-releases-tempfile-commitmsg",
"CHROMIUM": "/tmp/test-create-releases-tempfile-chromium",
"SETTINGS_LOCATION": None,
"ALREADY_MERGING_SENTINEL_FILE":
"/tmp/test-merge-to-branch-tempfile-already-merging",
"TEMPORARY_PATCH_FILE": "/tmp/test-merge-to-branch-tempfile-temporary-patch",
}
AUTO_PUSH_ARGS = [
"-a", "[email protected]",
"-r", "[email protected]",
]
class ToplevelTest(unittest.TestCase):
def testSaniniziteVersionTags(self):
self.assertEquals("4.8.230", SanitizeVersionTag("4.8.230"))
self.assertEquals("4.8.230", SanitizeVersionTag("tags/4.8.230"))
self.assertEquals(None, SanitizeVersionTag("candidate"))
def testNormalizeVersionTags(self):
input = ["4.8.230",
"tags/4.8.230",
"tags/4.8.224.1",
"4.8.224.1",
"4.8.223.1",
"tags/4.8.223",
"tags/4.8.231",
"candidates"]
expected = ["4.8.230",
"4.8.230",
"4.8.224.1",
"4.8.224.1",
"4.8.223.1",
"4.8.223",
"4.8.231",
]
self.assertEquals(expected, NormalizeVersionTags(input))
def Cmd(*args, **kwargs):
"""Convenience function returning a shell command test expectation."""
return {
"name": "command",
"args": args,
"ret": args[-1],
"cb": kwargs.get("cb"),
"cwd": kwargs.get("cwd", TEST_CONFIG["DEFAULT_CWD"]),
}
def RL(text, cb=None):
"""Convenience function returning a readline test expectation."""
return {
"name": "readline",
"args": [],
"ret": text,
"cb": cb,
"cwd": None,
}
def URL(*args, **kwargs):
"""Convenience function returning a readurl test expectation."""
return {
"name": "readurl",
"args": args[:-1],
"ret": args[-1],
"cb": kwargs.get("cb"),
"cwd": None,
}
class SimpleMock(object):
def __init__(self):
self._recipe = []
self._index = -1
def Expect(self, recipe):
self._recipe = recipe
def Call(self, name, *args, **kwargs): # pragma: no cover
self._index += 1
try:
expected_call = self._recipe[self._index]
except IndexError:
raise NoRetryException("Calling %s %s" % (name, " ".join(args)))
if not isinstance(expected_call, dict):
raise NoRetryException("Found wrong expectation type for %s %s" %
(name, " ".join(args)))
if expected_call["name"] != name:
raise NoRetryException("Expected action: %s %s - Actual: %s" %
(expected_call["name"], expected_call["args"], name))
# Check if the given working directory matches the expected one.
if expected_call["cwd"] != kwargs.get("cwd"):
raise NoRetryException("Expected cwd: %s in %s %s - Actual: %s" %
(expected_call["cwd"],
expected_call["name"],
expected_call["args"],
kwargs.get("cwd")))
# The number of arguments in the expectation must match the actual
# arguments.
if len(args) > len(expected_call['args']):
raise NoRetryException("When calling %s with arguments, the "
"expectations must consist of at least as many arguments." %
name)
# Compare expected and actual arguments.
for (expected_arg, actual_arg) in zip(expected_call['args'], args):
if expected_arg != actual_arg:
raise NoRetryException("Expected: %s - Actual: %s" %
(expected_arg, actual_arg))
# The expected call contains an optional callback for checking the context
# at the time of the call.
if expected_call['cb']:
try:
expected_call['cb']()
except:
tb = traceback.format_exc()
raise NoRetryException("Caught exception from callback: %s" % tb)
# If the return value is an exception, raise it instead of returning.
if isinstance(expected_call['ret'], Exception):
raise expected_call['ret']
return expected_call['ret']
def AssertFinished(self): # pragma: no cover
if self._index < len(self._recipe) -1:
raise NoRetryException("Called mock too seldom: %d vs. %d" %
(self._index, len(self._recipe)))
class ScriptTest(unittest.TestCase):
def MakeEmptyTempFile(self):
handle, name = tempfile.mkstemp()
os.close(handle)
self._tmp_files.append(name)
return name
def MakeEmptyTempDirectory(self):
name = tempfile.mkdtemp()
self._tmp_files.append(name)
return name
def WriteFakeVersionFile(self, major=3, minor=22, build=4, patch=0):
version_file = os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE)
if not os.path.exists(os.path.dirname(version_file)):
os.makedirs(os.path.dirname(version_file))
with open(version_file, "w") as f:
f.write(" // Some line...\n")
f.write("\n")
f.write("#define V8_MAJOR_VERSION %s\n" % major)
f.write("#define V8_MINOR_VERSION %s\n" % minor)
f.write("#define V8_BUILD_NUMBER %s\n" % build)
f.write("#define V8_PATCH_LEVEL %s\n" % patch)
f.write(" // Some line...\n")
f.write("#define V8_IS_CANDIDATE_VERSION 0\n")
def WriteFakeWatchlistsFile(self):
watchlists_file = os.path.join(TEST_CONFIG["DEFAULT_CWD"], WATCHLISTS_FILE)
if not os.path.exists(os.path.dirname(watchlists_file)):
os.makedirs(os.path.dirname(watchlists_file))
with open(watchlists_file, "w") as f:
content = """
'merges': [
# Only enabled on branches created with tools/release/create_release.py
# '[email protected]',
],
"""
f.write(content)
def MakeStep(self):
"""Convenience wrapper."""
options = ScriptsBase(TEST_CONFIG, self, self._state).MakeOptions([])
return MakeStep(step_class=Step, state=self._state,
config=TEST_CONFIG, side_effect_handler=self,
options=options)
def RunStep(self, script=CreateRelease, step_class=Step, args=None):
"""Convenience wrapper."""
args = args if args is not None else ["-m", "-a=author", "-r=reviewer", ]
return script(TEST_CONFIG, self, self._state).RunSteps([step_class], args)
def Call(self, fun, *args, **kwargs):
print("Calling %s with %s and %s" % (str(fun), str(args), str(kwargs)))
def Command(self, cmd, args="", prefix="", pipe=True, cwd=None):
print("%s %s" % (cmd, args))
print("in %s" % cwd)
return self._mock.Call("command", cmd + " " + args, cwd=cwd)
def ReadLine(self):
return self._mock.Call("readline")
def ReadURL(self, url, params):
if params is not None:
return self._mock.Call("readurl", url, params)
else:
return self._mock.Call("readurl", url)
def Sleep(self, seconds):
pass
def GetUTCStamp(self):
return "1000000"
def Expect(self, *args):
"""Convenience wrapper."""
self._mock.Expect(*args)
def setUp(self):
self._mock = SimpleMock()
self._tmp_files = []
self._state = {}
TEST_CONFIG["DEFAULT_CWD"] = self.MakeEmptyTempDirectory()
def tearDown(self):
if os.path.exists(TEST_CONFIG["PERSISTFILE_BASENAME"]):
shutil.rmtree(TEST_CONFIG["PERSISTFILE_BASENAME"])
# Clean up temps. Doesn't work automatically.
for name in self._tmp_files:
if os.path.isfile(name):
os.remove(name)
if os.path.isdir(name):
shutil.rmtree(name)
self._mock.AssertFinished()
def testGitMock(self):
self.Expect([Cmd("git --version", "git version 1.2.3"),
Cmd("git dummy", "")])
self.assertEquals("git version 1.2.3", self.MakeStep().Git("--version"))
self.assertEquals("", self.MakeStep().Git("dummy"))
def testCommonPrepareDefault(self):
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
self.MakeStep().CommonPrepare()
self.MakeStep().PrepareBranch()
def testCommonPrepareNoConfirm(self):
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("n"),
])
self.MakeStep().CommonPrepare()
self.assertRaises(Exception, self.MakeStep().PrepareBranch)
def testCommonPrepareDeleteBranchFailure(self):
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], None),
])
self.MakeStep().CommonPrepare()
self.assertRaises(Exception, self.MakeStep().PrepareBranch)
def testInitialEnvironmentChecks(self):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
os.environ["EDITOR"] = "vi"
self.Expect([
Cmd("which vi", "/usr/bin/vi"),
])
self.MakeStep().InitialEnvironmentChecks(TEST_CONFIG["DEFAULT_CWD"])
def testTagTimeout(self):
self.Expect([
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
])
args = ["--branch", "candidates", "ab12345"]
self._state["version"] = "tag_name"
self._state["commit_title"] = "Title"
self.assertRaises(Exception,
lambda: self.RunStep(RollMerge, TagRevision, args))
def testReadAndPersistVersion(self):
self.WriteFakeVersionFile(build=5)
step = self.MakeStep()
step.ReadAndPersistVersion()
self.assertEquals("3", step["major"])
self.assertEquals("22", step["minor"])
self.assertEquals("5", step["build"])
self.assertEquals("0", step["patch"])
def testRegex(self):
self.assertEqual("(issue 321)",
re.sub(r"BUG=v8:(.*)$", r"(issue \1)", "BUG=v8:321"))
self.assertEqual("(Chromium issue 321)",
re.sub(r"BUG=(.*)$", r"(Chromium issue \1)", "BUG=321"))
cl = " too little\n\ttab\ttab\n too much\n trailing "
cl = MSub(r"\t", r" ", cl)
cl = MSub(r"^ {1,7}([^ ])", r" \1", cl)
cl = MSub(r"^ {9,80}([^ ])", r" \1", cl)
cl = MSub(r" +$", r"", cl)
self.assertEqual(" too little\n"
" tab tab\n"
" too much\n"
" trailing", cl)
self.assertEqual("//\n#define V8_BUILD_NUMBER 3\n",
MSub(r"(?<=#define V8_BUILD_NUMBER)(?P<space>\s+)\d*$",
r"\g<space>3",
"//\n#define V8_BUILD_NUMBER 321\n"))
TAGS = """
4425.0
0.0.0.0
3.9.6
3.22.4
test_tag
"""
# Version as tag: 3.22.4.0. Version on master: 3.22.6.
# Make sure that the latest version is 3.22.6.0.
def testIncrementVersion(self):
self.Expect([
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
Cmd("git checkout -f origin/master -- include/v8-version.h",
"", cb=lambda: self.WriteFakeVersionFile(3, 22, 6)),
])
self.RunStep(CreateRelease, IncrementVersion)
self.assertEquals("3", self._state["new_major"])
self.assertEquals("22", self._state["new_minor"])
self.assertEquals("7", self._state["new_build"])
self.assertEquals("0", self._state["new_patch"])
def testBootstrapper(self):
work_dir = self.MakeEmptyTempDirectory()
class FakeScript(ScriptsBase):
def _Steps(self):
return []
# Use the test configuration without the fake testing default work dir.
fake_config = dict(TEST_CONFIG)
del(fake_config["DEFAULT_CWD"])
self.Expect([
Cmd("fetch v8", "", cwd=work_dir),
])
FakeScript(fake_config, self).Run(["--work-dir", work_dir])
def testCreateRelease(self):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
# The version file on master has build level 5.
self.WriteFakeVersionFile(build=5)
commit_msg = """Version 3.22.5
[email protected]"""
def CheckVersionCommit():
commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
self.assertEquals(commit_msg, commit)
version = FileToText(
os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE))
self.assertTrue(re.search(r"#define V8_MINOR_VERSION\s+22", version))
self.assertTrue(re.search(r"#define V8_BUILD_NUMBER\s+5", version))
self.assertFalse(re.search(r"#define V8_BUILD_NUMBER\s+6", version))
self.assertTrue(re.search(r"#define V8_PATCH_LEVEL\s+0", version))
self.assertTrue(
re.search(r"#define V8_IS_CANDIDATE_VERSION\s+0", version))
expectations = [
Cmd("git fetch origin +refs/heads/*:refs/heads/*", ""),
Cmd("git checkout -f origin/master", "", cb=self.WriteFakeWatchlistsFile),
Cmd("git branch", ""),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
Cmd("git checkout -f origin/master -- include/v8-version.h",
"", cb=self.WriteFakeVersionFile),
Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
Cmd("git log -1 --format=%s release_hash", "Version 3.22.4\n"),
Cmd("git log -1 --format=%H release_hash^", "abc3\n"),
Cmd("git log --format=%H abc3..push_hash", "rev1\n"),
Cmd("git push origin push_hash:refs/heads/3.22.5", ""),
Cmd("git reset --hard origin/master", ""),
Cmd("git new-branch work-branch --upstream origin/3.22.5", ""),
Cmd("git checkout -f 3.22.4 -- include/v8-version.h", "",
cb=self.WriteFakeVersionFile),
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], "",
cb=CheckVersionCommit),
Cmd("git cl upload --send-mail "
"-f --bypass-hooks --no-autocc --message-file "
"\"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
Cmd("git cl land --bypass-hooks -f", ""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep="
"\"Version 3.22.5\" origin/3.22.5", "hsh_to_tag"),
Cmd("git tag 3.22.5 hsh_to_tag", ""),
Cmd("git push origin refs/tags/3.22.5:refs/tags/3.22.5", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch", "* master\n work-branch\n"),
Cmd("git branch -D work-branch", ""),
Cmd("git gc", ""),
]
self.Expect(expectations)
args = ["-a", "[email protected]",
"-r", "[email protected]",
"--revision", "push_hash"]
CreateRelease(TEST_CONFIG, self).Run(args)
# Note: The version file is on build number 5 again in the end of this test
# since the git command that merges to master is mocked out.
# Check for correct content of the WATCHLISTS file
watchlists_content = FileToText(os.path.join(TEST_CONFIG["DEFAULT_CWD"],
WATCHLISTS_FILE))
expected_watchlists_content = """
'merges': [
# Only enabled on branches created with tools/release/create_release.py
'[email protected]',
],
"""
self.assertEqual(watchlists_content, expected_watchlists_content)
C_V8_22624_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22624 123
"""
C_V8_123455_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@123455 123
"""
C_V8_123456_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@123456 123
"""
ROLL_COMMIT_MSG = """Update V8 to version 3.22.4.
Summary of changes available at:
https://chromium.googlesource.com/v8/v8/+log/last_rol..roll_hsh
Please follow these instructions for assigning/CC'ing issues:
https://v8.dev/docs/triage-issues
Please close rolling in case of a roll revert:
https://v8-roll.appspot.com/
This only works with a Google account.
CQ_INCLUDE_TRYBOTS=luci.chromium.try:linux-blink-rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:linux_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:mac_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:win_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:android_optional_gpu_tests_rel
[email protected]"""
# Snippet from the original DEPS file.
FAKE_DEPS = """
vars = {
"v8_revision": "last_roll_hsh",
}
deps = {
"src/v8":
(Var("googlecode_url") % "v8") + "/" + Var("v8_branch") + "@" +
Var("v8_revision"),
}
"""
def testChromiumRollUpToDate(self):
TEST_CONFIG["CHROMIUM"] = self.MakeEmptyTempDirectory()
json_output_file = os.path.join(TEST_CONFIG["CHROMIUM"], "out.json")
TextToFile(self.FAKE_DEPS, os.path.join(TEST_CONFIG["CHROMIUM"], "DEPS"))
chrome_dir = TEST_CONFIG["CHROMIUM"]
self.Expect([
Cmd("git fetch origin", ""),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("gclient getdep -r src/v8", "last_roll_hsh", cwd=chrome_dir),
Cmd("git describe --tags last_roll_hsh", "3.22.4"),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git rev-list --max-age=395200 --tags",
"bad_tag\nroll_hsh\nhash_123"),
Cmd("git describe --tags bad_tag", ""),
Cmd("git describe --tags roll_hsh", "3.22.4"),
Cmd("git describe --tags hash_123", "3.22.3"),
Cmd("git describe --tags roll_hsh", "3.22.4"),
Cmd("git describe --tags hash_123", "3.22.3"),
])
result = auto_roll.AutoRoll(TEST_CONFIG, self).Run(
AUTO_PUSH_ARGS + [
"-c", TEST_CONFIG["CHROMIUM"],
"--json-output", json_output_file])
self.assertEquals(0, result)
json_output = json.loads(FileToText(json_output_file))
self.assertEquals("up_to_date", json_output["monitoring_state"])
def testChromiumRoll(self):
# Setup fake directory structures.
TEST_CONFIG["CHROMIUM"] = self.MakeEmptyTempDirectory()
json_output_file = os.path.join(TEST_CONFIG["CHROMIUM"], "out.json")
TextToFile(self.FAKE_DEPS, os.path.join(TEST_CONFIG["CHROMIUM"], "DEPS"))
TextToFile("", os.path.join(TEST_CONFIG["CHROMIUM"], ".git"))
chrome_dir = TEST_CONFIG["CHROMIUM"]
os.makedirs(os.path.join(chrome_dir, "v8"))
def WriteDeps():
TextToFile("Some line\n \"v8_revision\": \"22624\",\n some line",
os.path.join(chrome_dir, "DEPS"))
expectations = [
Cmd("git fetch origin", ""),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("gclient getdep -r src/v8", "last_roll_hsh", cwd=chrome_dir),
Cmd("git describe --tags last_roll_hsh", "3.22.3.1"),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git rev-list --max-age=395200 --tags",
"bad_tag\nroll_hsh\nhash_123"),
Cmd("git describe --tags bad_tag", ""),
Cmd("git describe --tags roll_hsh", "3.22.4"),
Cmd("git describe --tags hash_123", "3.22.3"),
Cmd("git describe --tags roll_hsh", "3.22.4"),
Cmd("git log -1 --format=%s roll_hsh", "Version 3.22.4\n"),
Cmd("git describe --tags roll_hsh", "3.22.4"),
Cmd("git describe --tags last_roll_hsh", "3.22.2.1"),
Cmd("git status -s -uno", "", cwd=chrome_dir),
Cmd("git checkout -f main", "", cwd=chrome_dir),
Cmd("git branch", "", cwd=chrome_dir),
Cmd("git pull", "", cwd=chrome_dir),
Cmd("git fetch origin", ""),
Cmd("git new-branch work-branch", "", cwd=chrome_dir),
Cmd("gclient setdep -r src/v8@roll_hsh", "", cb=WriteDeps,
cwd=chrome_dir),
Cmd(("git commit -am \"%s\" "
"--author \"[email protected] <[email protected]>\"" %
self.ROLL_COMMIT_MSG),
"", cwd=chrome_dir),
Cmd("git cl upload --send-mail -f "
"--cq-dry-run --set-bot-commit --bypass-hooks", "",
cwd=chrome_dir),
Cmd("git checkout -f main", "", cwd=chrome_dir),
Cmd("git branch -D work-branch", "", cwd=chrome_dir),
]
self.Expect(expectations)
args = ["-a", "[email protected]", "-c", chrome_dir,
"-r", "[email protected]", "--json-output", json_output_file]
auto_roll.AutoRoll(TEST_CONFIG, self).Run(args)
deps = FileToText(os.path.join(chrome_dir, "DEPS"))
self.assertTrue(re.search("\"v8_revision\": \"22624\"", deps))
json_output = json.loads(FileToText(json_output_file))
self.assertEquals("success", json_output["monitoring_state"])
def testCheckLastPushRecently(self):
self.Expect([
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
Cmd("git log -1 --format=%s release_hash",
"Version 3.22.4 (based on abc3)\n"),
Cmd("git log --format=%H abc3..abc123", "\n"),
])
self._state["candidate"] = "abc123"
self.assertEquals(0, self.RunStep(
auto_push.AutoPush, LastReleaseBailout, AUTO_PUSH_ARGS))
def testAutoPush(self):
self.Expect([
Cmd("git fetch", ""),
Cmd("git fetch origin +refs/heads/lkgr:refs/heads/lkgr", ""),
Cmd("git show-ref -s refs/heads/lkgr", "abc123\n"),
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
Cmd("git tag", self.TAGS),
Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
Cmd("git log -1 --format=%s release_hash",
"Version 3.22.4 (based on abc3)\n"),
Cmd("git log --format=%H abc3..abc123", "some_stuff\n"),
])
auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS + ["--push"])
state = json.loads(FileToText("%s-state.json"
% TEST_CONFIG["PERSISTFILE_BASENAME"]))
self.assertEquals("abc123", state["candidate"])
def testRollMerge(self):
TEST_CONFIG["ALREADY_MERGING_SENTINEL_FILE"] = self.MakeEmptyTempFile()
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
self.WriteFakeVersionFile(build=5)
os.environ["EDITOR"] = "vi"
extra_patch = self.MakeEmptyTempFile()
def VerifyPatch(patch):
return lambda: self.assertEquals(patch,
FileToText(TEST_CONFIG["TEMPORARY_PATCH_FILE"]))
msg = """Version 3.22.5.1 (cherry-pick)
Merged ab12345
Merged ab23456
Merged ab34567
Merged ab45678
Merged ab56789
Title4
Title2
Title3
Title1
Revert "Something"
BUG=123,234,345,456,567,v8:123
"""
def VerifyLand():
commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
self.assertEquals(msg, commit)
version = FileToText(
os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE))
self.assertTrue(re.search(r"#define V8_MINOR_VERSION\s+22", version))
self.assertTrue(re.search(r"#define V8_BUILD_NUMBER\s+5", version))
self.assertTrue(re.search(r"#define V8_PATCH_LEVEL\s+1", version))
self.assertTrue(
re.search(r"#define V8_IS_CANDIDATE_VERSION\s+0", version))
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
TEST_CONFIG["BRANCHNAME"], ""),
Cmd(("git log --format=%H --grep=\"Port ab12345\" "
"--reverse origin/master"),
"ab45678\nab23456"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd(("git log --format=%H --grep=\"Port ab23456\" "
"--reverse origin/master"),
""),
Cmd(("git log --format=%H --grep=\"Port ab34567\" "
"--reverse origin/master"),
"ab56789"),
Cmd("git log -1 --format=%s ab56789", "Title3"),
RL("Y"), # Automatically add corresponding ports (ab34567, ab56789)?
# Simulate git being down which stops the script.
Cmd("git log -1 --format=%s ab12345", None),
# Restart script in the failing step.
Cmd("git log -1 --format=%s ab12345", "Title4"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd("git log -1 --format=%s ab34567", "Title3"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
Cmd("git log -1 ab34567", "Title3\nBUG=567, 456"),
Cmd("git log -1 ab45678", "Title1\nBUG="),
Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
Cmd("git log -1 -p ab12345", "patch4"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch4")),
Cmd("git log -1 -p ab23456", "patch2"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch2")),
Cmd("git log -1 -p ab34567", "patch3"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch3")),
Cmd("git log -1 -p ab45678", "patch1"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch1")),
Cmd("git log -1 -p ab56789", "patch5\n"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch5\n")),
Cmd("git apply --index --reject \"%s\"" % extra_patch, ""),
RL("Y"), # Automatically increment patch level?
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
RL("[email protected]"), # V8 reviewer.
Cmd("git cl upload --send-mail -r \"[email protected]\" "
"--bypass-hooks --cc \"[email protected]\"", ""),
Cmd("git checkout -f %s" % TEST_CONFIG["BRANCHNAME"], ""),
RL("LGTM"), # Enter LGTM for V8 CL.
Cmd("git cl presubmit", "Presubmit successfull\n"),
Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
cb=VerifyLand),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\""
"Version 3.22.5.1 (cherry-pick)"
"\" refs/remotes/origin/candidates",
""),
Cmd("git fetch", ""),
Cmd("git log -1 --format=%H --grep=\""
"Version 3.22.5.1 (cherry-pick)"
"\" refs/remotes/origin/candidates",
"hsh_to_tag"),
Cmd("git tag 3.22.5.1 hsh_to_tag", ""),
Cmd("git push origin refs/tags/3.22.5.1:refs/tags/3.22.5.1", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
# ab12345 and ab34567 are patches. ab23456 (included) and ab45678 are the
# MIPS ports of ab12345. ab56789 is the MIPS port of ab34567.
args = ["-f", "-p", extra_patch, "--branch", "candidates",
"ab12345", "ab23456", "ab34567"]
# The first run of the script stops because of git being down.
self.assertRaises(GitFailedException,
lambda: RollMerge(TEST_CONFIG, self).Run(args))
# Test that state recovery after restarting the script works.
args += ["-s", "4"]
RollMerge(TEST_CONFIG, self).Run(args)
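# Sketch of how the messy BUG lines above ("BUG=123\nBUG=234",
# " BUG = v8:123,345", "BUG=567, 456", "BUG=", "BUG=none") can be folded into
# the single sorted "BUG=123,234,345,456,567,v8:123" footer the merge commit
# expects. One way to reproduce the aggregation this test encodes; the real
# script also understands "Bug:" footers (see testMergeToBranch below).
import re

def merge_bug_lines(messages):
    bugs = set()
    for msg in messages:
        for line in re.findall(r"^\s*BUG\s*=\s*(.*)$", msg, flags=re.MULTILINE):
            for token in line.split(","):
                token = token.strip()
                if token and token.lower() != "none":
                    bugs.add(token)
    plain = sorted((b for b in bugs if b.isdigit()), key=int)
    prefixed = sorted(b for b in bugs if not b.isdigit())
    return ",".join(plain + prefixed)

assert merge_bug_lines(["Title4\nBUG=123\nBUG=234", "Title2\n BUG = v8:123,345",
                        "Title3\nBUG=567, 456", "Title1\nBUG=",
                        "Revert \"Something\"\nBUG=none"]) == \
    "123,234,345,456,567,v8:123"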
def testMergeToBranch(self):
TEST_CONFIG["ALREADY_MERGING_SENTINEL_FILE"] = self.MakeEmptyTempFile()
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
self.WriteFakeVersionFile(build=5)
os.environ["EDITOR"] = "vi"
extra_patch = self.MakeEmptyTempFile()
def VerifyPatch(patch):
return lambda: self.assertEquals(patch,
FileToText(TEST_CONFIG["TEMPORARY_PATCH_FILE"]))
info_msg = ("NOTE: This script will no longer automatically "
"update include/v8-version.h "
"and create a tag. This is done automatically by the autotag bot. "
"Please call the merge_to_branch.py with --help for more information.")
msg = """Merged: Squashed multiple commits.
Merged: Title4
Revision: ab12345
Merged: Title2
Revision: ab23456
Merged: Title3
Revision: ab34567
Merged: Title1
Revision: ab45678
Merged: Revert \"Something\"
Revision: ab56789
BUG=123,234,345,456,567,v8:123
NOTRY=true
NOPRESUBMIT=true
NOTREECHECKS=true
"""
def VerifyLand():
commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
self.assertEquals(msg, commit)
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git checkout -f origin/master", ""),
Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
TEST_CONFIG["BRANCHNAME"], ""),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab12345\" "
"--reverse origin/master"),
"ab45678\nab23456"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab23456\" "
"--reverse origin/master"),
""),
Cmd(("git log --format=%H --grep=\"^[Pp]ort ab34567\" "
"--reverse origin/master"),
"ab56789"),
Cmd("git log -1 --format=%s ab56789", "Title3"),
RL("Y"), # Automatically add corresponding ports (ab34567, ab56789)?
# Simulate git being down which stops the script.
Cmd("git log -1 --format=%s ab12345", None),
# Restart script in the failing step.
Cmd("git log -1 --format=%s ab12345", "Title4"),
Cmd("git log -1 --format=%s ab23456", "Title2"),
Cmd("git log -1 --format=%s ab34567", "Title3"),
Cmd("git log -1 --format=%s ab45678", "Title1"),
Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
Cmd("git log -1 ab34567", "Title3\nBug: 567, 456,345"),
Cmd("git log -1 ab45678", "Title1\nBug:"),
Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
Cmd("git log -1 -p ab12345", "patch4"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch4")),
Cmd("git log -1 -p ab23456", "patch2"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch2")),
Cmd("git log -1 -p ab34567", "patch3"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch3")),
Cmd("git log -1 -p ab45678", "patch1"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch1")),
Cmd("git log -1 -p ab56789", "patch5\n"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch5\n")),
Cmd("git apply --index --reject \"%s\"" % extra_patch, ""),
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
RL("[email protected]"), # V8 reviewer.
Cmd("git cl upload --send-mail -r \"[email protected]\" "
"--bypass-hooks --cc \"[email protected]\"", ""),
Cmd("git checkout -f %s" % TEST_CONFIG["BRANCHNAME"], ""),
RL("LGTM"), # Enter LGTM for V8 CL.
Cmd("git cl presubmit", "Presubmit successfull\n"),
Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
cb=VerifyLand),
Cmd("git checkout -f origin/master", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
# ab12345 and ab34567 are patches. ab23456 (included) and ab45678 are the
# MIPS ports of ab12345. ab56789 is the MIPS port of ab34567.
args = ["-f", "-p", extra_patch, "--branch", "candidates",
"ab12345", "ab23456", "ab34567"]
# The first run of the script stops because of git being down.
self.assertRaises(GitFailedException,
lambda: MergeToBranch(TEST_CONFIG, self).Run(args))
# Test that state recovery after restarting the script works.
args += ["-s", "4"]
MergeToBranch(TEST_CONFIG, self).Run(args)
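# Sketch of assembling the squashed commit message checked by VerifyLand
# above: a title, one "Merged: <subject>" / "Revision: <hash>" pair per merged
# commit, then the aggregated bug footer and the NO* flags. Function and
# parameter names are assumptions; the line layout follows the expected msg
# above, not the real merge_to_branch implementation.
def squash_message(commits, bug_footer):
    lines = ["Merged: Squashed multiple commits."]
    for subject, sha in commits:
        lines += ["Merged: %s" % subject, "Revision: %s" % sha]
    lines += ["BUG=%s" % bug_footer, "NOTRY=true",
              "NOPRESUBMIT=true", "NOTREECHECKS=true"]
    return "\n".join(lines) + "\n"

print(squash_message([("Title4", "ab12345"), ("Title2", "ab23456")],
                     "123,234,345,456,567,v8:123"))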
if __name__ == '__main__':
unittest.main()
# Store/views.py (hdforoozan/Restaurant)
from datetime import datetime
from django.shortcuts import render
from .models import Store, Employee, Manager
from Food.models import Food
from django.urls import reverse_lazy
from django.views.generic import TemplateView,DetailView,ListView, CreateView,DeleteView,UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from Cart.forms import CartAddFoodForm
from Order.models import Order
from Comment.forms import CommentForm
from Comment.models import Comment
from Food.forms import SearchForm
class HomePageView(TemplateView):
template_name = 'home.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['most_sell_foods'] = Food.objects.filter(name__icontains='p')
context['cheapest_foods'] = Food.objects.filter(price__lte=10)
context['search_form'] = SearchForm()
return context
##############################################################
# Store Model Views
##############################################################
class StoreListView(LoginRequiredMixin, ListView):
model = Store
context_object_name = 'stores'
class StoreDetailView(LoginRequiredMixin, DetailView):
model = Store
context_object_name = 'store'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
store = Store.objects.get(id=self.kwargs['pk'])
context['foods'] = Food.objects.filter(stores=store).filter(run_out=False)
context['employees'] = Employee.objects.filter(store__id=self.kwargs['pk'])
paid_orders = Order.objects.filter(paid=True)
monthly_income = 0
for item in paid_orders:
if item.store_id == self.kwargs['pk']:
monthly_income += item.get_total_cost()
context['monthly_income'] = monthly_income
return context
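# The income loop above can be expressed as a single aggregation; a sketch
# (assuming the same Order fields used above):
#   monthly_income = sum(o.get_total_cost() for o in paid_orders
#                        if o.store_id == self.kwargs['pk'])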
class StoreCreateView(LoginRequiredMixin, CreateView):
model = Store
fields = ['user','manager','foods','branch_num','image','pub_date','address']
class StoreUpdateView(LoginRequiredMixin, UpdateView):
model = Store
fields = ['manager','foods','branch_num','image','address']
context_object_name = 'store'
template_name = 'Store/store_update_form.html'
class StoreDeleteView(LoginRequiredMixin, DeleteView):
model = Store
success_url = reverse_lazy('store-list')
context_object_name = 'store'
class StoreFoodDetailView(LoginRequiredMixin, DetailView):
model = Store
context_object_name = 'store'
template_name = 'Store/store_food_detail.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
store = Store.objects.get(id=self.kwargs['pk'])
food = Food.objects.filter(stores=store).get(id=self.kwargs['food_id'])
context['food'] = food
context['cart_food_form'] = CartAddFoodForm()
context['comment_form'] = CommentForm()
comments = Comment.objects.filter(food=food)[:5]
comment_times = []
now = datetime.now()
date_format = "%Y-%m-%d %H:%M:%S"
time1 = now.strftime("%Y-%m-%d %H:%M:%S")
time_now = datetime.strptime(time1,date_format)
for comment in comments:
time2 = comment.created.strftime("%Y-%m-%d %H:%M:%S")
time_2 = datetime.strptime(time2, date_format)
diff_time = time_now - time_2
if diff_time.days > 0:
weeks = int(diff_time.days / 7)
months = int(diff_time.days / 30)
if months > 0:
comment_times.append('{} months ago'.format(months))
else:
if weeks > 0:
comment_times.append('{} weeks ago'.format(weeks))
else:
comment_times.append('{} days ago'.format(diff_time.days))
else:
hours = int(diff_time.seconds / (3600))
if hours > 0:
comment_times.append('{} hours ago'.format(hours))
else:
minutes = int((diff_time.seconds % 3600) / 60)
if minutes > 0:
comment_times.append('{} minutes ago'.format(minutes))
else:
comment_times.append('just now')
food_comments = zip(comments,comment_times)
context['food_comments'] = food_comments
self.request.session['store_id'] = store.id
return context
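# The branching above is plain timedelta arithmetic; a compact, standalone
# sketch of the same "time ago" bucketing (pure stdlib, no Django), keeping
# the view's singular/plural wording as-is:
from datetime import datetime

def time_ago(created, now=None):
    delta = (now or datetime.now()) - created
    if delta.days >= 30:
        return '{} months ago'.format(delta.days // 30)
    if delta.days >= 7:
        return '{} weeks ago'.format(delta.days // 7)
    if delta.days > 0:
        return '{} days ago'.format(delta.days)
    if delta.seconds >= 3600:
        return '{} hours ago'.format(delta.seconds // 3600)
    if delta.seconds >= 60:
        return '{} minutes ago'.format(delta.seconds // 60)
    return 'just now'

assert time_ago(datetime(2019, 1, 1), now=datetime(2019, 1, 8)) == '1 weeks ago'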
##############################################################
# Manager Model Views
###############################################################
class ManagerDetailView(LoginRequiredMixin, DetailView):
model = Manager
context_object_name = 'manager'
class ManagerUpdateView(LoginRequiredMixin, UpdateView):
model = Manager
fields = ['name','address','phone_num','education_degree','image']
context_object_name = 'manager'
template_name = 'Store/manager_update_form.html'
class ManagerDeleteView(LoginRequiredMixin, DeleteView):
model = Manager
success_url = reverse_lazy('store-list')
context_object_name = 'manager'
##############################################################
# Employee Model Views
###############################################################
class EmployeeDetailView(LoginRequiredMixin, DetailView):
model = Employee
context_object_name = 'employee'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
store_employees = Employee.objects.filter(store_id=self.kwargs['pk'])
employee = Employee.objects.get(id=self.kwargs['employee_id'])
if employee in store_employees:
context['employee'] = employee
else:
context['employee'] = None
return context
class EmployeeCreateView(LoginRequiredMixin, CreateView):
model = Employee
fields = ['store','name','address','phone_num','pub_date','image','position','education_degree','monthly_salary']
class EmployeeUpdateView(LoginRequiredMixin, UpdateView):
model = Employee
fields = ['name','address','phone_num','image','education_degree','position']
context_object_name = 'employee'
template_name = 'Store/employee_update_form.html'
class EmployeeDeleteView(LoginRequiredMixin, DeleteView):
model = Employee
success_url = reverse_lazy('store-detail')
context_object_name = 'employee'
# gamslib/tsp42/tsp42-scalar.py (vaidasj/alg-mod-rev)
# MIP written by GAMS Convert at 12/13/18 10:32:27
#
# Equation counts
# Total E G L N X C B
# 43 43 0 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 862 1 861 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 2584 2584 0 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
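# The converter emits one scalar Var per variable; the same 861 binaries
# (presumably m.b2 ... m.b862, given the variable counts in the header) could
# be declared once as an indexed Var -- a sketch, not what GAMS Convert does:
#   m.b = Var(RangeSet(2, 862), within=Binary, bounds=(0, 1), initialize=0)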
m.b2 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b3 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b4 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b5 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b6 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b7 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b8 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b9 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b10 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b11 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b12 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b13 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b14 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b15 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b16 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b17 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b18 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b19 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b20 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b21 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b22 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b23 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b24 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b25 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b26 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b27 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b28 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b29 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b30 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b31 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b32 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b33 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b34 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b35 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b36 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b37 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b38 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b39 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b40 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b41 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b42 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b43 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b44 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b45 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b46 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b47 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b48 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b49 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b50 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b51 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b64 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b65 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b66 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b67 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b68 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b69 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b70 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b71 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b72 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b73 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b74 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b75 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b76 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b77 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b78 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b79 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b80 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b81 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b82 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b83 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b84 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b85 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b86 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b87 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b88 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b89 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b90 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b91 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b92 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b93 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b94 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b95 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b96 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b97 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b98 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b99 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b100 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b101 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b102 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b103 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b104 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b105 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b106 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b107 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b108 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b109 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b110 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b111 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b112 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b113 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b114 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b115 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b116 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b117 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b118 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b119 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b120 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b121 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b122 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b123 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b124 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b125 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b126 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b127 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b128 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b129 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b130 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b131 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b132 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b133 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b134 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b135 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b136 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b137 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b138 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b139 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b140 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b141 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b142 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b143 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b144 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b145 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b146 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b147 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b148 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b149 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b150 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b151 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b152 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b153 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b154 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b155 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b156 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b157 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b158 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b159 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b160 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b161 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b162 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b163 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b164 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b165 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b166 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b167 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b168 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b169 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b170 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b171 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b172 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b173 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b174 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b175 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b176 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b177 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b178 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b179 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b180 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b181 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b182 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b183 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b184 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b185 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b186 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b187 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b188 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b189 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b190 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b191 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b192 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b193 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b194 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b195 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b196 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b197 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b198 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b199 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b200 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b201 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b202 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b203 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b204 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b205 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b206 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b207 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b208 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b209 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b210 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b211 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b212 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b213 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b214 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b215 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b216 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b217 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b218 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b219 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b220 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b221 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b222 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b223 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b224 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b225 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b226 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b227 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b228 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b229 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b230 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b231 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b232 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b233 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b234 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b235 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b236 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b237 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b238 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b239 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b240 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b241 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b242 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b243 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b244 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b245 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b246 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b247 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b248 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b249 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b250 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b251 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b252 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b253 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b254 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b255 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b256 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b257 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b258 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b259 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b260 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b261 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b262 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b263 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b264 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b265 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b266 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b267 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b268 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b269 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b270 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b271 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b272 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b273 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b274 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b275 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b276 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b277 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b278 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b279 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b280 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b281 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b282 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b283 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b284 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b285 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b286 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b287 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b288 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b289 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b290 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b291 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b292 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b293 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b294 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b295 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b296 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b297 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b298 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b299 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b300 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b301 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b302 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b303 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b304 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b305 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b306 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b307 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b308 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b309 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b310 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b311 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b312 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b313 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b314 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b315 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b316 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b317 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b318 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b319 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b320 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b321 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b322 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b323 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b324 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b325 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b326 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b327 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b328 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b329 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b330 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b331 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b332 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b333 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b334 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b335 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b336 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b337 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b338 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b339 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b340 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b341 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b342 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b343 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b344 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b345 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b346 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b347 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b348 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b349 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b350 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b351 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b352 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b353 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b354 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b355 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b356 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b357 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b358 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b359 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b360 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b361 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b362 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b363 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b364 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b365 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b366 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b367 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b368 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b369 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b370 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b371 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b372 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b373 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b374 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b375 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b376 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b377 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b378 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b379 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b380 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b381 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b382 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b383 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b384 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b385 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b386 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b387 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b388 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b389 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b390 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b391 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b392 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b393 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b394 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b395 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b396 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b397 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b398 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b399 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b400 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b401 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b402 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b403 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b404 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b405 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b406 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b407 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b408 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b409 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b410 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b411 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b412 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b413 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b414 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b415 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b416 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b417 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b418 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b419 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b420 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b421 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b422 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b423 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b424 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b425 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b426 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b427 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b428 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b429 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b430 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b431 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b432 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b433 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b434 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b435 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b436 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b437 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b438 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b439 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b440 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b441 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b442 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b443 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b444 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b445 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b446 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b447 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b448 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b449 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b450 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b451 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b452 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b453 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b454 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b455 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b456 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b457 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b458 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b459 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b460 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b461 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b462 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b463 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b464 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b465 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b466 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b467 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b468 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b469 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b470 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b471 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b472 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b473 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b474 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b475 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b476 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b477 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b478 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b479 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b480 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b481 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b482 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b483 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b484 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b485 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b486 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b487 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b488 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b489 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b490 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b491 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b492 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b493 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b494 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b495 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b496 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b497 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b498 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b499 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b500 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b501 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b502 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b503 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b504 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b505 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b506 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b507 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b508 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b509 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b510 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b511 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b512 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b513 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b514 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b515 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b516 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b517 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b518 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b519 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b520 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b521 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b522 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b523 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b524 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b525 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b526 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b527 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b528 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b529 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b530 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b531 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b532 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b533 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b534 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b535 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b536 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b537 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b538 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b539 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b540 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b541 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b542 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b543 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b544 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b545 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b546 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b547 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b548 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b549 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b550 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b551 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b552 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b553 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b554 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b555 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b556 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b557 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b558 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b559 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b560 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b561 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b562 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b563 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b564 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b565 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b566 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b567 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b568 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b569 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b570 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b571 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b572 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b573 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b574 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b575 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b576 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b577 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b578 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b579 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b580 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b581 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b582 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b583 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b584 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b585 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b586 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b587 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b588 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b589 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b590 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b591 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b592 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b593 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b594 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b595 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b615 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b695 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b723 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b734 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b744 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b776 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b777 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b778 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b779 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b780 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b781 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b782 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b783 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b784 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b785 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b786 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b787 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b788 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b789 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b790 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b791 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b792 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b793 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b794 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b795 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b796 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b797 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b798 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b799 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b800 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b801 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b802 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b803 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b804 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b805 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b806 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b807 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b808 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b809 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b810 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b811 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b812 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b813 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b814 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b815 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b816 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b817 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b818 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b819 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b820 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b821 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b822 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b823 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b824 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b825 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b826 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b827 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b828 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b829 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b830 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b831 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b832 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b833 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b834 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b835 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b836 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b837 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b838 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b839 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b840 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b841 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b842 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b843 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b844 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b845 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b846 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b847 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b848 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b849 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b850 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b851 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b852 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b853 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b854 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b855 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b856 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b857 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b858 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b859 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b860 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b861 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b862 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr= 8*m.b2 + 39*m.b3 + 45*m.b4 + 37*m.b5 + 47*m.b6 + 9*m.b7 + 50*m.b8 + 49*m.b9 + 21*m.b10
+ 15*m.b11 + 61*m.b12 + 62*m.b13 + 21*m.b14 + 20*m.b15 + 17*m.b16 + 58*m.b17 + 60*m.b18
+ 16*m.b19 + 17*m.b20 + 18*m.b21 + 6*m.b22 + 59*m.b23 + 60*m.b24 + 15*m.b25 + 20*m.b26
+ 26*m.b27 + 17*m.b28 + 10*m.b29 + 62*m.b30 + 66*m.b31 + 20*m.b32 + 25*m.b33 + 31*m.b34
+ 22*m.b35 + 15*m.b36 + 5*m.b37 + 81*m.b38 + 81*m.b39 + 40*m.b40 + 44*m.b41 + 50*m.b42
+ 41*m.b43 + 35*m.b44 + 24*m.b45 + 20*m.b46 + 103*m.b47 + 107*m.b48 + 62*m.b49 + 67*m.b50
+ 72*m.b51 + 63*m.b52 + 57*m.b53 + 46*m.b54 + 41*m.b55 + 23*m.b56 + 108*m.b57 + 117*m.b58
+ 66*m.b59 + 71*m.b60 + 77*m.b61 + 68*m.b62 + 61*m.b63 + 51*m.b64 + 46*m.b65 + 26*m.b66
+ 11*m.b67 + 145*m.b68 + 149*m.b69 + 104*m.b70 + 108*m.b71 + 114*m.b72 + 106*m.b73 + 99*m.b74
+ 88*m.b75 + 84*m.b76 + 63*m.b77 + 49*m.b78 + 40*m.b79 + 181*m.b80 + 185*m.b81 + 140*m.b82
+ 144*m.b83 + 150*m.b84 + 142*m.b85 + 135*m.b86 + 124*m.b87 + 120*m.b88 + 99*m.b89 + 85*m.b90
+ 76*m.b91 + 35*m.b92 + 187*m.b93 + 191*m.b94 + 146*m.b95 + 150*m.b96 + 156*m.b97 + 142*m.b98
+ 137*m.b99 + 130*m.b100 + 125*m.b101 + 105*m.b102 + 90*m.b103 + 81*m.b104 + 41*m.b105
+ 10*m.b106 + 161*m.b107 + 170*m.b108 + 120*m.b109 + 124*m.b110 + 130*m.b111 + 115*m.b112
+ 110*m.b113 + 104*m.b114 + 105*m.b115 + 90*m.b116 + 72*m.b117 + 62*m.b118 + 34*m.b119
+ 31*m.b120 + 27*m.b121 + 142*m.b122 + 146*m.b123 + 101*m.b124 + 104*m.b125 + 111*m.b126
+ 97*m.b127 + 91*m.b128 + 85*m.b129 + 86*m.b130 + 75*m.b131 + 51*m.b132 + 59*m.b133 + 29*m.b134
+ 53*m.b135 + 48*m.b136 + 21*m.b137 + 174*m.b138 + 178*m.b139 + 133*m.b140 + 138*m.b141
+ 143*m.b142 + 129*m.b143 + 123*m.b144 + 117*m.b145 + 118*m.b146 + 107*m.b147 + 83*m.b148
+ 84*m.b149 + 54*m.b150 + 46*m.b151 + 35*m.b152 + 26*m.b153 + 31*m.b154 + 185*m.b155
+ 186*m.b156 + 142*m.b157 + 143*m.b158 + 140*m.b159 + 130*m.b160 + 126*m.b161 + 124*m.b162
+ 128*m.b163 + 118*m.b164 + 93*m.b165 + 101*m.b166 + 72*m.b167 + 69*m.b168 + 58*m.b169
+ 58*m.b170 + 43*m.b171 + 26*m.b172 + 164*m.b173 + 165*m.b174 + 120*m.b175 + 123*m.b176
+ 124*m.b177 + 106*m.b178 + 106*m.b179 + 105*m.b180 + 110*m.b181 + 104*m.b182 + 86*m.b183
+ 97*m.b184 + 71*m.b185 + 93*m.b186 + 82*m.b187 + 62*m.b188 + 42*m.b189 + 45*m.b190 + 22*m.b191
+ 137*m.b192 + 139*m.b193 + 94*m.b194 + 96*m.b195 + 94*m.b196 + 80*m.b197 + 78*m.b198
+ 77*m.b199 + 84*m.b200 + 77*m.b201 + 56*m.b202 + 64*m.b203 + 65*m.b204 + 90*m.b205 + 87*m.b206
+ 58*m.b207 + 36*m.b208 + 68*m.b209 + 50*m.b210 + 30*m.b211 + 117*m.b212 + 122*m.b213
+ 77*m.b214 + 80*m.b215 + 83*m.b216 + 68*m.b217 + 62*m.b218 + 60*m.b219 + 61*m.b220 + 50*m.b221
+ 34*m.b222 + 42*m.b223 + 49*m.b224 + 82*m.b225 + 77*m.b226 + 60*m.b227 + 30*m.b228 + 62*m.b229
+ 70*m.b230 + 49*m.b231 + 21*m.b232 + 114*m.b233 + 118*m.b234 + 73*m.b235 + 78*m.b236
+ 84*m.b237 + 69*m.b238 + 63*m.b239 + 57*m.b240 + 59*m.b241 + 48*m.b242 + 28*m.b243 + 36*m.b244
+ 43*m.b245 + 77*m.b246 + 72*m.b247 + 45*m.b248 + 27*m.b249 + 59*m.b250 + 69*m.b251 + 55*m.b252
+ 27*m.b253 + 5*m.b254 + 85*m.b255 + 89*m.b256 + 44*m.b257 + 48*m.b258 + 53*m.b259 + 41*m.b260
+ 34*m.b261 + 28*m.b262 + 29*m.b263 + 22*m.b264 + 23*m.b265 + 35*m.b266 + 69*m.b267 + 105*m.b268
+ 102*m.b269 + 74*m.b270 + 56*m.b271 + 88*m.b272 + 99*m.b273 + 81*m.b274 + 54*m.b275 + 32*m.b276
+ 29*m.b277 + 77*m.b278 + 80*m.b279 + 36*m.b280 + 40*m.b281 + 46*m.b282 + 34*m.b283 + 27*m.b284
+ 19*m.b285 + 21*m.b286 + 14*m.b287 + 29*m.b288 + 40*m.b289 + 77*m.b290 + 114*m.b291
+ 111*m.b292 + 84*m.b293 + 64*m.b294 + 96*m.b295 + 107*m.b296 + 87*m.b297 + 60*m.b298
+ 40*m.b299 + 37*m.b300 + 8*m.b301 + 87*m.b302 + 89*m.b303 + 44*m.b304 + 46*m.b305 + 46*m.b306
+ 30*m.b307 + 28*m.b308 + 29*m.b309 + 32*m.b310 + 27*m.b311 + 36*m.b312 + 47*m.b313 + 78*m.b314
+ 116*m.b315 + 112*m.b316 + 84*m.b317 + 66*m.b318 + 98*m.b319 + 95*m.b320 + 75*m.b321
+ 47*m.b322 + 36*m.b323 + 39*m.b324 + 12*m.b325 + 11*m.b326 + 91*m.b327 + 93*m.b328 + 48*m.b329
+ 50*m.b330 + 48*m.b331 + 34*m.b332 + 32*m.b333 + 33*m.b334 + 36*m.b335 + 30*m.b336 + 34*m.b337
+ 45*m.b338 + 77*m.b339 + 115*m.b340 + 110*m.b341 + 83*m.b342 + 63*m.b343 + 97*m.b344
+ 91*m.b345 + 72*m.b346 + 44*m.b347 + 32*m.b348 + 36*m.b349 + 9*m.b350 + 15*m.b351 + 3*m.b352
+ 105*m.b353 + 106*m.b354 + 62*m.b355 + 63*m.b356 + 64*m.b357 + 47*m.b358 + 46*m.b359
+ 49*m.b360 + 54*m.b361 + 48*m.b362 + 46*m.b363 + 59*m.b364 + 85*m.b365 + 119*m.b366
+ 115*m.b367 + 88*m.b368 + 66*m.b369 + 98*m.b370 + 79*m.b371 + 59*m.b372 + 31*m.b373 + 36*m.b374
+ 42*m.b375 + 28*m.b376 + 33*m.b377 + 21*m.b378 + 20*m.b379 + 111*m.b380 + 113*m.b381
+ 69*m.b382 + 71*m.b383 + 66*m.b384 + 51*m.b385 + 53*m.b386 + 56*m.b387 + 61*m.b388 + 57*m.b389
+ 59*m.b390 + 71*m.b391 + 96*m.b392 + 130*m.b393 + 126*m.b394 + 98*m.b395 + 75*m.b396
+ 98*m.b397 + 85*m.b398 + 62*m.b399 + 38*m.b400 + 47*m.b401 + 53*m.b402 + 39*m.b403 + 42*m.b404
+ 29*m.b405 + 30*m.b406 + 12*m.b407 + 91*m.b408 + 92*m.b409 + 50*m.b410 + 51*m.b411 + 46*m.b412
+ 30*m.b413 + 34*m.b414 + 38*m.b415 + 43*m.b416 + 49*m.b417 + 60*m.b418 + 71*m.b419 + 103*m.b420
+ 141*m.b421 + 136*m.b422 + 109*m.b423 + 90*m.b424 + 115*m.b425 + 99*m.b426 + 81*m.b427
+ 53*m.b428 + 61*m.b429 + 62*m.b430 + 36*m.b431 + 34*m.b432 + 24*m.b433 + 28*m.b434 + 20*m.b435
+ 20*m.b436 + 83*m.b437 + 85*m.b438 + 42*m.b439 + 43*m.b440 + 38*m.b441 + 22*m.b442 + 26*m.b443
+ 32*m.b444 + 36*m.b445 + 51*m.b446 + 63*m.b447 + 75*m.b448 + 106*m.b449 + 142*m.b450
+ 140*m.b451 + 112*m.b452 + 93*m.b453 + 126*m.b454 + 108*m.b455 + 88*m.b456 + 60*m.b457
+ 64*m.b458 + 66*m.b459 + 39*m.b460 + 36*m.b461 + 27*m.b462 + 31*m.b463 + 28*m.b464 + 28*m.b465
+ 8*m.b466 + 89*m.b467 + 91*m.b468 + 55*m.b469 + 55*m.b470 + 50*m.b471 + 34*m.b472 + 39*m.b473
+ 44*m.b474 + 49*m.b475 + 63*m.b476 + 76*m.b477 + 87*m.b478 + 120*m.b479 + 155*m.b480
+ 150*m.b481 + 123*m.b482 + 100*m.b483 + 123*m.b484 + 109*m.b485 + 86*m.b486 + 62*m.b487
+ 71*m.b488 + 78*m.b489 + 52*m.b490 + 49*m.b491 + 39*m.b492 + 44*m.b493 + 35*m.b494 + 24*m.b495
+ 15*m.b496 + 12*m.b497 + 95*m.b498 + 97*m.b499 + 64*m.b500 + 63*m.b501 + 56*m.b502 + 42*m.b503
+ 49*m.b504 + 56*m.b505 + 60*m.b506 + 75*m.b507 + 86*m.b508 + 97*m.b509 + 126*m.b510
+ 160*m.b511 + 155*m.b512 + 128*m.b513 + 104*m.b514 + 128*m.b515 + 113*m.b516 + 90*m.b517
+ 67*m.b518 + 76*m.b519 + 82*m.b520 + 62*m.b521 + 59*m.b522 + 49*m.b523 + 53*m.b524 + 40*m.b525
+ 29*m.b526 + 25*m.b527 + 23*m.b528 + 11*m.b529 + 74*m.b530 + 81*m.b531 + 44*m.b532 + 43*m.b533
+ 35*m.b534 + 23*m.b535 + 30*m.b536 + 39*m.b537 + 44*m.b538 + 62*m.b539 + 78*m.b540 + 89*m.b541
+ 121*m.b542 + 159*m.b543 + 155*m.b544 + 127*m.b545 + 108*m.b546 + 136*m.b547 + 124*m.b548
+ 101*m.b549 + 75*m.b550 + 79*m.b551 + 81*m.b552 + 54*m.b553 + 50*m.b554 + 42*m.b555 + 46*m.b556
+ 43*m.b557 + 39*m.b558 + 23*m.b559 + 14*m.b560 + 14*m.b561 + 21*m.b562 + 67*m.b563 + 69*m.b564
+ 42*m.b565 + 41*m.b566 + 31*m.b567 + 25*m.b568 + 32*m.b569 + 41*m.b570 + 46*m.b571 + 64*m.b572
+ 83*m.b573 + 90*m.b574 + 130*m.b575 + 164*m.b576 + 160*m.b577 + 133*m.b578 + 114*m.b579
+ 146*m.b580 + 134*m.b581 + 111*m.b582 + 85*m.b583 + 84*m.b584 + 86*m.b585 + 59*m.b586
+ 52*m.b587 + 47*m.b588 + 51*m.b589 + 53*m.b590 + 49*m.b591 + 32*m.b592 + 24*m.b593 + 24*m.b594
+ 30*m.b595 + 9*m.b596 + 74*m.b597 + 76*m.b598 + 61*m.b599 + 60*m.b600 + 42*m.b601 + 44*m.b602
+ 51*m.b603 + 60*m.b604 + 66*m.b605 + 83*m.b606 + 102*m.b607 + 110*m.b608 + 147*m.b609
+ 185*m.b610 + 179*m.b611 + 155*m.b612 + 133*m.b613 + 159*m.b614 + 146*m.b615 + 122*m.b616
+ 98*m.b617 + 105*m.b618 + 107*m.b619 + 79*m.b620 + 71*m.b621 + 66*m.b622 + 70*m.b623
+ 70*m.b624 + 60*m.b625 + 48*m.b626 + 40*m.b627 + 36*m.b628 + 33*m.b629 + 25*m.b630 + 18*m.b631
+ 57*m.b632 + 59*m.b633 + 46*m.b634 + 41*m.b635 + 25*m.b636 + 30*m.b637 + 36*m.b638 + 47*m.b639
+ 52*m.b640 + 71*m.b641 + 93*m.b642 + 98*m.b643 + 136*m.b644 + 172*m.b645 + 172*m.b646
+ 148*m.b647 + 126*m.b648 + 158*m.b649 + 147*m.b650 + 124*m.b651 + 121*m.b652 + 97*m.b653
+ 99*m.b654 + 71*m.b655 + 65*m.b656 + 59*m.b657 + 63*m.b658 + 67*m.b659 + 62*m.b660 + 46*m.b661
+ 38*m.b662 + 37*m.b663 + 43*m.b664 + 23*m.b665 + 13*m.b666 + 17*m.b667 + 45*m.b668 + 46*m.b669
+ 41*m.b670 + 34*m.b671 + 20*m.b672 + 34*m.b673 + 38*m.b674 + 48*m.b675 + 53*m.b676 + 73*m.b677
+ 96*m.b678 + 99*m.b679 + 137*m.b680 + 176*m.b681 + 178*m.b682 + 151*m.b683 + 131*m.b684
+ 163*m.b685 + 159*m.b686 + 135*m.b687 + 108*m.b688 + 102*m.b689 + 103*m.b690 + 73*m.b691
+ 67*m.b692 + 64*m.b693 + 69*m.b694 + 75*m.b695 + 72*m.b696 + 54*m.b697 + 46*m.b698 + 49*m.b699
+ 54*m.b700 + 34*m.b701 + 24*m.b702 + 29*m.b703 + 12*m.b704 + 35*m.b705 + 37*m.b706 + 35*m.b707
+ 26*m.b708 + 18*m.b709 + 34*m.b710 + 36*m.b711 + 46*m.b712 + 51*m.b713 + 70*m.b714 + 93*m.b715
+ 97*m.b716 + 134*m.b717 + 171*m.b718 + 176*m.b719 + 151*m.b720 + 129*m.b721 + 161*m.b722
+ 163*m.b723 + 139*m.b724 + 118*m.b725 + 102*m.b726 + 101*m.b727 + 71*m.b728 + 65*m.b729
+ 65*m.b730 + 70*m.b731 + 84*m.b732 + 78*m.b733 + 58*m.b734 + 50*m.b735 + 56*m.b736 + 62*m.b737
+ 41*m.b738 + 32*m.b739 + 38*m.b740 + 21*m.b741 + 9*m.b742 + 29*m.b743 + 33*m.b744 + 30*m.b745
+ 21*m.b746 + 18*m.b747 + 35*m.b748 + 33*m.b749 + 40*m.b750 + 45*m.b751 + 65*m.b752 + 87*m.b753
+ 91*m.b754 + 117*m.b755 + 166*m.b756 + 171*m.b757 + 144*m.b758 + 125*m.b759 + 157*m.b760
+ 156*m.b761 + 139*m.b762 + 113*m.b763 + 95*m.b764 + 97*m.b765 + 67*m.b766 + 60*m.b767
+ 62*m.b768 + 67*m.b769 + 79*m.b770 + 82*m.b771 + 62*m.b772 + 53*m.b773 + 59*m.b774 + 66*m.b775
+ 45*m.b776 + 38*m.b777 + 45*m.b778 + 27*m.b779 + 15*m.b780 + 6*m.b781 + 3*m.b782 + 11*m.b783
+ 41*m.b784 + 37*m.b785 + 47*m.b786 + 57*m.b787 + 55*m.b788 + 58*m.b789 + 63*m.b790 + 83*m.b791
+ 105*m.b792 + 109*m.b793 + 147*m.b794 + 186*m.b795 + 188*m.b796 + 164*m.b797 + 144*m.b798
+ 176*m.b799 + 182*m.b800 + 161*m.b801 + 134*m.b802 + 119*m.b803 + 116*m.b804 + 86*m.b805
+ 78*m.b806 + 84*m.b807 + 88*m.b808 + 101*m.b809 + 108*m.b810 + 88*m.b811 + 80*m.b812
+ 86*m.b813 + 92*m.b814 + 71*m.b815 + 64*m.b816 + 71*m.b817 + 54*m.b818 + 41*m.b819 + 32*m.b820
+ 25*m.b821 + 5*m.b822 + 12*m.b823 + 55*m.b824 + 41*m.b825 + 53*m.b826 + 64*m.b827 + 61*m.b828
+ 61*m.b829 + 66*m.b830 + 84*m.b831 + 111*m.b832 + 113*m.b833 + 150*m.b834 + 186*m.b835
+ 192*m.b836 + 166*m.b837 + 147*m.b838 + 180*m.b839 + 188*m.b840 + 167*m.b841 + 140*m.b842
+ 124*m.b843 + 119*m.b844 + 90*m.b845 + 87*m.b846 + 90*m.b847 + 94*m.b848 + 107*m.b849
+ 114*m.b850 + 77*m.b851 + 86*m.b852 + 92*m.b853 + 98*m.b854 + 80*m.b855 + 74*m.b856 + 77*m.b857
+ 60*m.b858 + 48*m.b859 + 38*m.b860 + 32*m.b861 + 6*m.b862, sense=minimize)
m.c2 = Constraint(expr= m.b2 + m.b3 + m.b5 + m.b8 + m.b12 + m.b17 + m.b23 + m.b30 + m.b38 + m.b47 + m.b57 + m.b68
+ m.b80 + m.b93 + m.b107 + m.b122 + m.b138 + m.b155 + m.b173 + m.b192 + m.b212 + m.b233 + m.b255
+ m.b278 + m.b302 + m.b327 + m.b353 + m.b380 + m.b408 + m.b437 + m.b467 + m.b498 + m.b530
+ m.b563 + m.b597 + m.b632 + m.b668 + m.b705 + m.b743 + m.b782 + m.b822 == 2)
m.c3 = Constraint(expr= m.b2 + m.b4 + m.b6 + m.b9 + m.b13 + m.b18 + m.b24 + m.b31 + m.b39 + m.b48 + m.b58 + m.b69
+ m.b81 + m.b94 + m.b108 + m.b123 + m.b139 + m.b156 + m.b174 + m.b193 + m.b213 + m.b234 + m.b256
+ m.b279 + m.b303 + m.b328 + m.b354 + m.b381 + m.b409 + m.b438 + m.b468 + m.b499 + m.b531
+ m.b564 + m.b598 + m.b633 + m.b669 + m.b706 + m.b744 + m.b783 + m.b823 == 2)
m.c4 = Constraint(expr= m.b3 + m.b4 + m.b7 + m.b10 + m.b14 + m.b19 + m.b25 + m.b32 + m.b40 + m.b49 + m.b59 + m.b70
+ m.b82 + m.b95 + m.b109 + m.b124 + m.b140 + m.b157 + m.b175 + m.b194 + m.b214 + m.b235 + m.b257
+ m.b280 + m.b304 + m.b329 + m.b355 + m.b382 + m.b410 + m.b439 + m.b469 + m.b500 + m.b532
+ m.b565 + m.b599 + m.b634 + m.b670 + m.b707 + m.b745 + m.b784 + m.b824 == 2)
m.c5 = Constraint(expr= m.b5 + m.b6 + m.b7 + m.b11 + m.b15 + m.b20 + m.b26 + m.b33 + m.b41 + m.b50 + m.b60 + m.b71
+ m.b83 + m.b96 + m.b110 + m.b125 + m.b141 + m.b158 + m.b176 + m.b195 + m.b215 + m.b236 + m.b258
+ m.b281 + m.b305 + m.b330 + m.b356 + m.b383 + m.b411 + m.b440 + m.b470 + m.b501 + m.b533
+ m.b566 + m.b600 + m.b635 + m.b671 + m.b708 + m.b746 + m.b785 + m.b825 == 2)
m.c6 = Constraint(expr= m.b8 + m.b9 + m.b10 + m.b11 + m.b16 + m.b21 + m.b27 + m.b34 + m.b42 + m.b51 + m.b61 + m.b72
+ m.b84 + m.b97 + m.b111 + m.b126 + m.b142 + m.b159 + m.b177 + m.b196 + m.b216 + m.b237 + m.b259
+ m.b282 + m.b306 + m.b331 + m.b357 + m.b384 + m.b412 + m.b441 + m.b471 + m.b502 + m.b534
+ m.b567 + m.b601 + m.b636 + m.b672 + m.b709 + m.b747 + m.b786 + m.b826 == 2)
m.c7 = Constraint(expr= m.b12 + m.b13 + m.b14 + m.b15 + m.b16 + m.b22 + m.b28 + m.b35 + m.b43 + m.b52 + m.b62 + m.b73
+ m.b85 + m.b98 + m.b112 + m.b127 + m.b143 + m.b160 + m.b178 + m.b197 + m.b217 + m.b238 + m.b260
+ m.b283 + m.b307 + m.b332 + m.b358 + m.b385 + m.b413 + m.b442 + m.b472 + m.b503 + m.b535
+ m.b568 + m.b602 + m.b637 + m.b673 + m.b710 + m.b748 + m.b787 + m.b827 == 2)
m.c8 = Constraint(expr= m.b17 + m.b18 + m.b19 + m.b20 + m.b21 + m.b22 + m.b29 + m.b36 + m.b44 + m.b53 + m.b63 + m.b74
+ m.b86 + m.b99 + m.b113 + m.b128 + m.b144 + m.b161 + m.b179 + m.b198 + m.b218 + m.b239 + m.b261
+ m.b284 + m.b308 + m.b333 + m.b359 + m.b386 + m.b414 + m.b443 + m.b473 + m.b504 + m.b536
+ m.b569 + m.b603 + m.b638 + m.b674 + m.b711 + m.b749 + m.b788 + m.b828 == 2)
m.c9 = Constraint(expr= m.b23 + m.b24 + m.b25 + m.b26 + m.b27 + m.b28 + m.b29 + m.b37 + m.b45 + m.b54 + m.b64 + m.b75
+ m.b87 + m.b100 + m.b114 + m.b129 + m.b145 + m.b162 + m.b180 + m.b199 + m.b219 + m.b240
+ m.b262 + m.b285 + m.b309 + m.b334 + m.b360 + m.b387 + m.b415 + m.b444 + m.b474 + m.b505
+ m.b537 + m.b570 + m.b604 + m.b639 + m.b675 + m.b712 + m.b750 + m.b789 + m.b829 == 2)
m.c10 = Constraint(expr= m.b30 + m.b31 + m.b32 + m.b33 + m.b34 + m.b35 + m.b36 + m.b37 + m.b46 + m.b55 + m.b65 + m.b76
+ m.b88 + m.b101 + m.b115 + m.b130 + m.b146 + m.b163 + m.b181 + m.b200 + m.b220 + m.b241
+ m.b263 + m.b286 + m.b310 + m.b335 + m.b361 + m.b388 + m.b416 + m.b445 + m.b475 + m.b506
+ m.b538 + m.b571 + m.b605 + m.b640 + m.b676 + m.b713 + m.b751 + m.b790 + m.b830 == 2)
m.c11 = Constraint(expr= m.b38 + m.b39 + m.b40 + m.b41 + m.b42 + m.b43 + m.b44 + m.b45 + m.b46 + m.b56 + m.b66 + m.b77
+ m.b89 + m.b102 + m.b116 + m.b131 + m.b147 + m.b164 + m.b182 + m.b201 + m.b221 + m.b242
+ m.b264 + m.b287 + m.b311 + m.b336 + m.b362 + m.b389 + m.b417 + m.b446 + m.b476 + m.b507
+ m.b539 + m.b572 + m.b606 + m.b641 + m.b677 + m.b714 + m.b752 + m.b791 + m.b831 == 2)
m.c12 = Constraint(expr= m.b47 + m.b48 + m.b49 + m.b50 + m.b51 + m.b52 + m.b53 + m.b54 + m.b55 + m.b56 + m.b67 + m.b78
+ m.b90 + m.b103 + m.b117 + m.b132 + m.b148 + m.b165 + m.b183 + m.b202 + m.b222 + m.b243
+ m.b265 + m.b288 + m.b312 + m.b337 + m.b363 + m.b390 + m.b418 + m.b447 + m.b477 + m.b508
+ m.b540 + m.b573 + m.b607 + m.b642 + m.b678 + m.b715 + m.b753 + m.b792 + m.b832 == 2)
m.c13 = Constraint(expr= m.b57 + m.b58 + m.b59 + m.b60 + m.b61 + m.b62 + m.b63 + m.b64 + m.b65 + m.b66 + m.b67 + m.b79
+ m.b91 + m.b104 + m.b118 + m.b133 + m.b149 + m.b166 + m.b184 + m.b203 + m.b223 + m.b244
+ m.b266 + m.b289 + m.b313 + m.b338 + m.b364 + m.b391 + m.b419 + m.b448 + m.b478 + m.b509
+ m.b541 + m.b574 + m.b608 + m.b643 + m.b679 + m.b716 + m.b754 + m.b793 + m.b833 == 2)
m.c14 = Constraint(expr= m.b68 + m.b69 + m.b70 + m.b71 + m.b72 + m.b73 + m.b74 + m.b75 + m.b76 + m.b77 + m.b78 + m.b79
+ m.b92 + m.b105 + m.b119 + m.b134 + m.b150 + m.b167 + m.b185 + m.b204 + m.b224 + m.b245
+ m.b267 + m.b290 + m.b314 + m.b339 + m.b365 + m.b392 + m.b420 + m.b449 + m.b479 + m.b510
+ m.b542 + m.b575 + m.b609 + m.b644 + m.b680 + m.b717 + m.b755 + m.b794 + m.b834 == 2)
m.c15 = Constraint(expr= m.b80 + m.b81 + m.b82 + m.b83 + m.b84 + m.b85 + m.b86 + m.b87 + m.b88 + m.b89 + m.b90 + m.b91
+ m.b92 + m.b106 + m.b120 + m.b135 + m.b151 + m.b168 + m.b186 + m.b205 + m.b225 + m.b246
+ m.b268 + m.b291 + m.b315 + m.b340 + m.b366 + m.b393 + m.b421 + m.b450 + m.b480 + m.b511
+ m.b543 + m.b576 + m.b610 + m.b645 + m.b681 + m.b718 + m.b756 + m.b795 + m.b835 == 2)
m.c16 = Constraint(expr= m.b93 + m.b94 + m.b95 + m.b96 + m.b97 + m.b98 + m.b99 + m.b100 + m.b101 + m.b102 + m.b103
+ m.b104 + m.b105 + m.b106 + m.b121 + m.b136 + m.b152 + m.b169 + m.b187 + m.b206 + m.b226
+ m.b247 + m.b269 + m.b292 + m.b316 + m.b341 + m.b367 + m.b394 + m.b422 + m.b451 + m.b481
+ m.b512 + m.b544 + m.b577 + m.b611 + m.b646 + m.b682 + m.b719 + m.b757 + m.b796 + m.b836 == 2)
m.c17 = Constraint(expr= m.b107 + m.b108 + m.b109 + m.b110 + m.b111 + m.b112 + m.b113 + m.b114 + m.b115 + m.b116
+ m.b117 + m.b118 + m.b119 + m.b120 + m.b121 + m.b137 + m.b153 + m.b170 + m.b188 + m.b207
+ m.b227 + m.b248 + m.b270 + m.b293 + m.b317 + m.b342 + m.b368 + m.b395 + m.b423 + m.b452
+ m.b482 + m.b513 + m.b545 + m.b578 + m.b612 + m.b647 + m.b683 + m.b720 + m.b758 + m.b797
+ m.b837 == 2)
m.c18 = Constraint(expr= m.b122 + m.b123 + m.b124 + m.b125 + m.b126 + m.b127 + m.b128 + m.b129 + m.b130 + m.b131
+ m.b132 + m.b133 + m.b134 + m.b135 + m.b136 + m.b137 + m.b154 + m.b171 + m.b189 + m.b208
+ m.b228 + m.b249 + m.b271 + m.b294 + m.b318 + m.b343 + m.b369 + m.b396 + m.b424 + m.b453
+ m.b483 + m.b514 + m.b546 + m.b579 + m.b613 + m.b648 + m.b684 + m.b721 + m.b759 + m.b798
+ m.b838 == 2)
m.c19 = Constraint(expr= m.b138 + m.b139 + m.b140 + m.b141 + m.b142 + m.b143 + m.b144 + m.b145 + m.b146 + m.b147
+ m.b148 + m.b149 + m.b150 + m.b151 + m.b152 + m.b153 + m.b154 + m.b172 + m.b190 + m.b209
+ m.b229 + m.b250 + m.b272 + m.b295 + m.b319 + m.b344 + m.b370 + m.b397 + m.b425 + m.b454
+ m.b484 + m.b515 + m.b547 + m.b580 + m.b614 + m.b649 + m.b685 + m.b722 + m.b760 + m.b799
+ m.b839 == 2)
m.c20 = Constraint(expr= m.b155 + m.b156 + m.b157 + m.b158 + m.b159 + m.b160 + m.b161 + m.b162 + m.b163 + m.b164
+ m.b165 + m.b166 + m.b167 + m.b168 + m.b169 + m.b170 + m.b171 + m.b172 + m.b191 + m.b210
+ m.b230 + m.b251 + m.b273 + m.b296 + m.b320 + m.b345 + m.b371 + m.b398 + m.b426 + m.b455
+ m.b485 + m.b516 + m.b548 + m.b581 + m.b615 + m.b650 + m.b686 + m.b723 + m.b761 + m.b800
+ m.b840 == 2)
m.c21 = Constraint(expr= m.b173 + m.b174 + m.b175 + m.b176 + m.b177 + m.b178 + m.b179 + m.b180 + m.b181 + m.b182
+ m.b183 + m.b184 + m.b185 + m.b186 + m.b187 + m.b188 + m.b189 + m.b190 + m.b191 + m.b211
+ m.b231 + m.b252 + m.b274 + m.b297 + m.b321 + m.b346 + m.b372 + m.b399 + m.b427 + m.b456
+ m.b486 + m.b517 + m.b549 + m.b582 + m.b616 + m.b651 + m.b687 + m.b724 + m.b762 + m.b801
+ m.b841 == 2)
m.c22 = Constraint(expr= m.b192 + m.b193 + m.b194 + m.b195 + m.b196 + m.b197 + m.b198 + m.b199 + m.b200 + m.b201
+ m.b202 + m.b203 + m.b204 + m.b205 + m.b206 + m.b207 + m.b208 + m.b209 + m.b210 + m.b211
+ m.b232 + m.b253 + m.b275 + m.b298 + m.b322 + m.b347 + m.b373 + m.b400 + m.b428 + m.b457
+ m.b487 + m.b518 + m.b550 + m.b583 + m.b617 + m.b652 + m.b688 + m.b725 + m.b763 + m.b802
+ m.b842 == 2)
m.c23 = Constraint(expr= m.b212 + m.b213 + m.b214 + m.b215 + m.b216 + m.b217 + m.b218 + m.b219 + m.b220 + m.b221
+ m.b222 + m.b223 + m.b224 + m.b225 + m.b226 + m.b227 + m.b228 + m.b229 + m.b230 + m.b231
+ m.b232 + m.b254 + m.b276 + m.b299 + m.b323 + m.b348 + m.b374 + m.b401 + m.b429 + m.b458
+ m.b488 + m.b519 + m.b551 + m.b584 + m.b618 + m.b653 + m.b689 + m.b726 + m.b764 + m.b803
+ m.b843 == 2)
m.c24 = Constraint(expr= m.b233 + m.b234 + m.b235 + m.b236 + m.b237 + m.b238 + m.b239 + m.b240 + m.b241 + m.b242
+ m.b243 + m.b244 + m.b245 + m.b246 + m.b247 + m.b248 + m.b249 + m.b250 + m.b251 + m.b252
+ m.b253 + m.b254 + m.b277 + m.b300 + m.b324 + m.b349 + m.b375 + m.b402 + m.b430 + m.b459
+ m.b489 + m.b520 + m.b552 + m.b585 + m.b619 + m.b654 + m.b690 + m.b727 + m.b765 + m.b804
+ m.b844 == 2)
m.c25 = Constraint(expr= m.b255 + m.b256 + m.b257 + m.b258 + m.b259 + m.b260 + m.b261 + m.b262 + m.b263 + m.b264
+ m.b265 + m.b266 + m.b267 + m.b268 + m.b269 + m.b270 + m.b271 + m.b272 + m.b273 + m.b274
+ m.b275 + m.b276 + m.b277 + m.b301 + m.b325 + m.b350 + m.b376 + m.b403 + m.b431 + m.b460
+ m.b490 + m.b521 + m.b553 + m.b586 + m.b620 + m.b655 + m.b691 + m.b728 + m.b766 + m.b805
+ m.b845 == 2)
m.c26 = Constraint(expr= m.b278 + m.b279 + m.b280 + m.b281 + m.b282 + m.b283 + m.b284 + m.b285 + m.b286 + m.b287
+ m.b288 + m.b289 + m.b290 + m.b291 + m.b292 + m.b293 + m.b294 + m.b295 + m.b296 + m.b297
+ m.b298 + m.b299 + m.b300 + m.b301 + m.b326 + m.b351 + m.b377 + m.b404 + m.b432 + m.b461
+ m.b491 + m.b522 + m.b554 + m.b587 + m.b621 + m.b656 + m.b692 + m.b729 + m.b767 + m.b806
+ m.b846 == 2)
m.c27 = Constraint(expr= m.b302 + m.b303 + m.b304 + m.b305 + m.b306 + m.b307 + m.b308 + m.b309 + m.b310 + m.b311
+ m.b312 + m.b313 + m.b314 + m.b315 + m.b316 + m.b317 + m.b318 + m.b319 + m.b320 + m.b321
+ m.b322 + m.b323 + m.b324 + m.b325 + m.b326 + m.b352 + m.b378 + m.b405 + m.b433 + m.b462
+ m.b492 + m.b523 + m.b555 + m.b588 + m.b622 + m.b657 + m.b693 + m.b730 + m.b768 + m.b807
+ m.b847 == 2)
m.c28 = Constraint(expr= m.b327 + m.b328 + m.b329 + m.b330 + m.b331 + m.b332 + m.b333 + m.b334 + m.b335 + m.b336
+ m.b337 + m.b338 + m.b339 + m.b340 + m.b341 + m.b342 + m.b343 + m.b344 + m.b345 + m.b346
+ m.b347 + m.b348 + m.b349 + m.b350 + m.b351 + m.b352 + m.b379 + m.b406 + m.b434 + m.b463
+ m.b493 + m.b524 + m.b556 + m.b589 + m.b623 + m.b658 + m.b694 + m.b731 + m.b769 + m.b808
+ m.b848 == 2)
m.c29 = Constraint(expr= m.b353 + m.b354 + m.b355 + m.b356 + m.b357 + m.b358 + m.b359 + m.b360 + m.b361 + m.b362
+ m.b363 + m.b364 + m.b365 + m.b366 + m.b367 + m.b368 + m.b369 + m.b370 + m.b371 + m.b372
+ m.b373 + m.b374 + m.b375 + m.b376 + m.b377 + m.b378 + m.b379 + m.b407 + m.b435 + m.b464
+ m.b494 + m.b525 + m.b557 + m.b590 + m.b624 + m.b659 + m.b695 + m.b732 + m.b770 + m.b809
+ m.b849 == 2)
m.c30 = Constraint(expr= m.b380 + m.b381 + m.b382 + m.b383 + m.b384 + m.b385 + m.b386 + m.b387 + m.b388 + m.b389
+ m.b390 + m.b391 + m.b392 + m.b393 + m.b394 + m.b395 + m.b396 + m.b397 + m.b398 + m.b399
+ m.b400 + m.b401 + m.b402 + m.b403 + m.b404 + m.b405 + m.b406 + m.b407 + m.b436 + m.b465
+ m.b495 + m.b526 + m.b558 + m.b591 + m.b625 + m.b660 + m.b696 + m.b733 + m.b771 + m.b810
+ m.b850 == 2)
m.c31 = Constraint(expr= m.b408 + m.b409 + m.b410 + m.b411 + m.b412 + m.b413 + m.b414 + m.b415 + m.b416 + m.b417
+ m.b418 + m.b419 + m.b420 + m.b421 + m.b422 + m.b423 + m.b424 + m.b425 + m.b426 + m.b427
+ m.b428 + m.b429 + m.b430 + m.b431 + m.b432 + m.b433 + m.b434 + m.b435 + m.b436 + m.b466
+ m.b496 + m.b527 + m.b559 + m.b592 + m.b626 + m.b661 + m.b697 + m.b734 + m.b772 + m.b811
+ m.b851 == 2)
m.c32 = Constraint(expr= m.b437 + m.b438 + m.b439 + m.b440 + m.b441 + m.b442 + m.b443 + m.b444 + m.b445 + m.b446
+ m.b447 + m.b448 + m.b449 + m.b450 + m.b451 + m.b452 + m.b453 + m.b454 + m.b455 + m.b456
+ m.b457 + m.b458 + m.b459 + m.b460 + m.b461 + m.b462 + m.b463 + m.b464 + m.b465 + m.b466
+ m.b497 + m.b528 + m.b560 + m.b593 + m.b627 + m.b662 + m.b698 + m.b735 + m.b773 + m.b812
+ m.b852 == 2)
m.c33 = Constraint(expr= m.b467 + m.b468 + m.b469 + m.b470 + m.b471 + m.b472 + m.b473 + m.b474 + m.b475 + m.b476
+ m.b477 + m.b478 + m.b479 + m.b480 + m.b481 + m.b482 + m.b483 + m.b484 + m.b485 + m.b486
+ m.b487 + m.b488 + m.b489 + m.b490 + m.b491 + m.b492 + m.b493 + m.b494 + m.b495 + m.b496
+ m.b497 + m.b529 + m.b561 + m.b594 + m.b628 + m.b663 + m.b699 + m.b736 + m.b774 + m.b813
+ m.b853 == 2)
m.c34 = Constraint(expr= m.b498 + m.b499 + m.b500 + m.b501 + m.b502 + m.b503 + m.b504 + m.b505 + m.b506 + m.b507
+ m.b508 + m.b509 + m.b510 + m.b511 + m.b512 + m.b513 + m.b514 + m.b515 + m.b516 + m.b517
+ m.b518 + m.b519 + m.b520 + m.b521 + m.b522 + m.b523 + m.b524 + m.b525 + m.b526 + m.b527
+ m.b528 + m.b529 + m.b562 + m.b595 + m.b629 + m.b664 + m.b700 + m.b737 + m.b775 + m.b814
+ m.b854 == 2)
m.c35 = Constraint(expr= m.b530 + m.b531 + m.b532 + m.b533 + m.b534 + m.b535 + m.b536 + m.b537 + m.b538 + m.b539
+ m.b540 + m.b541 + m.b542 + m.b543 + m.b544 + m.b545 + m.b546 + m.b547 + m.b548 + m.b549
+ m.b550 + m.b551 + m.b552 + m.b553 + m.b554 + m.b555 + m.b556 + m.b557 + m.b558 + m.b559
+ m.b560 + m.b561 + m.b562 + m.b596 + m.b630 + m.b665 + m.b701 + m.b738 + m.b776 + m.b815
+ m.b855 == 2)
m.c36 = Constraint(expr= m.b563 + m.b564 + m.b565 + m.b566 + m.b567 + m.b568 + m.b569 + m.b570 + m.b571 + m.b572
+ m.b573 + m.b574 + m.b575 + m.b576 + m.b577 + m.b578 + m.b579 + m.b580 + m.b581 + m.b582
+ m.b583 + m.b584 + m.b585 + m.b586 + m.b587 + m.b588 + m.b589 + m.b590 + m.b591 + m.b592
+ m.b593 + m.b594 + m.b595 + m.b596 + m.b631 + m.b666 + m.b702 + m.b739 + m.b777 + m.b816
+ m.b856 == 2)
m.c37 = Constraint(expr= m.b597 + m.b598 + m.b599 + m.b600 + m.b601 + m.b602 + m.b603 + m.b604 + m.b605 + m.b606
+ m.b607 + m.b608 + m.b609 + m.b610 + m.b611 + m.b612 + m.b613 + m.b614 + m.b615 + m.b616
+ m.b617 + m.b618 + m.b619 + m.b620 + m.b621 + m.b622 + m.b623 + m.b624 + m.b625 + m.b626
+ m.b627 + m.b628 + m.b629 + m.b630 + m.b631 + m.b667 + m.b703 + m.b740 + m.b778 + m.b817
+ m.b857 == 2)
m.c38 = Constraint(expr= m.b632 + m.b633 + m.b634 + m.b635 + m.b636 + m.b637 + m.b638 + m.b639 + m.b640 + m.b641
+ m.b642 + m.b643 + m.b644 + m.b645 + m.b646 + m.b647 + m.b648 + m.b649 + m.b650 + m.b651
+ m.b652 + m.b653 + m.b654 + m.b655 + m.b656 + m.b657 + m.b658 + m.b659 + m.b660 + m.b661
+ m.b662 + m.b663 + m.b664 + m.b665 + m.b666 + m.b667 + m.b704 + m.b741 + m.b779 + m.b818
+ m.b858 == 2)
m.c39 = Constraint(expr= m.b668 + m.b669 + m.b670 + m.b671 + m.b672 + m.b673 + m.b674 + m.b675 + m.b676 + m.b677
+ m.b678 + m.b679 + m.b680 + m.b681 + m.b682 + m.b683 + m.b684 + m.b685 + m.b686 + m.b687
+ m.b688 + m.b689 + m.b690 + m.b691 + m.b692 + m.b693 + m.b694 + m.b695 + m.b696 + m.b697
+ m.b698 + m.b699 + m.b700 + m.b701 + m.b702 + m.b703 + m.b704 + m.b742 + m.b780 + m.b819
+ m.b859 == 2)
m.c40 = Constraint(expr= m.b705 + m.b706 + m.b707 + m.b708 + m.b709 + m.b710 + m.b711 + m.b712 + m.b713 + m.b714
+ m.b715 + m.b716 + m.b717 + m.b718 + m.b719 + m.b720 + m.b721 + m.b722 + m.b723 + m.b724
+ m.b725 + m.b726 + m.b727 + m.b728 + m.b729 + m.b730 + m.b731 + m.b732 + m.b733 + m.b734
+ m.b735 + m.b736 + m.b737 + m.b738 + m.b739 + m.b740 + m.b741 + m.b742 + m.b781 + m.b820
+ m.b860 == 2)
m.c41 = Constraint(expr= m.b743 + m.b744 + m.b745 + m.b746 + m.b747 + m.b748 + m.b749 + m.b750 + m.b751 + m.b752
+ m.b753 + m.b754 + m.b755 + m.b756 + m.b757 + m.b758 + m.b759 + m.b760 + m.b761 + m.b762
+ m.b763 + m.b764 + m.b765 + m.b766 + m.b767 + m.b768 + m.b769 + m.b770 + m.b771 + m.b772
+ m.b773 + m.b774 + m.b775 + m.b776 + m.b777 + m.b778 + m.b779 + m.b780 + m.b781 + m.b821
+ m.b861 == 2)
m.c42 = Constraint(expr= m.b782 + m.b783 + m.b784 + m.b785 + m.b786 + m.b787 + m.b788 + m.b789 + m.b790 + m.b791
+ m.b792 + m.b793 + m.b794 + m.b795 + m.b796 + m.b797 + m.b798 + m.b799 + m.b800 + m.b801
+ m.b802 + m.b803 + m.b804 + m.b805 + m.b806 + m.b807 + m.b808 + m.b809 + m.b810 + m.b811
+ m.b812 + m.b813 + m.b814 + m.b815 + m.b816 + m.b817 + m.b818 + m.b819 + m.b820 + m.b821
+ m.b862 == 2)
m.c43 = Constraint(expr= m.b822 + m.b823 + m.b824 + m.b825 + m.b826 + m.b827 + m.b828 + m.b829 + m.b830 + m.b831
+ m.b832 + m.b833 + m.b834 + m.b835 + m.b836 + m.b837 + m.b838 + m.b839 + m.b840 + m.b841
+ m.b842 + m.b843 + m.b844 + m.b845 + m.b846 + m.b847 + m.b848 + m.b849 + m.b850 + m.b851
+ m.b852 + m.b853 + m.b854 + m.b855 + m.b856 + m.b857 + m.b858 + m.b859 + m.b860 + m.b861
+ m.b862 == 2)
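
# ---- Added illustration (not part of the original model file) ----
# The degree constraints above force each of the 42 nodes to touch exactly two
# selected edges (861 binary edge variables = C(42, 2)), so the model appears to
# be a minimum-cost 2-factor (degree-2) relaxation over the complete graph on
# 42 nodes. A minimal, hedged sketch of solving it with Pyomo -- assuming a MIP
# solver such as GLPK or CBC is installed (the solver name is an assumption):
#
#     from pyomo.environ import SolverFactory
#     results = SolverFactory('glpk').solve(m, tee=True)   # or 'cbc'
#     print(results.solver.termination_condition)
#     m.obj.display()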
| ["[email protected]"] | |
f8880d12b3954bf1f29a84e2fa0adf8ba9e779d6 | 52a4d869976a97498bdf56a8d0ff92cac138a136 | Bioinformatics Textbook Track/Chapter 2/rosalind_ba2d.py | 238db719734990db59ec3b92ca629ff672af9ea | [] | no_license | aakibinesar/Rosalind | d726369a787d848cc378976b886189978a60a3a5 | 375bbdbfb16bf11b2f980701bbd0ba74a1605cdb | refs/heads/master | 2022-08-18T09:36:00.941080 | 2020-05-24T18:49:38 | 2020-05-24T18:49:38 | 264,722,651 | 0 | 0 | null | 2020-05-17T17:51:03 | 2020-05-17T17:40:59 | null | UTF-8 | Python | false | false | 1,561 | py |
def greedymotifsearch(dna, k, t):
best = [s[:k] for s in dna]
for i in range(len(dna[0])-k+1):
tempbest = [dna[0][i:i+k]]
for m in range(1,t):
matrix = motifsToProfile(tempbest)
tempbest.append(profileMostProbablekmer(dna[m],k,matrix))
if score(tempbest) < score(best):
best = tempbest
return best

def score(motifs):
z = zip(*motifs)
thescore = 0
for string in z:
score = len(string) - max([string.count('A'), string.count('C'), string.count('G'), string.count('T')])
thescore += score
return thescore

def motifsToProfile(motifs):
d = {}
n = float(len(motifs))
z = list(zip(*motifs))
for i in range(len(z)):
d.setdefault('A', []).append(z[i].count('A')/n)
d.setdefault('C', []).append(z[i].count('C')/n)
d.setdefault('G', []).append(z[i].count('G')/n)
d.setdefault('T', []).append(z[i].count('T')/n)
return d

def profileMostProbablekmer(text, k, matrix):
maxp = None
probablekmer = None
for i in range(len(text)-k+1):
kmer = text[i:i+k]
pt = 1
for j in range(k):
p = matrix[kmer[j]][j]
pt *=p
if maxp == None or pt > maxp:
maxp = pt
probablekmer = kmer
return probablekmer

with open('rosalind_ba2d.txt') as f:
k,t = map(int,f.readline().rstrip().split(' '))
strings = [st.rstrip() for st in f.readlines()]
print('\n'.join(greedymotifsearch(strings, k, t)))  # bug: may be wrong, try several times
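
# ---- Added illustration (not part of the original file) ----
# Hedged usage note: rosalind_ba2d.txt is assumed to follow the Rosalind BA2D
# input format -- "k t" on the first line, then t DNA strings, one per line:
#
#     3 5
#     GGCGTTCAGGCA
#     AAGAATCAGTCA
#     CAAGGAGTTCGC
#     CACGTCAATCAC
#     CAATAATATTCG
#
# Also note that this greedy motif search is deterministic, so rerunning it on
# the same input always prints the same motifs; the "try several times" remark
# above only makes sense if the input file changes.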
"[email protected]"
]
| |
8d1dcda3139a9d6e5d1dcd75a2e85017e18a0a4a | 78c3082e9082b5b50435805723ae00a58ca88e30 | /03.AI알고리즘 소스코드/venv/Lib/site-packages/caffe2/python/operator_test/flatten_op_test.py | ba5fce81296a516900f9cabf049c0c697338ce54 | [] | no_license | jinStar-kimmy/algorithm | 26c1bc456d5319578110f3d56f8bd19122356603 | 59ae8afd8d133f59a6b8d8cee76790fd9dfe1ff7 | refs/heads/master | 2023-08-28T13:16:45.690232 | 2021-10-20T08:23:46 | 2021-10-20T08:23:46 | 419,217,105 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 960 | py |
from hypothesis import given
import numpy as np
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
class TestFlatten(hu.HypothesisTestCase):
@given(X=hu.tensor(min_dim=2, max_dim=4),
**hu.gcs)
def test_flatten(self, X, gc, dc):
for axis in range(X.ndim + 1):
op = core.CreateOperator(
"Flatten",
["X"],
["Y"],
axis=axis)
def flatten_ref(X):
shape = X.shape
outer = np.prod(shape[:axis]).astype(int)
inner = np.prod(shape[axis:]).astype(int)
return np.copy(X).reshape(outer, inner),
self.assertReferenceChecks(gc, op, [X], flatten_ref)
# Check over multiple devices
self.assertDeviceChecks(dc, op, [X], [0])


if __name__ == "__main__":
import unittest
unittest.main()
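
# ---- Added note (not part of the original test) ----
# For reference: Caffe2's Flatten reshapes an input of shape (d0, ..., dn) into
# a 2-D tensor of shape (prod(shape[:axis]), prod(shape[axis:])), which is what
# flatten_ref computes above; e.g. a (2, 3, 4) input flattened at axis=1 yields
# shape (2, 12), and axis=0 yields (1, 24).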
| [
"[email protected]"
]
| |
7942307b39359f8d6f113c7197dbd8984a6e6eab | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | src/azure-cli/azure/cli/command_modules/network/aaz/latest/network/service_endpoint/policy/_list.py | 89e3a49e6bdaa393add44d973ce1ee228c59f2ef | ["MIT", "BSD-3-Clause", "LGPL-2.0-or-later", "GPL-1.0-or-later", "MPL-2.0", "LGPL-2.1-only", "Apache-2.0", "LGPL-2.1-or-later", "BSD-2-Clause"] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 102,260 | py |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network service-endpoint policy list",
)
class List(AAZCommand):
"""List service endpoint policies.
:example: List service endpoint policies.
az network service-endpoint policy list --resource-group MyResourceGroup
"""
_aaz_info = {
"version": "2021-08-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/providers/microsoft.network/serviceendpointpolicies", "2021-08-01"],
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/serviceendpointpolicies", "2021-08-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
return self.build_paging(self._execute_operations, self._output)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.resource_group = AAZResourceGroupNameArg()
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
condition_0 = has_value(self.ctx.args.resource_group) and has_value(self.ctx.subscription_id)
condition_1 = has_value(self.ctx.subscription_id) and has_value(self.ctx.args.resource_group) is not True
if condition_0:
self.ServiceEndpointPoliciesListByResourceGroup(ctx=self.ctx)()
if condition_1:
self.ServiceEndpointPoliciesList(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
next_link = self.deserialize_output(self.ctx.vars.instance.next_link)
return result, next_link
class ServiceEndpointPoliciesListByResourceGroup(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2021-08-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
flags={"read_only": True},
)
_schema_on_200.value = AAZListType()
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_ListHelper._build_schema_service_endpoint_policy_read(value.Element)
return cls._schema_on_200
class ServiceEndpointPoliciesList(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/providers/Microsoft.Network/ServiceEndpointPolicies",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2021-08-01",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
flags={"read_only": True},
)
_schema_on_200.value = AAZListType()
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_ListHelper._build_schema_service_endpoint_policy_read(value.Element)
return cls._schema_on_200
class _ListHelper:
"""Helper class for List"""
_schema_application_security_group_read = None
@classmethod
def _build_schema_application_security_group_read(cls, _schema):
if cls._schema_application_security_group_read is not None:
_schema.etag = cls._schema_application_security_group_read.etag
_schema.id = cls._schema_application_security_group_read.id
_schema.location = cls._schema_application_security_group_read.location
_schema.name = cls._schema_application_security_group_read.name
_schema.properties = cls._schema_application_security_group_read.properties
_schema.tags = cls._schema_application_security_group_read.tags
_schema.type = cls._schema_application_security_group_read.type
return
cls._schema_application_security_group_read = _schema_application_security_group_read = AAZObjectType()
application_security_group_read = _schema_application_security_group_read
application_security_group_read.etag = AAZStrType(
flags={"read_only": True},
)
application_security_group_read.id = AAZStrType()
application_security_group_read.location = AAZStrType()
application_security_group_read.name = AAZStrType(
flags={"read_only": True},
)
application_security_group_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
application_security_group_read.tags = AAZDictType()
application_security_group_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_application_security_group_read.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
tags = _schema_application_security_group_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_application_security_group_read.etag
_schema.id = cls._schema_application_security_group_read.id
_schema.location = cls._schema_application_security_group_read.location
_schema.name = cls._schema_application_security_group_read.name
_schema.properties = cls._schema_application_security_group_read.properties
_schema.tags = cls._schema_application_security_group_read.tags
_schema.type = cls._schema_application_security_group_read.type
_schema_extended_location_read = None
@classmethod
def _build_schema_extended_location_read(cls, _schema):
if cls._schema_extended_location_read is not None:
_schema.name = cls._schema_extended_location_read.name
_schema.type = cls._schema_extended_location_read.type
return
cls._schema_extended_location_read = _schema_extended_location_read = AAZObjectType()
extended_location_read = _schema_extended_location_read
extended_location_read.name = AAZStrType()
extended_location_read.type = AAZStrType()
_schema.name = cls._schema_extended_location_read.name
_schema.type = cls._schema_extended_location_read.type
_schema_frontend_ip_configuration_read = None
@classmethod
def _build_schema_frontend_ip_configuration_read(cls, _schema):
if cls._schema_frontend_ip_configuration_read is not None:
_schema.etag = cls._schema_frontend_ip_configuration_read.etag
_schema.id = cls._schema_frontend_ip_configuration_read.id
_schema.name = cls._schema_frontend_ip_configuration_read.name
_schema.properties = cls._schema_frontend_ip_configuration_read.properties
_schema.type = cls._schema_frontend_ip_configuration_read.type
_schema.zones = cls._schema_frontend_ip_configuration_read.zones
return
cls._schema_frontend_ip_configuration_read = _schema_frontend_ip_configuration_read = AAZObjectType()
frontend_ip_configuration_read = _schema_frontend_ip_configuration_read
frontend_ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
frontend_ip_configuration_read.id = AAZStrType()
frontend_ip_configuration_read.name = AAZStrType()
frontend_ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
frontend_ip_configuration_read.type = AAZStrType(
flags={"read_only": True},
)
frontend_ip_configuration_read.zones = AAZListType()
properties = _schema_frontend_ip_configuration_read.properties
properties.gateway_load_balancer = AAZObjectType(
serialized_name="gatewayLoadBalancer",
)
cls._build_schema_sub_resource_read(properties.gateway_load_balancer)
properties.inbound_nat_pools = AAZListType(
serialized_name="inboundNatPools",
flags={"read_only": True},
)
properties.inbound_nat_rules = AAZListType(
serialized_name="inboundNatRules",
flags={"read_only": True},
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.outbound_rules = AAZListType(
serialized_name="outboundRules",
flags={"read_only": True},
)
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.public_ip_prefix = AAZObjectType(
serialized_name="publicIPPrefix",
)
cls._build_schema_sub_resource_read(properties.public_ip_prefix)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
inbound_nat_pools = _schema_frontend_ip_configuration_read.properties.inbound_nat_pools
inbound_nat_pools.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_pools.Element)
inbound_nat_rules = _schema_frontend_ip_configuration_read.properties.inbound_nat_rules
inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_rules.Element)
load_balancing_rules = _schema_frontend_ip_configuration_read.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
outbound_rules = _schema_frontend_ip_configuration_read.properties.outbound_rules
outbound_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(outbound_rules.Element)
zones = _schema_frontend_ip_configuration_read.zones
zones.Element = AAZStrType()
_schema.etag = cls._schema_frontend_ip_configuration_read.etag
_schema.id = cls._schema_frontend_ip_configuration_read.id
_schema.name = cls._schema_frontend_ip_configuration_read.name
_schema.properties = cls._schema_frontend_ip_configuration_read.properties
_schema.type = cls._schema_frontend_ip_configuration_read.type
_schema.zones = cls._schema_frontend_ip_configuration_read.zones
_schema_ip_configuration_read = None
@classmethod
def _build_schema_ip_configuration_read(cls, _schema):
if cls._schema_ip_configuration_read is not None:
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties
return
cls._schema_ip_configuration_read = _schema_ip_configuration_read = AAZObjectType()
ip_configuration_read = _schema_ip_configuration_read
ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
ip_configuration_read.id = AAZStrType()
ip_configuration_read.name = AAZStrType()
ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_ip_configuration_read.properties
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties

    _schema_network_interface_ip_configuration_read = None

    @classmethod
def _build_schema_network_interface_ip_configuration_read(cls, _schema):
if cls._schema_network_interface_ip_configuration_read is not None:
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
_schema.type = cls._schema_network_interface_ip_configuration_read.type
return
cls._schema_network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read = AAZObjectType()
network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read
network_interface_ip_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_ip_configuration_read.id = AAZStrType()
network_interface_ip_configuration_read.name = AAZStrType()
network_interface_ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_ip_configuration_read.type = AAZStrType()
properties = _schema_network_interface_ip_configuration_read.properties
properties.application_gateway_backend_address_pools = AAZListType(
serialized_name="applicationGatewayBackendAddressPools",
)
properties.application_security_groups = AAZListType(
serialized_name="applicationSecurityGroups",
)
properties.gateway_load_balancer = AAZObjectType(
serialized_name="gatewayLoadBalancer",
)
cls._build_schema_sub_resource_read(properties.gateway_load_balancer)
properties.load_balancer_backend_address_pools = AAZListType(
serialized_name="loadBalancerBackendAddressPools",
)
properties.load_balancer_inbound_nat_rules = AAZListType(
serialized_name="loadBalancerInboundNatRules",
)
properties.primary = AAZBoolType()
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.private_link_connection_properties = AAZObjectType(
serialized_name="privateLinkConnectionProperties",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
properties.virtual_network_taps = AAZListType(
serialized_name="virtualNetworkTaps",
)
application_gateway_backend_address_pools = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools
application_gateway_backend_address_pools.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties
properties.backend_addresses = AAZListType(
serialized_name="backendAddresses",
)
properties.backend_ip_configurations = AAZListType(
serialized_name="backendIPConfigurations",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
backend_addresses = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_addresses
backend_addresses.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_addresses.Element
_element.fqdn = AAZStrType()
_element.ip_address = AAZStrType(
serialized_name="ipAddress",
)
backend_ip_configurations = _schema_network_interface_ip_configuration_read.properties.application_gateway_backend_address_pools.Element.properties.backend_ip_configurations
backend_ip_configurations.Element = AAZObjectType()
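        # Self-referential: backend IP configurations reuse this very schema,
        # so the call below is satisfied from the class-level cache.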
cls._build_schema_network_interface_ip_configuration_read(backend_ip_configurations.Element)
application_security_groups = _schema_network_interface_ip_configuration_read.properties.application_security_groups
application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(application_security_groups.Element)
load_balancer_backend_address_pools = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools
load_balancer_backend_address_pools.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties
properties.backend_ip_configurations = AAZListType(
serialized_name="backendIPConfigurations",
flags={"read_only": True},
)
properties.drain_period_in_seconds = AAZIntType(
serialized_name="drainPeriodInSeconds",
)
properties.inbound_nat_rules = AAZListType(
serialized_name="inboundNatRules",
flags={"read_only": True},
)
properties.load_balancer_backend_addresses = AAZListType(
serialized_name="loadBalancerBackendAddresses",
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.location = AAZStrType()
properties.outbound_rule = AAZObjectType(
serialized_name="outboundRule",
)
cls._build_schema_sub_resource_read(properties.outbound_rule)
properties.outbound_rules = AAZListType(
serialized_name="outboundRules",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.tunnel_interfaces = AAZListType(
serialized_name="tunnelInterfaces",
)
backend_ip_configurations = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.backend_ip_configurations
backend_ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(backend_ip_configurations.Element)
inbound_nat_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.inbound_nat_rules
inbound_nat_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(inbound_nat_rules.Element)
load_balancer_backend_addresses = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancer_backend_addresses
load_balancer_backend_addresses.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancer_backend_addresses.Element
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancer_backend_addresses.Element.properties
properties.admin_state = AAZStrType(
serialized_name="adminState",
)
properties.inbound_nat_rules_port_mapping = AAZListType(
serialized_name="inboundNatRulesPortMapping",
flags={"read_only": True},
)
properties.ip_address = AAZStrType(
serialized_name="ipAddress",
)
properties.load_balancer_frontend_ip_configuration = AAZObjectType(
serialized_name="loadBalancerFrontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.load_balancer_frontend_ip_configuration)
properties.network_interface_ip_configuration = AAZObjectType(
serialized_name="networkInterfaceIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.network_interface_ip_configuration)
properties.subnet = AAZObjectType()
cls._build_schema_sub_resource_read(properties.subnet)
properties.virtual_network = AAZObjectType(
serialized_name="virtualNetwork",
)
cls._build_schema_sub_resource_read(properties.virtual_network)
inbound_nat_rules_port_mapping = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancer_backend_addresses.Element.properties.inbound_nat_rules_port_mapping
inbound_nat_rules_port_mapping.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancer_backend_addresses.Element.properties.inbound_nat_rules_port_mapping.Element
_element.backend_port = AAZIntType(
serialized_name="backendPort",
)
_element.frontend_port = AAZIntType(
serialized_name="frontendPort",
)
_element.inbound_nat_rule_name = AAZStrType(
serialized_name="inboundNatRuleName",
)
load_balancing_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
outbound_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.outbound_rules
outbound_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(outbound_rules.Element)
tunnel_interfaces = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.tunnel_interfaces
tunnel_interfaces.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.tunnel_interfaces.Element
_element.identifier = AAZIntType()
_element.port = AAZIntType()
_element.protocol = AAZStrType()
_element.type = AAZStrType()
load_balancer_inbound_nat_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules
load_balancer_inbound_nat_rules.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules.Element.properties
properties.backend_address_pool = AAZObjectType(
serialized_name="backendAddressPool",
)
cls._build_schema_sub_resource_read(properties.backend_address_pool)
properties.backend_ip_configuration = AAZObjectType(
serialized_name="backendIPConfiguration",
)
cls._build_schema_network_interface_ip_configuration_read(properties.backend_ip_configuration)
properties.backend_port = AAZIntType(
serialized_name="backendPort",
)
properties.enable_floating_ip = AAZBoolType(
serialized_name="enableFloatingIP",
)
properties.enable_tcp_reset = AAZBoolType(
serialized_name="enableTcpReset",
)
properties.frontend_ip_configuration = AAZObjectType(
serialized_name="frontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.frontend_ip_configuration)
properties.frontend_port = AAZIntType(
serialized_name="frontendPort",
)
properties.frontend_port_range_end = AAZIntType(
serialized_name="frontendPortRangeEnd",
)
properties.frontend_port_range_start = AAZIntType(
serialized_name="frontendPortRangeStart",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.protocol = AAZStrType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
private_link_connection_properties = _schema_network_interface_ip_configuration_read.properties.private_link_connection_properties
private_link_connection_properties.fqdns = AAZListType(
flags={"read_only": True},
)
private_link_connection_properties.group_id = AAZStrType(
serialized_name="groupId",
flags={"read_only": True},
)
private_link_connection_properties.required_member_name = AAZStrType(
serialized_name="requiredMemberName",
flags={"read_only": True},
)
fqdns = _schema_network_interface_ip_configuration_read.properties.private_link_connection_properties.fqdns
fqdns.Element = AAZStrType()
virtual_network_taps = _schema_network_interface_ip_configuration_read.properties.virtual_network_taps
virtual_network_taps.Element = AAZObjectType()
cls._build_schema_virtual_network_tap_read(virtual_network_taps.Element)
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
_schema.type = cls._schema_network_interface_ip_configuration_read.type

    _schema_network_interface_tap_configuration_read = None

    @classmethod
def _build_schema_network_interface_tap_configuration_read(cls, _schema):
if cls._schema_network_interface_tap_configuration_read is not None:
_schema.etag = cls._schema_network_interface_tap_configuration_read.etag
_schema.id = cls._schema_network_interface_tap_configuration_read.id
_schema.name = cls._schema_network_interface_tap_configuration_read.name
_schema.properties = cls._schema_network_interface_tap_configuration_read.properties
_schema.type = cls._schema_network_interface_tap_configuration_read.type
return
cls._schema_network_interface_tap_configuration_read = _schema_network_interface_tap_configuration_read = AAZObjectType()
network_interface_tap_configuration_read = _schema_network_interface_tap_configuration_read
network_interface_tap_configuration_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_tap_configuration_read.id = AAZStrType()
network_interface_tap_configuration_read.name = AAZStrType()
network_interface_tap_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_tap_configuration_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_tap_configuration_read.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.virtual_network_tap = AAZObjectType(
serialized_name="virtualNetworkTap",
)
cls._build_schema_virtual_network_tap_read(properties.virtual_network_tap)
_schema.etag = cls._schema_network_interface_tap_configuration_read.etag
_schema.id = cls._schema_network_interface_tap_configuration_read.id
_schema.name = cls._schema_network_interface_tap_configuration_read.name
_schema.properties = cls._schema_network_interface_tap_configuration_read.properties
_schema.type = cls._schema_network_interface_tap_configuration_read.type

    _schema_network_interface_read = None

    @classmethod
def _build_schema_network_interface_read(cls, _schema):
if cls._schema_network_interface_read is not None:
_schema.etag = cls._schema_network_interface_read.etag
_schema.extended_location = cls._schema_network_interface_read.extended_location
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type
return
cls._schema_network_interface_read = _schema_network_interface_read = AAZObjectType()
network_interface_read = _schema_network_interface_read
network_interface_read.etag = AAZStrType(
flags={"read_only": True},
)
network_interface_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(network_interface_read.extended_location)
network_interface_read.id = AAZStrType()
network_interface_read.location = AAZStrType()
network_interface_read.name = AAZStrType(
flags={"read_only": True},
)
network_interface_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_read.tags = AAZDictType()
network_interface_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties
properties.auxiliary_mode = AAZStrType(
serialized_name="auxiliaryMode",
)
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.dscp_configuration = AAZObjectType(
serialized_name="dscpConfiguration",
)
cls._build_schema_sub_resource_read(properties.dscp_configuration)
properties.enable_accelerated_networking = AAZBoolType(
serialized_name="enableAcceleratedNetworking",
)
properties.enable_ip_forwarding = AAZBoolType(
serialized_name="enableIPForwarding",
)
properties.hosted_workloads = AAZListType(
serialized_name="hostedWorkloads",
flags={"read_only": True},
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.mac_address = AAZStrType(
serialized_name="macAddress",
flags={"read_only": True},
)
properties.migration_phase = AAZStrType(
serialized_name="migrationPhase",
)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.nic_type = AAZStrType(
serialized_name="nicType",
)
properties.primary = AAZBoolType(
flags={"read_only": True},
)
properties.private_endpoint = AAZObjectType(
serialized_name="privateEndpoint",
)
cls._build_schema_private_endpoint_read(properties.private_endpoint)
properties.private_link_service = AAZObjectType(
serialized_name="privateLinkService",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.tap_configurations = AAZListType(
serialized_name="tapConfigurations",
flags={"read_only": True},
)
properties.virtual_machine = AAZObjectType(
serialized_name="virtualMachine",
)
cls._build_schema_sub_resource_read(properties.virtual_machine)
properties.vnet_encryption_supported = AAZBoolType(
serialized_name="vnetEncryptionSupported",
flags={"read_only": True},
)
properties.workload_type = AAZStrType(
serialized_name="workloadType",
)
dns_settings = _schema_network_interface_read.properties.dns_settings
dns_settings.applied_dns_servers = AAZListType(
serialized_name="appliedDnsServers",
flags={"read_only": True},
)
dns_settings.dns_servers = AAZListType(
serialized_name="dnsServers",
)
dns_settings.internal_dns_name_label = AAZStrType(
serialized_name="internalDnsNameLabel",
)
dns_settings.internal_domain_name_suffix = AAZStrType(
serialized_name="internalDomainNameSuffix",
flags={"read_only": True},
)
dns_settings.internal_fqdn = AAZStrType(
serialized_name="internalFqdn",
flags={"read_only": True},
)
applied_dns_servers = _schema_network_interface_read.properties.dns_settings.applied_dns_servers
applied_dns_servers.Element = AAZStrType()
dns_servers = _schema_network_interface_read.properties.dns_settings.dns_servers
dns_servers.Element = AAZStrType()
hosted_workloads = _schema_network_interface_read.properties.hosted_workloads
hosted_workloads.Element = AAZStrType()
ip_configurations = _schema_network_interface_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(ip_configurations.Element)
private_link_service = _schema_network_interface_read.properties.private_link_service
private_link_service.etag = AAZStrType(
flags={"read_only": True},
)
private_link_service.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(private_link_service.extended_location)
private_link_service.id = AAZStrType()
private_link_service.location = AAZStrType()
private_link_service.name = AAZStrType(
flags={"read_only": True},
)
private_link_service.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_link_service.tags = AAZDictType()
private_link_service.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties
properties.alias = AAZStrType(
flags={"read_only": True},
)
properties.auto_approval = AAZObjectType(
serialized_name="autoApproval",
)
properties.enable_proxy_protocol = AAZBoolType(
serialized_name="enableProxyProtocol",
)
properties.fqdns = AAZListType()
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.load_balancer_frontend_ip_configurations = AAZListType(
serialized_name="loadBalancerFrontendIpConfigurations",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.private_endpoint_connections = AAZListType(
serialized_name="privateEndpointConnections",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.visibility = AAZObjectType()
auto_approval = _schema_network_interface_read.properties.private_link_service.properties.auto_approval
auto_approval.subscriptions = AAZListType()
subscriptions = _schema_network_interface_read.properties.private_link_service.properties.auto_approval.subscriptions
subscriptions.Element = AAZStrType()
fqdns = _schema_network_interface_read.properties.private_link_service.properties.fqdns
fqdns.Element = AAZStrType()
ip_configurations = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
_element = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties.ip_configurations.Element.properties
properties.primary = AAZBoolType()
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_address_version = AAZStrType(
serialized_name="privateIPAddressVersion",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
load_balancer_frontend_ip_configurations = _schema_network_interface_read.properties.private_link_service.properties.load_balancer_frontend_ip_configurations
load_balancer_frontend_ip_configurations.Element = AAZObjectType()
cls._build_schema_frontend_ip_configuration_read(load_balancer_frontend_ip_configurations.Element)
network_interfaces = _schema_network_interface_read.properties.private_link_service.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
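        # A private link service lists network interfaces, so the network
        # interface schema references itself here via the cached instance.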
cls._build_schema_network_interface_read(network_interfaces.Element)
private_endpoint_connections = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections
private_endpoint_connections.Element = AAZObjectType()
_element = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties.private_link_service.properties.private_endpoint_connections.Element.properties
properties.link_identifier = AAZStrType(
serialized_name="linkIdentifier",
flags={"read_only": True},
)
properties.private_endpoint = AAZObjectType(
serialized_name="privateEndpoint",
)
cls._build_schema_private_endpoint_read(properties.private_endpoint)
properties.private_link_service_connection_state = AAZObjectType(
serialized_name="privateLinkServiceConnectionState",
)
cls._build_schema_private_link_service_connection_state_read(properties.private_link_service_connection_state)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
visibility = _schema_network_interface_read.properties.private_link_service.properties.visibility
visibility.subscriptions = AAZListType()
subscriptions = _schema_network_interface_read.properties.private_link_service.properties.visibility.subscriptions
subscriptions.Element = AAZStrType()
tags = _schema_network_interface_read.properties.private_link_service.tags
tags.Element = AAZStrType()
tap_configurations = _schema_network_interface_read.properties.tap_configurations
tap_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_tap_configuration_read(tap_configurations.Element)
tags = _schema_network_interface_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_interface_read.etag
_schema.extended_location = cls._schema_network_interface_read.extended_location
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type

    _schema_network_security_group_read = None

    @classmethod
def _build_schema_network_security_group_read(cls, _schema):
if cls._schema_network_security_group_read is not None:
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type
return
cls._schema_network_security_group_read = _schema_network_security_group_read = AAZObjectType()
network_security_group_read = _schema_network_security_group_read
network_security_group_read.etag = AAZStrType(
flags={"read_only": True},
)
network_security_group_read.id = AAZStrType()
network_security_group_read.location = AAZStrType()
network_security_group_read.name = AAZStrType(
flags={"read_only": True},
)
network_security_group_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_security_group_read.tags = AAZDictType()
network_security_group_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_security_group_read.properties
properties.default_security_rules = AAZListType(
serialized_name="defaultSecurityRules",
flags={"read_only": True},
)
properties.flow_logs = AAZListType(
serialized_name="flowLogs",
flags={"read_only": True},
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.security_rules = AAZListType(
serialized_name="securityRules",
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
default_security_rules = _schema_network_security_group_read.properties.default_security_rules
default_security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(default_security_rules.Element)
flow_logs = _schema_network_security_group_read.properties.flow_logs
flow_logs.Element = AAZObjectType()
_element = _schema_network_security_group_read.properties.flow_logs.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.location = AAZStrType()
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.tags = AAZDictType()
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_security_group_read.properties.flow_logs.Element.properties
properties.enabled = AAZBoolType()
properties.flow_analytics_configuration = AAZObjectType(
serialized_name="flowAnalyticsConfiguration",
)
properties.format = AAZObjectType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.retention_policy = AAZObjectType(
serialized_name="retentionPolicy",
)
properties.storage_id = AAZStrType(
serialized_name="storageId",
flags={"required": True},
)
properties.target_resource_guid = AAZStrType(
serialized_name="targetResourceGuid",
flags={"read_only": True},
)
properties.target_resource_id = AAZStrType(
serialized_name="targetResourceId",
flags={"required": True},
)
flow_analytics_configuration = _schema_network_security_group_read.properties.flow_logs.Element.properties.flow_analytics_configuration
flow_analytics_configuration.network_watcher_flow_analytics_configuration = AAZObjectType(
serialized_name="networkWatcherFlowAnalyticsConfiguration",
)
network_watcher_flow_analytics_configuration = _schema_network_security_group_read.properties.flow_logs.Element.properties.flow_analytics_configuration.network_watcher_flow_analytics_configuration
network_watcher_flow_analytics_configuration.enabled = AAZBoolType()
network_watcher_flow_analytics_configuration.traffic_analytics_interval = AAZIntType(
serialized_name="trafficAnalyticsInterval",
)
network_watcher_flow_analytics_configuration.workspace_id = AAZStrType(
serialized_name="workspaceId",
)
network_watcher_flow_analytics_configuration.workspace_region = AAZStrType(
serialized_name="workspaceRegion",
)
network_watcher_flow_analytics_configuration.workspace_resource_id = AAZStrType(
serialized_name="workspaceResourceId",
)
format = _schema_network_security_group_read.properties.flow_logs.Element.properties.format
format.type = AAZStrType()
format.version = AAZIntType()
retention_policy = _schema_network_security_group_read.properties.flow_logs.Element.properties.retention_policy
retention_policy.days = AAZIntType()
retention_policy.enabled = AAZBoolType()
tags = _schema_network_security_group_read.properties.flow_logs.Element.tags
tags.Element = AAZStrType()
network_interfaces = _schema_network_security_group_read.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
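        # NSG, network interface, and subnet schemas form reference cycles
        # (NSG -> NIC -> NSG, NSG -> subnet -> NSG); the class-level caches
        # keep these cross-builder calls from recursing indefinitely.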
cls._build_schema_network_interface_read(network_interfaces.Element)
security_rules = _schema_network_security_group_read.properties.security_rules
security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(security_rules.Element)
subnets = _schema_network_security_group_read.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_network_security_group_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type

    _schema_private_endpoint_read = None

    @classmethod
def _build_schema_private_endpoint_read(cls, _schema):
if cls._schema_private_endpoint_read is not None:
_schema.etag = cls._schema_private_endpoint_read.etag
_schema.extended_location = cls._schema_private_endpoint_read.extended_location
_schema.id = cls._schema_private_endpoint_read.id
_schema.location = cls._schema_private_endpoint_read.location
_schema.name = cls._schema_private_endpoint_read.name
_schema.properties = cls._schema_private_endpoint_read.properties
_schema.tags = cls._schema_private_endpoint_read.tags
_schema.type = cls._schema_private_endpoint_read.type
return
cls._schema_private_endpoint_read = _schema_private_endpoint_read = AAZObjectType()
private_endpoint_read = _schema_private_endpoint_read
private_endpoint_read.etag = AAZStrType(
flags={"read_only": True},
)
private_endpoint_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(private_endpoint_read.extended_location)
private_endpoint_read.id = AAZStrType()
private_endpoint_read.location = AAZStrType()
private_endpoint_read.name = AAZStrType(
flags={"read_only": True},
)
private_endpoint_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_endpoint_read.tags = AAZDictType()
private_endpoint_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_endpoint_read.properties
properties.application_security_groups = AAZListType(
serialized_name="applicationSecurityGroups",
)
properties.custom_dns_configs = AAZListType(
serialized_name="customDnsConfigs",
)
properties.custom_network_interface_name = AAZStrType(
serialized_name="customNetworkInterfaceName",
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.manual_private_link_service_connections = AAZListType(
serialized_name="manualPrivateLinkServiceConnections",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
flags={"read_only": True},
)
properties.private_link_service_connections = AAZListType(
serialized_name="privateLinkServiceConnections",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
application_security_groups = _schema_private_endpoint_read.properties.application_security_groups
application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(application_security_groups.Element)
custom_dns_configs = _schema_private_endpoint_read.properties.custom_dns_configs
custom_dns_configs.Element = AAZObjectType()
_element = _schema_private_endpoint_read.properties.custom_dns_configs.Element
_element.fqdn = AAZStrType()
_element.ip_addresses = AAZListType(
serialized_name="ipAddresses",
)
ip_addresses = _schema_private_endpoint_read.properties.custom_dns_configs.Element.ip_addresses
ip_addresses.Element = AAZStrType()
ip_configurations = _schema_private_endpoint_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
_element = _schema_private_endpoint_read.properties.ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_endpoint_read.properties.ip_configurations.Element.properties
properties.group_id = AAZStrType(
serialized_name="groupId",
)
properties.member_name = AAZStrType(
serialized_name="memberName",
)
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
manual_private_link_service_connections = _schema_private_endpoint_read.properties.manual_private_link_service_connections
manual_private_link_service_connections.Element = AAZObjectType()
cls._build_schema_private_link_service_connection_read(manual_private_link_service_connections.Element)
network_interfaces = _schema_private_endpoint_read.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
cls._build_schema_network_interface_read(network_interfaces.Element)
private_link_service_connections = _schema_private_endpoint_read.properties.private_link_service_connections
private_link_service_connections.Element = AAZObjectType()
cls._build_schema_private_link_service_connection_read(private_link_service_connections.Element)
tags = _schema_private_endpoint_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_private_endpoint_read.etag
_schema.extended_location = cls._schema_private_endpoint_read.extended_location
_schema.id = cls._schema_private_endpoint_read.id
_schema.location = cls._schema_private_endpoint_read.location
_schema.name = cls._schema_private_endpoint_read.name
_schema.properties = cls._schema_private_endpoint_read.properties
_schema.tags = cls._schema_private_endpoint_read.tags
_schema.type = cls._schema_private_endpoint_read.type

    _schema_private_link_service_connection_state_read = None

    @classmethod
def _build_schema_private_link_service_connection_state_read(cls, _schema):
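        # Leaf schema: only plain string fields, no nested builder calls.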
if cls._schema_private_link_service_connection_state_read is not None:
_schema.actions_required = cls._schema_private_link_service_connection_state_read.actions_required
_schema.description = cls._schema_private_link_service_connection_state_read.description
_schema.status = cls._schema_private_link_service_connection_state_read.status
return
cls._schema_private_link_service_connection_state_read = _schema_private_link_service_connection_state_read = AAZObjectType()
private_link_service_connection_state_read = _schema_private_link_service_connection_state_read
private_link_service_connection_state_read.actions_required = AAZStrType(
serialized_name="actionsRequired",
)
private_link_service_connection_state_read.description = AAZStrType()
private_link_service_connection_state_read.status = AAZStrType()
_schema.actions_required = cls._schema_private_link_service_connection_state_read.actions_required
_schema.description = cls._schema_private_link_service_connection_state_read.description
_schema.status = cls._schema_private_link_service_connection_state_read.status

    _schema_private_link_service_connection_read = None

    @classmethod
def _build_schema_private_link_service_connection_read(cls, _schema):
if cls._schema_private_link_service_connection_read is not None:
_schema.etag = cls._schema_private_link_service_connection_read.etag
_schema.id = cls._schema_private_link_service_connection_read.id
_schema.name = cls._schema_private_link_service_connection_read.name
_schema.properties = cls._schema_private_link_service_connection_read.properties
_schema.type = cls._schema_private_link_service_connection_read.type
return
cls._schema_private_link_service_connection_read = _schema_private_link_service_connection_read = AAZObjectType()
private_link_service_connection_read = _schema_private_link_service_connection_read
private_link_service_connection_read.etag = AAZStrType(
flags={"read_only": True},
)
private_link_service_connection_read.id = AAZStrType()
private_link_service_connection_read.name = AAZStrType()
private_link_service_connection_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
private_link_service_connection_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_private_link_service_connection_read.properties
properties.group_ids = AAZListType(
serialized_name="groupIds",
)
properties.private_link_service_connection_state = AAZObjectType(
serialized_name="privateLinkServiceConnectionState",
)
cls._build_schema_private_link_service_connection_state_read(properties.private_link_service_connection_state)
properties.private_link_service_id = AAZStrType(
serialized_name="privateLinkServiceId",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.request_message = AAZStrType(
serialized_name="requestMessage",
)
group_ids = _schema_private_link_service_connection_read.properties.group_ids
group_ids.Element = AAZStrType()
_schema.etag = cls._schema_private_link_service_connection_read.etag
_schema.id = cls._schema_private_link_service_connection_read.id
_schema.name = cls._schema_private_link_service_connection_read.name
_schema.properties = cls._schema_private_link_service_connection_read.properties
_schema.type = cls._schema_private_link_service_connection_read.type

    _schema_public_ip_address_read = None

    @classmethod
def _build_schema_public_ip_address_read(cls, _schema):
if cls._schema_public_ip_address_read is not None:
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.extended_location = cls._schema_public_ip_address_read.extended_location
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.sku = cls._schema_public_ip_address_read.sku
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
_schema.zones = cls._schema_public_ip_address_read.zones
return
cls._schema_public_ip_address_read = _schema_public_ip_address_read = AAZObjectType()
public_ip_address_read = _schema_public_ip_address_read
public_ip_address_read.etag = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.extended_location = AAZObjectType(
serialized_name="extendedLocation",
)
cls._build_schema_extended_location_read(public_ip_address_read.extended_location)
public_ip_address_read.id = AAZStrType()
public_ip_address_read.location = AAZStrType()
public_ip_address_read.name = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
public_ip_address_read.sku = AAZObjectType()
public_ip_address_read.tags = AAZDictType()
public_ip_address_read.type = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.zones = AAZListType()
properties = _schema_public_ip_address_read.properties
properties.ddos_settings = AAZObjectType(
serialized_name="ddosSettings",
)
properties.delete_option = AAZStrType(
serialized_name="deleteOption",
)
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.ip_address = AAZStrType(
serialized_name="ipAddress",
)
properties.ip_configuration = AAZObjectType(
serialized_name="ipConfiguration",
)
cls._build_schema_ip_configuration_read(properties.ip_configuration)
properties.ip_tags = AAZListType(
serialized_name="ipTags",
)
properties.linked_public_ip_address = AAZObjectType(
serialized_name="linkedPublicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.linked_public_ip_address)
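        # linkedPublicIPAddress above and servicePublicIPAddress below are
        # public IP addresses themselves; these self-recursive builder calls
        # return immediately from the class-level cache.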
properties.migration_phase = AAZStrType(
serialized_name="migrationPhase",
)
properties.nat_gateway = AAZObjectType(
serialized_name="natGateway",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_address_version = AAZStrType(
serialized_name="publicIPAddressVersion",
)
properties.public_ip_allocation_method = AAZStrType(
serialized_name="publicIPAllocationMethod",
)
properties.public_ip_prefix = AAZObjectType(
serialized_name="publicIPPrefix",
)
cls._build_schema_sub_resource_read(properties.public_ip_prefix)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.service_public_ip_address = AAZObjectType(
serialized_name="servicePublicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.service_public_ip_address)
ddos_settings = _schema_public_ip_address_read.properties.ddos_settings
ddos_settings.ddos_custom_policy = AAZObjectType(
serialized_name="ddosCustomPolicy",
)
cls._build_schema_sub_resource_read(ddos_settings.ddos_custom_policy)
ddos_settings.protected_ip = AAZBoolType(
serialized_name="protectedIP",
)
ddos_settings.protection_coverage = AAZStrType(
serialized_name="protectionCoverage",
)
dns_settings = _schema_public_ip_address_read.properties.dns_settings
dns_settings.domain_name_label = AAZStrType(
serialized_name="domainNameLabel",
)
dns_settings.fqdn = AAZStrType()
dns_settings.reverse_fqdn = AAZStrType(
serialized_name="reverseFqdn",
)
ip_tags = _schema_public_ip_address_read.properties.ip_tags
ip_tags.Element = AAZObjectType()
_element = _schema_public_ip_address_read.properties.ip_tags.Element
_element.ip_tag_type = AAZStrType(
serialized_name="ipTagType",
)
_element.tag = AAZStrType()
nat_gateway = _schema_public_ip_address_read.properties.nat_gateway
nat_gateway.etag = AAZStrType(
flags={"read_only": True},
)
nat_gateway.id = AAZStrType()
nat_gateway.location = AAZStrType()
nat_gateway.name = AAZStrType(
flags={"read_only": True},
)
nat_gateway.properties = AAZObjectType(
flags={"client_flatten": True},
)
nat_gateway.sku = AAZObjectType()
nat_gateway.tags = AAZDictType()
nat_gateway.type = AAZStrType(
flags={"read_only": True},
)
nat_gateway.zones = AAZListType()
properties = _schema_public_ip_address_read.properties.nat_gateway.properties
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.public_ip_addresses = AAZListType(
serialized_name="publicIpAddresses",
)
properties.public_ip_prefixes = AAZListType(
serialized_name="publicIpPrefixes",
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
public_ip_addresses = _schema_public_ip_address_read.properties.nat_gateway.properties.public_ip_addresses
public_ip_addresses.Element = AAZObjectType()
cls._build_schema_sub_resource_read(public_ip_addresses.Element)
public_ip_prefixes = _schema_public_ip_address_read.properties.nat_gateway.properties.public_ip_prefixes
public_ip_prefixes.Element = AAZObjectType()
cls._build_schema_sub_resource_read(public_ip_prefixes.Element)
subnets = _schema_public_ip_address_read.properties.nat_gateway.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_sub_resource_read(subnets.Element)
sku = _schema_public_ip_address_read.properties.nat_gateway.sku
sku.name = AAZStrType()
tags = _schema_public_ip_address_read.properties.nat_gateway.tags
tags.Element = AAZStrType()
zones = _schema_public_ip_address_read.properties.nat_gateway.zones
zones.Element = AAZStrType()
sku = _schema_public_ip_address_read.sku
sku.name = AAZStrType()
sku.tier = AAZStrType()
tags = _schema_public_ip_address_read.tags
tags.Element = AAZStrType()
zones = _schema_public_ip_address_read.zones
zones.Element = AAZStrType()
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.extended_location = cls._schema_public_ip_address_read.extended_location
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.sku = cls._schema_public_ip_address_read.sku
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
_schema.zones = cls._schema_public_ip_address_read.zones

    _schema_security_rule_read = None

    @classmethod
def _build_schema_security_rule_read(cls, _schema):
if cls._schema_security_rule_read is not None:
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
_schema.type = cls._schema_security_rule_read.type
return
cls._schema_security_rule_read = _schema_security_rule_read = AAZObjectType()
security_rule_read = _schema_security_rule_read
security_rule_read.etag = AAZStrType(
flags={"read_only": True},
)
security_rule_read.id = AAZStrType()
security_rule_read.name = AAZStrType()
security_rule_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
security_rule_read.type = AAZStrType()
properties = _schema_security_rule_read.properties
properties.access = AAZStrType(
flags={"required": True},
)
properties.description = AAZStrType()
properties.destination_address_prefix = AAZStrType(
serialized_name="destinationAddressPrefix",
)
properties.destination_address_prefixes = AAZListType(
serialized_name="destinationAddressPrefixes",
)
properties.destination_application_security_groups = AAZListType(
serialized_name="destinationApplicationSecurityGroups",
)
properties.destination_port_range = AAZStrType(
serialized_name="destinationPortRange",
)
properties.destination_port_ranges = AAZListType(
serialized_name="destinationPortRanges",
)
properties.direction = AAZStrType(
flags={"required": True},
)
properties.priority = AAZIntType()
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.source_address_prefix = AAZStrType(
serialized_name="sourceAddressPrefix",
)
properties.source_address_prefixes = AAZListType(
serialized_name="sourceAddressPrefixes",
)
properties.source_application_security_groups = AAZListType(
serialized_name="sourceApplicationSecurityGroups",
)
properties.source_port_range = AAZStrType(
serialized_name="sourcePortRange",
)
properties.source_port_ranges = AAZListType(
serialized_name="sourcePortRanges",
)
destination_address_prefixes = _schema_security_rule_read.properties.destination_address_prefixes
destination_address_prefixes.Element = AAZStrType()
destination_application_security_groups = _schema_security_rule_read.properties.destination_application_security_groups
destination_application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(destination_application_security_groups.Element)
destination_port_ranges = _schema_security_rule_read.properties.destination_port_ranges
destination_port_ranges.Element = AAZStrType()
source_address_prefixes = _schema_security_rule_read.properties.source_address_prefixes
source_address_prefixes.Element = AAZStrType()
source_application_security_groups = _schema_security_rule_read.properties.source_application_security_groups
source_application_security_groups.Element = AAZObjectType()
cls._build_schema_application_security_group_read(source_application_security_groups.Element)
source_port_ranges = _schema_security_rule_read.properties.source_port_ranges
source_port_ranges.Element = AAZStrType()
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
_schema.type = cls._schema_security_rule_read.type

    _schema_service_endpoint_policy_read = None

    @classmethod
def _build_schema_service_endpoint_policy_read(cls, _schema):
if cls._schema_service_endpoint_policy_read is not None:
_schema.etag = cls._schema_service_endpoint_policy_read.etag
_schema.id = cls._schema_service_endpoint_policy_read.id
_schema.kind = cls._schema_service_endpoint_policy_read.kind
_schema.location = cls._schema_service_endpoint_policy_read.location
_schema.name = cls._schema_service_endpoint_policy_read.name
_schema.properties = cls._schema_service_endpoint_policy_read.properties
_schema.tags = cls._schema_service_endpoint_policy_read.tags
_schema.type = cls._schema_service_endpoint_policy_read.type
return
cls._schema_service_endpoint_policy_read = _schema_service_endpoint_policy_read = AAZObjectType()
service_endpoint_policy_read = _schema_service_endpoint_policy_read
service_endpoint_policy_read.etag = AAZStrType(
flags={"read_only": True},
)
service_endpoint_policy_read.id = AAZStrType()
service_endpoint_policy_read.kind = AAZStrType(
flags={"read_only": True},
)
service_endpoint_policy_read.location = AAZStrType()
service_endpoint_policy_read.name = AAZStrType(
flags={"read_only": True},
)
service_endpoint_policy_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
service_endpoint_policy_read.tags = AAZDictType()
service_endpoint_policy_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_service_endpoint_policy_read.properties
properties.contextual_service_endpoint_policies = AAZListType(
serialized_name="contextualServiceEndpointPolicies",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.service_alias = AAZStrType(
serialized_name="serviceAlias",
)
properties.service_endpoint_policy_definitions = AAZListType(
serialized_name="serviceEndpointPolicyDefinitions",
)
properties.subnets = AAZListType(
flags={"read_only": True},
)
contextual_service_endpoint_policies = _schema_service_endpoint_policy_read.properties.contextual_service_endpoint_policies
contextual_service_endpoint_policies.Element = AAZStrType()
service_endpoint_policy_definitions = _schema_service_endpoint_policy_read.properties.service_endpoint_policy_definitions
service_endpoint_policy_definitions.Element = AAZObjectType()
_element = _schema_service_endpoint_policy_read.properties.service_endpoint_policy_definitions.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_service_endpoint_policy_read.properties.service_endpoint_policy_definitions.Element.properties
properties.description = AAZStrType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.service = AAZStrType()
properties.service_resources = AAZListType(
serialized_name="serviceResources",
)
service_resources = _schema_service_endpoint_policy_read.properties.service_endpoint_policy_definitions.Element.properties.service_resources
service_resources.Element = AAZStrType()
subnets = _schema_service_endpoint_policy_read.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_service_endpoint_policy_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_service_endpoint_policy_read.etag
_schema.id = cls._schema_service_endpoint_policy_read.id
_schema.kind = cls._schema_service_endpoint_policy_read.kind
_schema.location = cls._schema_service_endpoint_policy_read.location
_schema.name = cls._schema_service_endpoint_policy_read.name
_schema.properties = cls._schema_service_endpoint_policy_read.properties
_schema.tags = cls._schema_service_endpoint_policy_read.tags
_schema.type = cls._schema_service_endpoint_policy_read.type

    _schema_sub_resource_read = None

    @classmethod
def _build_schema_sub_resource_read(cls, _schema):
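        # SubResource is the minimal, id-only reference type; most of the
        # other schemas in this module point at it for cross-resource links.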
if cls._schema_sub_resource_read is not None:
_schema.id = cls._schema_sub_resource_read.id
return
cls._schema_sub_resource_read = _schema_sub_resource_read = AAZObjectType()
sub_resource_read = _schema_sub_resource_read
sub_resource_read.id = AAZStrType()
_schema.id = cls._schema_sub_resource_read.id

    _schema_subnet_read = None

    @classmethod
def _build_schema_subnet_read(cls, _schema):
if cls._schema_subnet_read is not None:
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
_schema.type = cls._schema_subnet_read.type
return
cls._schema_subnet_read = _schema_subnet_read = AAZObjectType()
subnet_read = _schema_subnet_read
subnet_read.etag = AAZStrType(
flags={"read_only": True},
)
subnet_read.id = AAZStrType()
subnet_read.name = AAZStrType()
subnet_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
subnet_read.type = AAZStrType()
properties = _schema_subnet_read.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.address_prefixes = AAZListType(
serialized_name="addressPrefixes",
)
properties.application_gateway_ip_configurations = AAZListType(
serialized_name="applicationGatewayIpConfigurations",
)
properties.delegations = AAZListType()
properties.ip_allocations = AAZListType(
serialized_name="ipAllocations",
)
properties.ip_configuration_profiles = AAZListType(
serialized_name="ipConfigurationProfiles",
flags={"read_only": True},
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
flags={"read_only": True},
)
properties.nat_gateway = AAZObjectType(
serialized_name="natGateway",
)
cls._build_schema_sub_resource_read(properties.nat_gateway)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.private_endpoint_network_policies = AAZStrType(
serialized_name="privateEndpointNetworkPolicies",
)
properties.private_endpoints = AAZListType(
serialized_name="privateEndpoints",
flags={"read_only": True},
)
properties.private_link_service_network_policies = AAZStrType(
serialized_name="privateLinkServiceNetworkPolicies",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.purpose = AAZStrType(
flags={"read_only": True},
)
properties.resource_navigation_links = AAZListType(
serialized_name="resourceNavigationLinks",
flags={"read_only": True},
)
properties.route_table = AAZObjectType(
serialized_name="routeTable",
)
properties.service_association_links = AAZListType(
serialized_name="serviceAssociationLinks",
flags={"read_only": True},
)
properties.service_endpoint_policies = AAZListType(
serialized_name="serviceEndpointPolicies",
)
properties.service_endpoints = AAZListType(
serialized_name="serviceEndpoints",
)
address_prefixes = _schema_subnet_read.properties.address_prefixes
address_prefixes.Element = AAZStrType()
application_gateway_ip_configurations = _schema_subnet_read.properties.application_gateway_ip_configurations
application_gateway_ip_configurations.Element = AAZObjectType()
_element = _schema_subnet_read.properties.application_gateway_ip_configurations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.application_gateway_ip_configurations.Element.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_sub_resource_read(properties.subnet)
delegations = _schema_subnet_read.properties.delegations
delegations.Element = AAZObjectType()
_element = _schema_subnet_read.properties.delegations.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_subnet_read.properties.delegations.Element.properties
properties.actions = AAZListType(
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.service_name = AAZStrType(
serialized_name="serviceName",
)
actions = _schema_subnet_read.properties.delegations.Element.properties.actions
actions.Element = AAZStrType()
ip_allocations = _schema_subnet_read.properties.ip_allocations
ip_allocations.Element = AAZObjectType()
cls._build_schema_sub_resource_read(ip_allocations.Element)
ip_configuration_profiles = _schema_subnet_read.properties.ip_configuration_profiles
ip_configuration_profiles.Element = AAZObjectType()
_element = _schema_subnet_read.properties.ip_configuration_profiles.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.ip_configuration_profiles.Element.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
ip_configurations = _schema_subnet_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_ip_configuration_read(ip_configurations.Element)
private_endpoints = _schema_subnet_read.properties.private_endpoints
private_endpoints.Element = AAZObjectType()
cls._build_schema_private_endpoint_read(private_endpoints.Element)
resource_navigation_links = _schema_subnet_read.properties.resource_navigation_links
resource_navigation_links.Element = AAZObjectType()
_element = _schema_subnet_read.properties.resource_navigation_links.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.resource_navigation_links.Element.properties
properties.link = AAZStrType()
properties.linked_resource_type = AAZStrType(
serialized_name="linkedResourceType",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
route_table = _schema_subnet_read.properties.route_table
route_table.etag = AAZStrType(
flags={"read_only": True},
)
route_table.id = AAZStrType()
route_table.location = AAZStrType()
route_table.name = AAZStrType(
flags={"read_only": True},
)
route_table.properties = AAZObjectType(
flags={"client_flatten": True},
)
route_table.tags = AAZDictType()
route_table.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.route_table.properties
properties.disable_bgp_route_propagation = AAZBoolType(
serialized_name="disableBgpRoutePropagation",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
properties.routes = AAZListType()
properties.subnets = AAZListType(
flags={"read_only": True},
)
routes = _schema_subnet_read.properties.route_table.properties.routes
routes.Element = AAZObjectType()
_element = _schema_subnet_read.properties.route_table.properties.routes.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType()
properties = _schema_subnet_read.properties.route_table.properties.routes.Element.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.has_bgp_override = AAZBoolType(
serialized_name="hasBgpOverride",
)
properties.next_hop_ip_address = AAZStrType(
serialized_name="nextHopIpAddress",
)
properties.next_hop_type = AAZStrType(
serialized_name="nextHopType",
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
subnets = _schema_subnet_read.properties.route_table.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_subnet_read.properties.route_table.tags
tags.Element = AAZStrType()
service_association_links = _schema_subnet_read.properties.service_association_links
service_association_links.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_association_links.Element
_element.etag = AAZStrType(
flags={"read_only": True},
)
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.service_association_links.Element.properties
properties.allow_delete = AAZBoolType(
serialized_name="allowDelete",
)
properties.link = AAZStrType()
properties.linked_resource_type = AAZStrType(
serialized_name="linkedResourceType",
)
properties.locations = AAZListType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
locations = _schema_subnet_read.properties.service_association_links.Element.properties.locations
locations.Element = AAZStrType()
service_endpoint_policies = _schema_subnet_read.properties.service_endpoint_policies
service_endpoint_policies.Element = AAZObjectType()
cls._build_schema_service_endpoint_policy_read(service_endpoint_policies.Element)
service_endpoints = _schema_subnet_read.properties.service_endpoints
service_endpoints.Element = AAZObjectType()
_element = _schema_subnet_read.properties.service_endpoints.Element
_element.locations = AAZListType()
_element.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
_element.service = AAZStrType()
locations = _schema_subnet_read.properties.service_endpoints.Element.locations
locations.Element = AAZStrType()
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
_schema.type = cls._schema_subnet_read.type
_schema_virtual_network_tap_read = None
@classmethod
def _build_schema_virtual_network_tap_read(cls, _schema):
if cls._schema_virtual_network_tap_read is not None:
_schema.etag = cls._schema_virtual_network_tap_read.etag
_schema.id = cls._schema_virtual_network_tap_read.id
_schema.location = cls._schema_virtual_network_tap_read.location
_schema.name = cls._schema_virtual_network_tap_read.name
_schema.properties = cls._schema_virtual_network_tap_read.properties
_schema.tags = cls._schema_virtual_network_tap_read.tags
_schema.type = cls._schema_virtual_network_tap_read.type
return
cls._schema_virtual_network_tap_read = _schema_virtual_network_tap_read = AAZObjectType()
virtual_network_tap_read = _schema_virtual_network_tap_read
virtual_network_tap_read.etag = AAZStrType(
flags={"read_only": True},
)
virtual_network_tap_read.id = AAZStrType()
virtual_network_tap_read.location = AAZStrType()
virtual_network_tap_read.name = AAZStrType(
flags={"read_only": True},
)
virtual_network_tap_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
virtual_network_tap_read.tags = AAZDictType()
virtual_network_tap_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_virtual_network_tap_read.properties
properties.destination_load_balancer_front_end_ip_configuration = AAZObjectType(
serialized_name="destinationLoadBalancerFrontEndIPConfiguration",
)
cls._build_schema_frontend_ip_configuration_read(properties.destination_load_balancer_front_end_ip_configuration)
properties.destination_network_interface_ip_configuration = AAZObjectType(
serialized_name="destinationNetworkInterfaceIPConfiguration",
)
cls._build_schema_network_interface_ip_configuration_read(properties.destination_network_interface_ip_configuration)
properties.destination_port = AAZIntType(
serialized_name="destinationPort",
)
properties.network_interface_tap_configurations = AAZListType(
serialized_name="networkInterfaceTapConfigurations",
flags={"read_only": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
flags={"read_only": True},
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
flags={"read_only": True},
)
network_interface_tap_configurations = _schema_virtual_network_tap_read.properties.network_interface_tap_configurations
network_interface_tap_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_tap_configuration_read(network_interface_tap_configurations.Element)
tags = _schema_virtual_network_tap_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_virtual_network_tap_read.etag
_schema.id = cls._schema_virtual_network_tap_read.id
_schema.location = cls._schema_virtual_network_tap_read.location
_schema.name = cls._schema_virtual_network_tap_read.name
_schema.properties = cls._schema_virtual_network_tap_read.properties
_schema.tags = cls._schema_virtual_network_tap_read.tags
_schema.type = cls._schema_virtual_network_tap_read.type
__all__ = ["List"]
| [ "[email protected]" ] | |
0ad9c543040c66b73a4c0063a4834e93bf347cb7 | 19bcb4784f2ddda66d5ccf9eb268c45baf1f122c | python/nn/results/get_results_aggr.py | 21d15f59a6a51107391466207eeb449a8b19102 | [ "MIT" ]
| permissive | PeterJackNaylor/AutomaticWSI | bb76f9983479b1a1a6d7ad089eb9bb098da91136 | a26f3d8efff005dcf2d1a14705785579ce5484c8 | refs/heads/master | 2023-09-04T09:12:48.946814 | 2023-08-30T09:24:17 | 2023-08-30T09:24:17 | 226,664,370 | 1 | 1 | MIT | 2020-03-19T10:49:47 | 2019-12-08T12:30:52 | Python | UTF-8 | Python | false | false | 1,695 | py |
import os
from glob import glob
import pandas as pd
def get_options():
import argparse
parser = argparse.ArgumentParser(
        description='takes a folder with result files and aggregates them into one table')
parser.add_argument('--path', required=True,
metavar="str", type=str,
help='folder where the result files can be found')
parser.add_argument('--name', required=True,
metavar="str", type=str,
help='name of the output')
args = parser.parse_args()
return args
def fres(st):
return st.split('at_res_')[1].split('___be')[0]
def fmodel(st):
return st.split('_for_')[0]
def fy(st):
return st.split('_for_')[1].split('_at_')[0]
def ftype(st):
return st.split('___best')[1]
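# A sketch of the filename convention the four helpers above assume (the
# concrete name below is hypothetical; only the '_for_', '_at_res_' and
# '___best' markers come from the parsing logic itself):
# >>> name = 'resnet_for_grade_at_res_2___best_test'
# >>> fmodel(name), fy(name), fres(name), ftype(name)
# ('resnet', 'grade', '2', '_test')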
def main():
options = get_options()
files = glob(os.path.join(options.path, '*best.csv'))
stats = pd.DataFrame()
for f in files:
table = pd.read_csv(f)
table = table.drop('Unnamed: 0', axis=1)
        table['counts'] = table.shape[0]
col = os.path.basename(f).split('.')[0]
stats[col + "mean"] = table.mean()
stats[col + "Std.Dev"] = table.std()
# stats[col + "Var"] = table.var()
stats = stats.T
stats['res'] = stats.apply(lambda x: fres(x.name), axis=1)
stats['model'] = stats.apply(lambda x: fmodel(x.name), axis=1)
stats['y'] = stats.apply(lambda x: fy(x.name), axis=1)
stats['type'] = stats.apply(lambda x: ftype(x.name), axis=1)
stats = stats.set_index(['y', 'model', 'res', 'type'])
stats.to_csv(options.name)
if __name__ == '__main__':
main()
| [ "[email protected]" ] | |
17fef6c5d241acb0b7bb102fad34566c88da3eff | ce5ce3764e75774c0b7eab47893987b9f311b1b9 | /.history/moments/views_20210527215915.py | aeb3ce256b14aff202e4f75fe445d9d3152b2c1b | []
| no_license | iSecloud/django-wechat | 410fb8a23b50dc2343c2f0167bbae560bf6e9694 | faaff9bb7f9454a63b2f8411d3577169b470baad | refs/heads/main | 2023-05-15T06:53:16.252422 | 2021-06-07T14:00:35 | 2021-06-07T14:00:35 | 374,685,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py |
from django.shortcuts import render
# Create your views here.
def home(request):
    return render(request, 'homepage.html')
| [ "[email protected]" ] | |
a161fd86ce0916197d8943f40b551acd0ba600bc | 50f0d33b12778f911fe16a4e18d0659936b9086b | /0x05-python-exceptions/4-list_division.py | e67e5211367f6871f31a26fa72ddb8ede0d0caa0 | []
| no_license | monicajoa/holbertonschool-higher_level_programming | 4f4eaa7aa2cad1642e7aed54663cb30eb92e1b4f | 451d20174144ad96fa726a4389c7aae72abf2495 | refs/heads/master | 2022-12-18T00:35:00.682624 | 2020-09-25T05:14:57 | 2020-09-25T05:14:57 | 259,479,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py |
#!/usr/bin/python3
def list_division(my_list_1, my_list_2, list_length):
list_new = []
result = 0
for i in range(list_length):
try:
result = my_list_1[i] / my_list_2[i]
except ZeroDivisionError:
result = 0
print("division by 0")
except IndexError:
result = 0
print("out of range")
except TypeError:
result = 0
print("wrong type")
finally:
list_new.append(result)
return (list_new)
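# Example run (hypothetical inputs):
# >>> list_division([10, 8, 4], [2, 0, 'a'], 4)
# division by 0
# wrong type
# out of range
# [5.0, 0, 0, 0]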
| [ "[email protected]" ] | |
c3535fbb041dc439a9b9f5b1c995eecdee0e1942 | bc82de9237a6aa28fd7623a27b35c02ae8416702 | allennlp/semparse/type_declarations/type_declaration.py | 47683656260ae90fc79dfd7e462348f8ee762724 | [ "Apache-2.0" ]
| permissive | Snnzhao/GrailQA | 78190a8a5bae934c07f4035786f658ef4764c510 | e89e66380402e51bac56f59c7d24d4400bcd11b6 | refs/heads/main | 2023-04-26T19:49:21.683922 | 2021-04-11T09:40:34 | 2021-04-11T09:40:34 | 370,937,323 | 1 | 0 | Apache-2.0 | 2021-05-26T07:00:21 | 2021-05-26T07:00:20 | null | UTF-8 | Python | false | false | 36,337 | py |
"""
This module defines some classes that are generally useful for defining a type system for a new
domain. We inherit the type logic in ``nltk.sem.logic`` and add some functionality on top of it
here. There are two main improvements:
1) Firstly, we allow defining multiple basic types with their own names (see ``NamedBasicType``).
2) Secondly, we allow defining function types that have placeholders in them (see
``PlaceholderType``).
We also extend NLTK's ``LogicParser`` to define a ``DynamicTypeLogicParser`` that knows how to deal
with the two improvements above.
"""
from typing import Dict, List, Optional, Set, Tuple, Union
from collections import defaultdict
import itertools
from overrides import overrides
from nltk.sem.logic import Expression, ApplicationExpression, ConstantExpression, LogicParser, Variable
from nltk.sem.logic import Type, BasicType, ComplexType as NltkComplexType, ANY_TYPE
from allennlp.common.util import START_SYMBOL
class ComplexType(NltkComplexType):
"""
In NLTK, a ``ComplexType`` is a function. These functions are curried, so if you need multiple
arguments for your function you nest ``ComplexTypes``. That currying makes things difficult
for us, and we mitigate the problems by adding ``return_type`` and ``argument_type`` functions
to ``ComplexType``.
"""
def return_type(self) -> Type:
"""
Gives the final return type for this function. If the function takes a single argument,
this is just ``self.second``. If the function takes multiple arguments and returns a basic
type, this should be the final ``.second`` after following all complex types. That is the
implementation here in the base class. If you have a higher-order function that returns a
function itself, you need to override this method.
"""
return_type = self.second
while isinstance(return_type, ComplexType):
return_type = return_type.second
return return_type
def argument_types(self) -> List[Type]:
"""
Gives the types of all arguments to this function. For functions returning a basic type,
we grab all ``.first`` types until ``.second`` is no longer a ``ComplexType``. That logic
is implemented here in the base class. If you have a higher-order function that returns a
function itself, you need to override this method.
"""
arguments = [self.first]
remaining_type = self.second
while isinstance(remaining_type, ComplexType):
arguments.append(remaining_type.first)
remaining_type = remaining_type.second
return arguments
def substitute_any_type(self, basic_types: Set[BasicType]) -> List[Type]:
"""
Takes a set of ``BasicTypes`` and replaces any instances of ``ANY_TYPE`` inside this
complex type with each of those basic types.
"""
substitutions = []
for first_type in substitute_any_type(self.first, basic_types):
for second_type in substitute_any_type(self.second, basic_types):
substitutions.append(self.__class__(first_type, second_type))
return substitutions
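# A minimal sketch of how currying is flattened, using hypothetical basic
# types (NamedBasicType is defined just below):
# >>> a, b, c = NamedBasicType("ATYPE"), NamedBasicType("BTYPE"), NamedBasicType("CTYPE")
# >>> f = ComplexType(a, ComplexType(b, c))  # <a,<b,c>>: two arguments, returns c
# >>> str(f.return_type()), [str(t) for t in f.argument_types()]
# ('c', ['a', 'b'])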
class HigherOrderType(ComplexType):
"""
A higher-order function is a ``ComplexType`` that returns functions. We just override
``return_type`` and ``argument_types`` to make sure that these types are correct.
Parameters
----------
num_arguments : ``int``
How many arguments this function takes before returning a function. We'll go through this
many levels of nested ``ComplexTypes`` before returning the final ``.second`` as our return
type.
first : ``Type``
Passed to NLTK's ComplexType.
second : ``Type``
Passed to NLTK's ComplexType.
"""
def __init__(self, num_arguments: int, first: Type, second: Type) -> None:
super().__init__(first, second)
self.num_arguments = num_arguments
@overrides
def return_type(self) -> Type:
return_type = self.second
for _ in range(self.num_arguments - 1):
return_type = return_type.second
return return_type
@overrides
def argument_types(self) -> List[Type]:
arguments = [self.first]
remaining_type = self.second
for _ in range(self.num_arguments - 1):
arguments.append(remaining_type.first)
remaining_type = remaining_type.second
return arguments
class NamedBasicType(BasicType):
"""
A ``BasicType`` that also takes the name of the type as an argument to its constructor. Type
resolution uses the output of ``__str__`` as well, so basic types with different
representations do not resolve against each other.
Parameters
----------
string_rep : ``str``
String representation of the type.
"""
def __init__(self, string_rep) -> None:
self._string_rep = string_rep
def __str__(self):
# TODO (pradeep): This limits the number of basic types we can have to 26. We may want to
# change this in the future if we extend to domains where we have more than 26 basic types.
if self._string_rep == START_SYMBOL:
return START_SYMBOL
else:
return self._string_rep.lower()[0]
def str(self):
return self._string_rep
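# For example, with a hypothetical basic type:
# >>> row_type = NamedBasicType("ROW")
# >>> str(row_type), row_type.str()
# ('r', 'ROW')
# Only the lowercased first letter is used for the short form, which is why
# distinct basic types must start with distinct letters.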
class MultiMatchNamedBasicType(NamedBasicType):
"""
A ``NamedBasicType`` that matches with any type within a list of ``BasicTypes`` that it takes
as an additional argument during instantiation. We just override the ``matches`` method in
``BasicType`` to match against any of the types given by the list.
Parameters
----------
string_rep : ``str``
String representation of the type, passed to super class.
types_to_match : ``List[BasicType]``
List of types that this type should match with.
"""
def __init__(self,
string_rep,
types_to_match: List[BasicType]) -> None:
super().__init__(string_rep)
self.types_to_match = set(types_to_match)
@overrides
def matches(self, other):
return super().matches(other) or other in self.types_to_match
class PlaceholderType(ComplexType):
"""
``PlaceholderType`` is a ``ComplexType`` that involves placeholders, and thus its type
resolution is context sensitive. This is an abstract class for all placeholder types like
reverse, and, or, argmax, etc.
Note that ANY_TYPE in NLTK's type system doesn't work like a wild card. Once the type of a
variable gets resolved to a specific type, NLTK changes the type of that variable to that
specific type. Hence, what NLTK calls "ANY_TYPE", is essentially a "yet-to-be-decided" type.
This is a problem because we may want the same variable to bind to different types within a
logical form, and using ANY_TYPE for this purpose will cause a resolution failure. For example
the count function may apply to both rows and cells in the same logical form, and making count
of type ``ComplexType(ANY_TYPE, DATE_NUM_TYPE)`` will cause a resolution error. This class lets
you define ``ComplexType`` s with placeholders that are actually wild cards.
    The subclasses of this abstract class need to do three things:
1) Override the property ``_signature`` to define the type signature (this is just the
signature's string representation and will not affect type inference or checking). You will see
this signature in action sequences.
2) Override ``resolve`` to resolve the type appropriately (see the docstring in ``resolve`` for
more information).
3) Override ``get_application_type`` which returns the return type when this type is applied as
a function to an argument of a specified type. For example, if you defined a reverse type by
inheriting from this class, ``get_application_type`` gets an argument of type ``<a,b>``, it
should return ``<b,a>`` .
"""
_signature: str = None
@overrides
def resolve(self, other: Type) -> Optional[Type]:
"""
This method is central to type inference and checking. When a variable's type is being
checked, we compare what we know of its type against what is expected of its type by its
context. The expectation is provided as ``other``. We make sure that there are no
contradictions between this type and other, and return an updated type which may be more
specific than the original type.
For example, say this type is of the function variable F in F(cell), and we start out with
``<?, d>`` (that is, it takes any type and returns ``d`` ). Now we have already resolved
cell to be of type ``e`` . Then ``resolve`` gets called with ``other = <e, ?>`` , because
we know F is a function that took a constant of type ``e`` . When we resolve ``<e, ?>``
against ``<?, d>`` , there will not be a contradiction, because any type can be
successfully resolved against ``?`` . Finally we return ``<e, d>`` as the resolved type.
As a counter example, if we are trying to resolve ``<?, d>`` against ``<?, e>`` , the
resolution fails, and in that case, this method returns ``None`` .
Note that a successful resolution does not imply equality of types because of one of them
may be ANY_TYPE, and so in the subclasses of this type, we explicitly resolve in both
directions.
"""
raise NotImplementedError
def get_application_type(self, argument_type: Type) -> Type:
"""
This method returns the resulting type when this type is applied as a function to an argument of
the given type.
"""
raise NotImplementedError
@overrides
def substitute_any_type(self, basic_types: Set[BasicType]) -> List[Type]:
"""
Placeholders mess with substitutions, so even though this method is implemented in the
superclass, we override it here with a ``NotImplementedError`` to be sure that subclasses
think about what the right thing to do here is, and do it correctly.
"""
raise NotImplementedError
@overrides
def __eq__(self, other) -> bool:
return self.__class__ == other.__class__
@overrides
def matches(self, other) -> bool:
# self == ANY_TYPE = True iff self.first == ANY_TYPE and self.second == ANY_TYPE.
return self == other or self == ANY_TYPE or other == ANY_TYPE
@overrides
def __str__(self):
if self == ANY_TYPE:
# If the type remains unresolved, we return ? instead of its signature.
return str(ANY_TYPE)
else:
return self._signature
@overrides
def str(self):
if self == ANY_TYPE:
return ANY_TYPE.str()
else:
return self._signature
__hash__ = ComplexType.__hash__
class UnaryOpType(PlaceholderType):
"""
``UnaryOpType`` is a kind of ``PlaceholderType`` that takes an argument of any type and returns
an expression of the same type. ``identity`` is an example of this kind of function. The type
signature of ``UnaryOpType`` is <#1, #1>.
Parameters
----------
allowed_substitutions : ``Set[BasicType]``, optional (default=None)
If given, this sets restrictions on the types that can be substituted. That is, say you
have a unary operation that is only permitted for numbers and dates, you can pass those in
here, and we will only consider those types when calling :func:`substitute_any_type`. If
this is ``None``, all basic types are allowed.
signature : ``str``, optional (default='<#1,#1>')
The signature of the operation is what will appear in action sequences that include this
type. The default value is suitable for functions that apply to any type. If you have a
restricted set of allowed substitutions, you likely want to change the type signature to
reflect that.
"""
def __init__(self,
type_: BasicType = ANY_TYPE,
allowed_substitutions: Set[BasicType] = None,
signature: str = '<#1,#1>') -> None:
super().__init__(type_, type_)
self._allowed_substitutions = allowed_substitutions
self._signature = signature
@overrides
def resolve(self, other) -> Optional[Type]:
"""See ``PlaceholderType.resolve``"""
if not isinstance(other, NltkComplexType):
return None
other_first = other.first.resolve(other.second)
if not other_first:
return None
other_second = other.second.resolve(other_first)
if not other_second:
return None
return UnaryOpType(other_first, self._allowed_substitutions, self._signature)
@overrides
def get_application_type(self, argument_type: Type) -> Type:
return argument_type
@overrides
def substitute_any_type(self, basic_types: Set[BasicType]) -> List[Type]:
if self.first != ANY_TYPE:
return [self]
allowed_basic_types = self._allowed_substitutions if self._allowed_substitutions else basic_types
return [UnaryOpType(basic_type, self._allowed_substitutions, self._signature)
for basic_type in allowed_basic_types]
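# A sketch of context-sensitive resolution with a hypothetical basic type:
# resolving the identity placeholder <#1,#1> against a context expecting
# <e,?> specialises both slots, returning a UnaryOpType whose first and
# second types are both e.
# >>> e = NamedBasicType("ENTITY")
# >>> resolved = UnaryOpType().resolve(ComplexType(e, ANY_TYPE))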
class BinaryOpType(PlaceholderType):
"""
``BinaryOpType`` is a function that takes two arguments of the same type and returns an
argument of that type. ``+``, ``-``, ``and`` and ``or`` are examples of this kind of function.
The type signature of ``BinaryOpType`` is ``<#1,<#1,#1>>``.
Parameters
----------
allowed_substitutions : ``Set[BasicType]``, optional (default=None)
If given, this sets restrictions on the types that can be substituted. That is, say you
have a unary operation that is only permitted for numbers and dates, you can pass those in
here, and we will only consider those types when calling :func:`substitute_any_type`. If
this is ``None``, all basic types are allowed.
signature : ``str``, optional (default='<#1,<#1,#1>>')
The signature of the operation is what will appear in action sequences that include this
type. The default value is suitable for functions that apply to any type. If you have a
restricted set of allowed substitutions, you likely want to change the type signature to
reflect that.
"""
def __init__(self,
type_: BasicType = ANY_TYPE,
allowed_substitutions: Set[BasicType] = None,
signature: str = '<#1,<#1,#1>>') -> None:
super().__init__(type_, ComplexType(type_, type_))
self._allowed_substitutions = allowed_substitutions
self._signature = signature
@overrides
def resolve(self, other: Type) -> Optional[Type]:
"""See ``PlaceholderType.resolve``"""
if not isinstance(other, NltkComplexType):
return None
if not isinstance(other.second, NltkComplexType):
return None
other_first = other.first.resolve(other.second.first)
if other_first is None:
return None
other_first = other_first.resolve(other.second.second)
if not other_first:
return None
other_second = other.second.resolve(ComplexType(other_first, other_first))
if not other_second:
return None
return BinaryOpType(other_first, self._allowed_substitutions, self._signature)
@overrides
def get_application_type(self, argument_type: Type) -> Type:
return ComplexType(argument_type, argument_type)
@overrides
def substitute_any_type(self, basic_types: Set[BasicType]) -> List[Type]:
if self.first != ANY_TYPE:
return [self]
allowed_basic_types = self._allowed_substitutions if self._allowed_substitutions else basic_types
return [BinaryOpType(basic_type, self._allowed_substitutions, self._signature)
for basic_type in allowed_basic_types]
class TypedConstantExpression(ConstantExpression):
# pylint: disable=abstract-method
"""
NLTK assumes all constants are of type ``EntityType`` (e) by default. We define this new class
where we can pass a default type to the constructor and use that in the ``_set_type`` method.
"""
def __init__(self, variable, default_type: Type) -> None:
super(TypedConstantExpression, self).__init__(variable)
self._default_type = default_type
@overrides
def _set_type(self, other_type=ANY_TYPE, signature=None) -> None:
if other_type == ANY_TYPE:
super(TypedConstantExpression, self)._set_type(self._default_type, signature)
else:
super(TypedConstantExpression, self)._set_type(other_type, signature)
class DynamicTypeApplicationExpression(ApplicationExpression):
"""
NLTK's ``ApplicationExpression`` (which represents function applications like P(x)) has two
limitations, which we overcome by inheriting from ``ApplicationExpression`` and overriding two
methods.
Firstly, ``ApplicationExpression`` does not handle the case where P's type involves
placeholders (R, V, !=, etc.), which are special cases because their return types depend on the
type of their arguments (x). We override the property ``type`` to redefine the type of the
application.
Secondly, NLTK's variables only bind to entities, and thus the variable types are 'e' by
default. We get around this issue by replacing x with X, whose initial type is ANY_TYPE, and
later gets resolved based on the type signature of the function whose scope the variable
appears in. This variable binding operation is implemented by overriding ``_set_type`` below.
"""
def __init__(self, function: Expression, argument: Expression, variables_with_placeholders: Set[str]) -> None:
super(DynamicTypeApplicationExpression, self).__init__(function, argument)
self._variables_with_placeholders = variables_with_placeholders
@property
def type(self):
# This gets called when the tree is being built by ``LogicParser.parse``. So, we do not
# have access to the type signatures yet. Thus, we need to look at the name of the function
# to return the type.
if not str(self.function) in self._variables_with_placeholders:
return super(DynamicTypeApplicationExpression, self).type
if self.function.type == ANY_TYPE:
return ANY_TYPE
argument_type = self.argument.type
return self.function.type.get_application_type(argument_type)
def _set_type(self, other_type: Type = ANY_TYPE, signature=None) -> None:
"""
We override this method to do just one thing on top of ``ApplicationExpression._set_type``.
In lambda expressions of the form /x F(x), where the function is F and the argument is x,
we can use the type of F to infer the type of x. That is, if F is of type <a, b>, we can
resolve the type of x against a. We do this as the additional step after setting the type
of F(x).
So why does NLTK not already do this? NLTK assumes all variables (x) are of type entity
(e). So it does not have to resolve the type of x anymore. However, this would cause type
inference failures in our case since x can bind to rows, numbers or cells, each of which
has a different type. To deal with this issue, we made X of type ANY_TYPE. Also, LambdaDCS
(and some other languages) contain a var function that indicate the usage of variables
within lambda functions. We map var to V, and made it of type <#1, #1>. We cannot leave X
as ANY_TYPE because that would propagate up the tree. We need to set its type when we have
the information about F. Hence this method. Note that the language may or may not contain
the var function. We deal with both cases below.
"""
super(DynamicTypeApplicationExpression, self)._set_type(other_type, signature)
# TODO(pradeep): Assuming the mapping of "var" function is "V". Do something better.
if isinstance(self.argument, ApplicationExpression) and str(self.argument.function) == "V":
# pylint: disable=protected-access
self.argument.argument._set_type(self.function.type.first)
if str(self.argument) == "X" and str(self.function) != "V":
# pylint: disable=protected-access
self.argument._set_type(self.function.type.first)
class DynamicTypeLogicParser(LogicParser):
"""
``DynamicTypeLogicParser`` is a ``LogicParser`` that can deal with ``NamedBasicType`` and
``PlaceholderType`` appropriately. Our extension here does two things differently.
Firstly, we should handle constants of different types. We do this by passing a dict of format
``{name_prefix: type}`` to the constructor. For example, your domain has entities of types
unicorns and elves, and you have an entity "Phil" of type unicorn, and "Bob" of type "elf". The
names of the two entities should then be "unicorn:phil" and "elf:bob" respectively.
Secondly, since we defined a new kind of ``ApplicationExpression`` above, the ``LogicParser``
should be able to create this new kind of expression.
"""
def __init__(self,
type_check: bool = True,
constant_type_prefixes: Dict[str, BasicType] = None,
type_signatures: Dict[str, Type] = None) -> None:
super(DynamicTypeLogicParser, self).__init__(type_check)
self._constant_type_prefixes = constant_type_prefixes or {}
self._variables_with_placeholders = {name for name, type_ in type_signatures.items()
if isinstance(type_, PlaceholderType)}
@overrides
def make_ApplicationExpression(self, function, argument):
return DynamicTypeApplicationExpression(function, argument, self._variables_with_placeholders)
@overrides
def make_VariableExpression(self, name):
if ":" in name:
prefix = name.split(":")[0]
if prefix in self._constant_type_prefixes:
return TypedConstantExpression(Variable(name), self._constant_type_prefixes[prefix])
else:
raise RuntimeError(f"Unknown prefix: {prefix}. Did you forget to pass it to the constructor?")
return super(DynamicTypeLogicParser, self).make_VariableExpression(name)
def __eq__(self, other):
if isinstance(self, other.__class__):
return self.__dict__ == other.__dict__
return NotImplemented
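# A hypothetical construction sketch, mirroring the unicorn/elf example in
# the docstring above:
# >>> unicorn = NamedBasicType("UNICORN")
# >>> parser = DynamicTypeLogicParser(constant_type_prefixes={'unicorn': unicorn},
# ...                                 type_signatures={})
# The constant "unicorn:phil" would then be parsed as a TypedConstantExpression
# whose default type is the unicorn basic type.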
class NameMapper:
"""
The ``LogicParser`` we use has some naming conventions for functions (i.e. they should start
with an upper case letter, and the remaining characters can only be digits). This means that we
have to internally represent functions with unintuitive names. This class will automatically
give unique names following the convention, and populate central mappings with these names. If
for some reason you need to manually define the alias, you can do so by passing an alias to
`map_name_with_signature`.
Parameters
----------
language_has_lambda : ``bool`` (optional, default=False)
If your language has lambda functions, the word "lambda" needs to be in the name mapping,
mapped to the alias "\". NLTK understands this symbol, and it doesn't need a type signature
for it. Setting this flag to True adds the mapping to `name_mapping`.
alias_prefix : ``str`` (optional, default="F")
The one letter prefix used for all aliases. You do not need to specify it if you have only
instance of this class for you language. If not, you can specify a different prefix for each
name mapping you use for your language.
"""
def __init__(self,
language_has_lambda: bool = False,
alias_prefix: str = "F") -> None:
self.name_mapping: Dict[str, str] = {}
if language_has_lambda:
self.name_mapping["lambda"] = "\\"
self.type_signatures: Dict[str, Type] = {}
assert len(alias_prefix) == 1 and alias_prefix.isalpha(), (f"Invalid alias prefix: {alias_prefix}"
"Needs to be a single upper case character.")
self._alias_prefix = alias_prefix.upper()
self._name_counter = 0
def map_name_with_signature(self,
name: str,
signature: Type,
alias: str = None) -> None:
if name in self.name_mapping:
alias = self.name_mapping[name]
old_signature = self.type_signatures[alias]
if old_signature != signature:
raise RuntimeError(f"{name} already added with signature {old_signature}. "
f"Cannot add it again with {signature}!")
else:
alias = alias or f"{self._alias_prefix}{self._name_counter}"
self._name_counter += 1
self.name_mapping[name] = alias
self.type_signatures[alias] = signature
def get_alias(self, name: str) -> str:
if name not in self.name_mapping:
raise RuntimeError(f"Unmapped name: {name}")
return self.name_mapping[name]
def get_signature(self, name: str) -> Type:
alias = self.get_alias(name)
return self.type_signatures[alias]
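# A small usage sketch (the name and signature here are hypothetical):
# >>> mapper = NameMapper()
# >>> mapper.map_name_with_signature("reverse", reverse_signature)
# >>> mapper.get_alias("reverse")
# 'F0'
# Aliases are generated as F0, F1, ... unless an explicit alias is supplied.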
def substitute_any_type(type_: Type, basic_types: Set[BasicType]) -> List[Type]:
"""
Takes a type and a set of basic types, and substitutes all instances of ANY_TYPE with all
possible basic types and returns a list with all possible combinations. Note that this
substitution is unconstrained. That is, If you have a type with placeholders, <#1,#1> for
example, this may substitute the placeholders with different basic types. In that case, you'd
want to use ``_substitute_placeholder_type`` instead.
"""
if type_ == ANY_TYPE:
return list(basic_types)
if isinstance(type_, BasicType):
return [type_]
# If we've made it this far, we have a ComplexType, and we can just call
# `type_.substitute_any_type()`.
return type_.substitute_any_type(basic_types)
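# For example, with hypothetical basic types e and r:
# substitute_any_type(ANY_TYPE, {e, r}) -> [e, r] (order follows the set), and
# substitute_any_type(ComplexType(ANY_TYPE, e), {e, r}) -> [<e,e>, <r,e>].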
def _make_production_string(source: Type, target: Union[List[Type], Type]) -> str:
return f"{source} -> {target}"
def _get_complex_type_production(complex_type: ComplexType,
multi_match_mapping: Dict[Type, List[Type]]) -> List[Tuple[Type, str]]:
"""
Takes a complex type (without any placeholders), gets its return values, and returns productions
(perhaps each with multiple arguments) that produce the return values. This method also takes
care of ``MultiMatchNamedBasicTypes``. If one of the arguments or the return types is a multi
match type, it gets all the substitutions of those types from ``multi_match_mapping`` and forms
a list with all possible combinations of substitutions. If the complex type passed to this method
has no ``MultiMatchNamedBasicTypes``, the returned list will contain a single tuple. For
example, if the complex is type ``<a,<<b,c>,d>>``, and ``a`` is a multi match type that matches
``e`` and ``f``, this gives the following list of tuples: ``[('d', 'd -> [<a,<<b,c>,d>, e,
<b,c>]), ('d', 'd -> [<a,<<b,c>,d>, f, <b,c>])]`` Note that we assume there will be no
productions from the multi match type, and the list above does not contain ``('d', 'd ->
[<a,<<b,c>,d>, a, <b,c>>]')``.
"""
return_type = complex_type.return_type()
if isinstance(return_type, MultiMatchNamedBasicType):
return_types_matched = list(multi_match_mapping[return_type] if return_type in
multi_match_mapping else return_type.types_to_match)
else:
return_types_matched = [return_type]
arguments = complex_type.argument_types()
argument_types_matched = []
for argument_type in arguments:
if isinstance(argument_type, MultiMatchNamedBasicType):
matched_types = list(multi_match_mapping[argument_type] if argument_type in
multi_match_mapping else argument_type.types_to_match)
argument_types_matched.append(matched_types)
else:
argument_types_matched.append([argument_type])
complex_type_productions: List[Tuple[Type, str]] = []
for matched_return_type in return_types_matched:
for matched_arguments in itertools.product(*argument_types_matched):
complex_type_productions.append((matched_return_type,
_make_production_string(return_type,
[complex_type] + list(matched_arguments))))
return complex_type_productions
def get_valid_actions(name_mapping: Dict[str, str],
type_signatures: Dict[str, Type],
basic_types: Set[Type],
multi_match_mapping: Dict[Type, List[Type]] = None,
valid_starting_types: Set[Type] = None,
num_nested_lambdas: int = 0) -> Dict[str, List[str]]:
"""
Generates all the valid actions starting from each non-terminal. For terminals of a specific
type, we simply add a production from the type to the terminal. For all terminal `functions`,
we additionally add a rule that allows their return type to be generated from an application of
the function. For example, the function ``<e,<r,<d,r>>>``, which takes three arguments and
returns an ``r`` would generate a the production rule ``r -> [<e,<r,<d,r>>>, e, r, d]``.
For functions that do not contain ANY_TYPE or placeholder types, this is straight-forward.
When there are ANY_TYPES or placeholders, we substitute the ANY_TYPE with all possible basic
types, and then produce a similar rule. For example, the identity function, with type
``<#1,#1>`` and basic types ``e`` and ``r``, would produce the rules ``e -> [<#1,#1>, e]`` and
``r -> [<#1,#1>, r]``.
We additionally add a valid action from the start symbol to all ``valid_starting_types``.
Parameters
----------
name_mapping : ``Dict[str, str]``
The mapping of names that appear in your logical form languages to their aliases for NLTK.
If you are getting all valid actions for a type declaration, this can be the
``COMMON_NAME_MAPPING``.
type_signatures : ``Dict[str, Type]``
The mapping from name aliases to their types. If you are getting all valid actions for a
type declaration, this can be the ``COMMON_TYPE_SIGNATURE``.
basic_types : ``Set[Type]``
Set of all basic types in the type declaration.
multi_match_mapping : ``Dict[Type, List[Type]]`` (optional)
A mapping from `MultiMatchNamedBasicTypes` to the types they can match. This may be
different from the type's ``types_to_match`` field based on the context. While building action
sequences that lead to complex types with ``MultiMatchNamedBasicTypes``, if a type does not
occur in this mapping, the default set of ``types_to_match`` for that type will be used.
valid_starting_types : ``Set[Type]``, optional
These are the valid starting types for your grammar; e.g., what types are we allowed to
parse expressions into? We will add a "START -> TYPE" rule for each of these types. If
this is ``None``, we default to using ``basic_types``.
num_nested_lambdas : ``int`` (optional)
Does the language used permit lambda expressions? And if so, how many nested lambdas do we
need to worry about? We'll add rules like "<r,d> -> ['lambda x', d]" for all complex
types, where the variable is determined by the number of nestings. We currently only
permit up to three levels of nesting, just for ease of implementation.
"""
valid_actions: Dict[str, Set[str]] = defaultdict(set)
valid_starting_types = valid_starting_types or basic_types
for type_ in valid_starting_types:
valid_actions[str(START_TYPE)].add(_make_production_string(START_TYPE, type_))
complex_types = set()
for name, alias in name_mapping.items():
# Lambda functions and variables associated with them get produced in specific contexts. So
# we do not add them to ``valid_actions`` here, and let ``GrammarState`` deal with it.
# ``var`` is a special function that some languages (like LambdaDCS) use within lambda
# functions to indicate the use of a variable (eg.: ``(lambda x (fb:row.row.year (var x)))``)
# We do not have to produce this function outside the scope of lambda. Even within lambdas,
# it is a lot easier to not do it, and let the action sequence to logical form transformation
# logic add it to the output logical forms instead.
if name in ["lambda", "var", "x", "y", "z"]:
continue
name_type = type_signatures[alias]
# Type to terminal productions.
for substituted_type in substitute_any_type(name_type, basic_types):
valid_actions[str(substituted_type)].add(_make_production_string(substituted_type, name))
# Keeping track of complex types.
if isinstance(name_type, ComplexType) and name_type != ANY_TYPE:
complex_types.add(name_type)
for complex_type in complex_types:
for substituted_type in substitute_any_type(complex_type, basic_types):
for head, production in _get_complex_type_production(substituted_type,
multi_match_mapping or {}):
valid_actions[str(head)].add(production)
# We can produce complex types with a lambda expression, though we'll leave out
# placeholder types for now.
for i in range(num_nested_lambdas):
lambda_var = chr(ord('x') + i)
# We'll only allow lambdas to be functions that take and return basic types as their
# arguments, for now. Also, we're doing this for all possible complex types where
# the first and second types are basic types. So we may be overgenerating a bit.
for first_type in basic_types:
for second_type in basic_types:
key = ComplexType(first_type, second_type)
production_string = _make_production_string(key, ['lambda ' + lambda_var, second_type])
valid_actions[str(key)].add(production_string)
valid_action_strings = {key: sorted(value) for key, value in valid_actions.items()}
return valid_action_strings
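# Illustrative output shape (hypothetical grammar with one terminal "foo" of
# type <e,r> and basic types e and r): the returned dict would contain, among
# other rules, '<e,r> -> foo' under the '<e,r>' key, 'r -> [<e,r>, e]' under
# the 'r' key, and '@start@ -> e' / '@start@ -> r' under the start symbol.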
START_TYPE = NamedBasicType(START_SYMBOL)
# TODO(mattg): We're hard-coding three lambda variables here. This isn't a great way to do
# this; it's just something that works for now, that we can fix later if / when it's needed.
# If you allow for more than three nested lambdas, or if you want to use different lambda
# variable names, you'll have to change this somehow.
LAMBDA_VARIABLES = {'x', 'y', 'z'}
def is_nonterminal(production: str) -> bool:
# TODO(pradeep): This is pretty specific to the assumptions made in converting types to
# strings (e.g., that we're only using the first letter for types, lowercased).
# TODO(pradeep): Also we simply check the surface forms here, and this works for
# wikitables and nlvr. We should ideally let the individual type declarations define their own
# variants of this method.
if production in ['<=', '<']:
# Some grammars (including the wikitables grammar) have "less than" and "less than or
# equal to" functions that are terminals. We don't want to treat those like our
# "<t,d>" types.
return False
if production[0] == '<':
return True
if production.startswith('fb:'):
return False
if len(production) > 1 or production in LAMBDA_VARIABLES:
return False
return production[0].islower()
| [ "[email protected]" ] | |
7415de76e89f8f4dfe95d6ebb44104381c925582 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/enums/types/operating_system_version_operator_type.py | 9b61e85a5eec3edf7a897381b9d6f3c4f647b52d | [ "Apache-2.0" ]
| permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,213 | py |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v8.enums',
marshal='google.ads.googleads.v8',
manifest={
'OperatingSystemVersionOperatorTypeEnum',
},
)
class OperatingSystemVersionOperatorTypeEnum(proto.Message):
r"""Container for enum describing the type of OS operators. """
class OperatingSystemVersionOperatorType(proto.Enum):
r"""The type of operating system version."""
UNSPECIFIED = 0
UNKNOWN = 1
EQUALS_TO = 2
GREATER_THAN_EQUALS_TO = 4
__all__ = tuple(sorted(__protobuf__.manifest))
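# Hypothetical usage sketch (proto-plus enums behave like standard Python enums):
# >>> op = OperatingSystemVersionOperatorTypeEnum.OperatingSystemVersionOperatorType.EQUALS_TO
# >>> op.name, op.value
# ('EQUALS_TO', 2)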
| [ "bazel-bot-development[bot]@users.noreply.github.com" ] | bazel-bot-development[bot]@users.noreply.github.com |
4f8360cb9656c65b9ab0af1060d4f523bca4959f | 6a95b330e1beec08b917ff45eccfd6be3fd4629f | kubernetes/client/models/v1_namespace_status.py | 523e7e43d3f44960d90c934e8371361de7fc1cc0 | [ "Apache-2.0" ]
| permissive | TokkoLabs/client-python | f4a83d6540e64861b59e322c951380a670578d7f | f1ad9c6889105d8510472606c98f8d3807f82020 | refs/heads/master | 2023-07-14T01:36:46.152341 | 2017-12-21T21:32:11 | 2017-12-21T21:32:11 | 115,042,671 | 0 | 0 | Apache-2.0 | 2021-08-06T03:29:17 | 2017-12-21T20:05:15 | Python | UTF-8 | Python | false | false | 3,306 | py |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1NamespaceStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'phase': 'str'
}
attribute_map = {
'phase': 'phase'
}
def __init__(self, phase=None):
"""
V1NamespaceStatus - a model defined in Swagger
"""
self._phase = None
self.discriminator = None
if phase is not None:
self.phase = phase
@property
def phase(self):
"""
Gets the phase of this V1NamespaceStatus.
Phase is the current lifecycle phase of the namespace. More info: https://git.k8s.io/community/contributors/design-proposals/namespaces.md#phases
:return: The phase of this V1NamespaceStatus.
:rtype: str
"""
return self._phase
@phase.setter
def phase(self, phase):
"""
Sets the phase of this V1NamespaceStatus.
Phase is the current lifecycle phase of the namespace. More info: https://git.k8s.io/community/contributors/design-proposals/namespaces.md#phases
:param phase: The phase of this V1NamespaceStatus.
:type: str
"""
self._phase = phase
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1NamespaceStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
]
| |
32b8393a60a17cb8d5d3a614d581aae9fcb466f1 | a7f16c95f973905e880ad4dc277fbba890486654 | wildlifecompliance/migrations/0283_auto_20190814_1036.py | dcfe076119285e3560ffaf749bd7547dd1ce0fd5 | [ "Apache-2.0" ]
| permissive | dbca-wa/wildlifecompliance | 9e98e9c093aeb25dbb7ff8d107be47e29bcd05e1 | cb12ad9ea1171f10b5297cdb7e1eb6ea484e633d | refs/heads/master | 2023-08-08T14:37:05.824428 | 2023-07-31T02:57:23 | 2023-07-31T02:57:23 | 232,276,030 | 1 | 17 | NOASSERTION | 2023-07-31T02:57:24 | 2020-01-07T08:12:53 | Python | UTF-8 | Python | false | false | 772 | py |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-08-14 02:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wildlifecompliance', '0282_auto_20190813_1820'),
]
operations = [
migrations.RemoveField(
model_name='inspectiontypeapprovaldocument',
name='log_entry',
),
migrations.AddField(
model_name='inspectiontype',
name='approval_document',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='inspection_type', to='wildlifecompliance.InspectionTypeApprovalDocument'),
),
]
| [ "[email protected]" ] | |
7e7f5a718ac8033167bc5a225a645a38c8c3650a | e5dd21ac1305257fe163995f951cbbfbf3051fd7 | /Cracking_the_Coding_Interview/8-6.py | c8c86de2f9e0015f6f18220cf9120789b84d7d12 | []
| no_license | QI1002/exampool | d3d3cdad040e4a861b934122ef12e059f7d6cd01 | 08800f78482f9fd9d6641c3eabc5880e69782f42 | refs/heads/master | 2021-01-02T09:35:15.283632 | 2020-10-13T14:54:25 | 2020-10-13T14:54:25 | 99,178,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py |
# Recursive flood fill ("paint fill"): paint the 4-connected region of
# unpainted (0) cells containing `point`, marking each visited cell with 1.
def paintBox(point, box):
    x = point[0]
    y = point[1]
    # Stop at the grid boundary.
    if (x < 0 or x >= len(box[0])):
        return
    if (y < 0 or y >= len(box)):
        return
    # Stop if this cell has already been painted.
    if (box[y][x] == 1):
        return
    box[y][x] = 1
    # Spread to the four neighbours.
    paintBox((x-1,y), box)
    paintBox((x+1,y), box)
    paintBox((x,y-1), box)
    paintBox((x,y+1), box)

# Build an n-row by m-column grid of zeros, then fill from (x, y) = (2, 3);
# since the grid starts empty, the whole grid ends up painted.
box = []
m = 4
n = 6
for i in range(n):
    box.append([])
    for j in range(m):
        box[i].append(0)
point = (2,3)
paintBox(point, box)
print(box)
| [ "[email protected]" ] | |
897cf0c437285e8773f49a6e7cb7f12530950287 | 491f9ca49bbb275c99248134c604da9fb43ee9fe | /MD_analysis/process_radius_of_gyration.py | 2d4077af75475dccee4e3c7ab1dad1d1e233f511 | []
| no_license | KineOdegardHanssen/PhD-subprojects | 9ef0facf7da4b2a80b4bea9c890aa04f0ddcfd1a | c275539689b53b94cbb85c0fdb3cea5885fc40e9 | refs/heads/Windows | 2023-06-08T13:32:15.179813 | 2023-06-05T08:40:10 | 2023-06-05T08:40:10 | 195,783,664 | 2 | 0 | null | 2020-08-18T14:42:21 | 2019-07-08T09:49:14 | Python | UTF-8 | Python | false | false | 6,058 | py |
import matplotlib.pyplot as plt # To plot
from scipy.optimize import curve_fit
import numpy as np
import random
import math
import time
start_time = time.process_time()
M = 9
N = 101
kangle = 20
kbond = 200
Kangle = kangle
Kbond = kbond
#factors = [0.1,1,10,100,250]
#charges = [0,-1,-5,-10]
#spacings = [1,2,3,4,5,10,40,100]
N = 101
spacing = 1
gridspacing = spacing
spacings = [1,2,3,4,5,6,7,8,10,15,40,100]#[1,2,3,4,5,8,10,15,40,100]
#spacings = [40]
#dielectrics = [1,2,10,100] # For lg = 2 nm
dielectrics = [1,2,10,50,100,1000]
#lengths = [21,61,101,141]
#lensp = [1,3,5,7]
#krfacs = [0.01, 0.05, 0.10, 0.50, 1.00]
#kangles = [20, 100, 200, 1000, 2000]
wallenergies = [1.042]
wallenergy = wallenergies[0]  # needed by the varyspacing output filename below
charge = -1
T = 310
spacesims = False
dielsims = True
wallsims = False
if spacesims == True:
    Nsp = len(spacings)
    wallenergy = wallenergies[0]  # needed by the file-name template below
    outfilename = 'table_radgyrs_chaingrid_quadratic_M%iN%i_Langevin_wall%.3f_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge%i_T%i_theta0is180_twofirst_are_fixed_varyspacing.txt' % (M,N,wallenergy,kangle,kbond,charge,T)
outfile = open(outfilename,'w')
outfile.write('begin{table}\n\centering\n\caption{}\n begin{tabular}{r|c|c|c|c|c|c|c|c|c|c}\nSpacing/Chain & 1 & 2 & 3 & 4 & 5 & 6 & 7 & 8 & 9 & Average \ \ \n\hline\n')
if dielsims == True:
Nsp = len(dielectrics)
outfilename = 'table_radgyrs_chaingrid_quadratic_M%iN%i_Langevin_gridspacing%i_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge%i_T%i_theta0is180_twofirst_are_fixed_varydielectric.txt' % (M,N,gridspacing,Kangle,Kbond,charge,T)
outfile = open(outfilename,'w')
outfile.write('begin{table}\n\centering\n\caption{}\n begin{tabular}{r|c|c|c|c|c|c|c|c|c|c}\nDielectric/Chain & 1 & 2 & 3 & 4 & 5 & 6 & 7 & 8 & 9 & Average \ \ \n\hline\n')
if wallsims == True:
Nsp = len(wallenergies)
outfilename = 'table_radgyrs_chaingrid_quadratic_M%iN%i_Langevin_gridspacing%i_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge%i_T%i_theta0is180_twofirst_are_fixed_varywallenergy.txt' % (M,N,gridspacing,Kangle,Kbond,charge,T)
outfile = open(outfilename,'w')
outfile.write('begin{table}\n\centering\n\caption{}\n begin{tabular}{r|c|c|c|c|c|c|c|c|c|c}\n $\epsilon_w$/Chain & 1 & 2 & 3 & 4 & 5 & 6 & 7 & 8 & 9 & Average \ \ \n\hline\n')
totalaverage = np.zeros(Nsp)
totalrms = np.zeros(Nsp)
for i in range(Nsp):
if spacesims == True:
spacing = spacings[i]
outfile.write('%i' % spacing)
infilename = 'log.radgyrs_chaingrid_quadratic_M%iN%i_gridspacing%i_Langevin_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge' % (M,N,spacing,kangle,kbond)+str(charge)+'_T%i_theta0is180_twofirst_are_fixed' % T
if dielsims == True:
dielectric = dielectrics[i]
outfile.write('%i' % dielectric)
infilename = 'log.chaingrid_quadratic_M%iN%i_gridspacing%i_Langevin_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge%i_dielectric%i_T%i_theta0is180_twofirst_are_fixed' % (M,N,gridspacing,Kangle,Kbond,charge,dielectric,T)
if wallsims == True:
wallenergy = wallenergies[i]
outfile.write('%.3f' % wallenergy)
infilename = 'log.chaingrid_quadratic_M%iN%i_gridspacing%i_Langevin_wall%.3f_Kangle%i_Kbond%i_debye_kappa1_debyecutoff3_charge%i_T%i_theta0is180_twofirst_are_fixed_with_rgyr' % (M,N,spacing,wallenergy,kangle,kbond,charge,T)
# Readying arrays:
radgyr_average = np.zeros(M)
radgyr_stdv = np.zeros(M)
# This is really not the optimal solution:
allradgyrs_vals = []
allradgyrs_inds = []
infile = open(infilename,'r')
lines = infile.readlines()
#print('infilename:', infilename)
# Finding the mean and rms:
# Finding the mean:
starter1 = 0
starter2 = 0
counter = 0
for line in lines:
words = line.split()
#print('words=',words)
#print('starter1:', starter1, '; starter2:', starter2)
if len(words)>2:
if words[1]=='Run' and words[2]=='and':
# Finding the line: ####################### Run and write to file #########################################
starter1 = 1
#print('First mark hit')
#if starter1==1:
# print(words)
if starter1==1 and starter2==1:
# Test if we should break:
if len(words)>0:
if words[0]=='WARNING:' or words[0]=='Loop':
break
#print('Starting to read data')
if len(words)==12 or len(words)==18:
#print('I am in')
if len(words)==12:
addon = 3
else:
addon = 9
for j in range(M):
#print(words)
thisvalue = float(words[j+addon])
radgyr_average[j] += thisvalue
allradgyrs_vals.append(thisvalue)
allradgyrs_inds.append(j)
counter+=1
if starter1==1 and starter2==0:
if len(words)>0:
if words[0]=='Step':
starter2=1
#print('Second mark hit')
infile.close()
radgyr_average /= counter
totalaverage[i] = np.mean(radgyr_average)
# Finding the rms:
for j in range(len(allradgyrs_vals)):
chain = allradgyrs_inds[j]
val = allradgyrs_vals[j]
radgyr_stdv[chain] += (radgyr_average[chain]-val)**2
        totalrms[i] += (totalaverage[i]-val)**2  # accumulate over all samples, not overwrite
totalrms[i] = np.sqrt(totalrms[i]/(counter-1))
for j in range(M):
radgyr_stdv[j] = np.sqrt(radgyr_stdv[j]/(counter-1))
outfile.write(' & %.3f$\pm$%.3f' % (radgyr_average[j], radgyr_stdv[j]))
outfile.write(' & %.4f$\pm$%.4f \ \ \n' % (totalaverage[i], totalrms[i]))
outfile.write('\end{tabular}\n\label{table:radgyrs_chain_and_total_something}\n\end{table}')
outfile.close()
| [
"[email protected]"
]
| |
290cccac3244c8f49b7fe30dc928990ec75a0610 | b7ba02a29b10c449a8e405063c5eede32c36f0c8 | /doc/conf.py | 31ed7b46162d1b1b5bd2fbd7c00247768fd3b1bc | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Paebbels/pyHTTPRequestRouter | 96e32756ef4ef0e538f8990cef99719eac5dad0d | 10592ecdd9fd57bc04d218a7cdbb050d7ae38cc5 | refs/heads/master | 2021-06-22T14:25:37.250642 | 2020-01-02T00:57:08 | 2020-01-02T00:57:08 | 214,282,531 | 0 | 0 | NOASSERTION | 2021-04-28T22:09:05 | 2019-10-10T20:46:33 | Python | UTF-8 | Python | false | false | 4,408 | py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
#sys.path.insert(0, os.path.abspath('../pyHTTPRequestRouter'))
#sys.path.insert(0, os.path.abspath('_extensions'))
#sys.path.insert(0, os.path.abspath('_themes/sphinx_rtd_theme'))
# -- Project information -----------------------------------------------------
project = 'pyHTTPRequestRouter'
copyright = '2017-2019, Patrick Lehmann'
author = 'Patrick Lehmann'
# The full version, including alpha/beta/rc tags
release = 'v0.1'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
# Sphinx theme
"sphinx_rtd_theme",
# Standard Sphinx extensions
"sphinx.ext.autodoc",
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'sphinx.ext.inheritance_diagram',
'sphinx.ext.todo',
'sphinx.ext.graphviz',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
# SphinxContrib extensions
# Other extensions
# 'DocumentMember',
# local extensions (patched)
# local extensions
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
"_build",
"Thumbs.db",
".DS_Store"
]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# ==============================================================================
# Sphinx.Ext.InterSphinx
# ==============================================================================
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
# 'pyFlags': ('http://pyFlags.readthedocs.io/en/latest', None),
'pyExceptions': ('http://pyExceptions.readthedocs.io/en/latest', None),
'pyAttributes': ('http://pyAttributes.readthedocs.io/en/latest', None),
'pyGenericPath': ('http://pyGenericPath.readthedocs.io/en/latest', None),
'pyHTTPInterface': ('http://pyHTTPInterface.readthedocs.io/en/latest', None),
}
# ==============================================================================
# Sphinx.Ext.AutoDoc
# ==============================================================================
# see: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
autodoc_member_order = "bysource" # alphabetical, groupwise, bysource
# ==============================================================================
# Sphinx.Ext.ExtLinks
# ==============================================================================
extlinks = {
'issue': ('https://github.com/Paebbels/pyHTTPRequestRouter/issues/%s', 'issue #'),
'pull': ('https://github.com/Paebbels/pyHTTPRequestRouter/pull/%s', 'pull request #'),
'src': ('https://github.com/Paebbels/pyHTTPRequestRouter/blob/master/pyHTTPRequestRouter/%s?ts=2', None),
# 'test': ('https://github.com/Paebbels/pyHTTPRequestRouter/blob/master/test/%s?ts=2', None)
}
# ==============================================================================
# Sphinx.Ext.Graphviz
# ==============================================================================
graphviz_output_format = "svg"
| [
"[email protected]"
]
| |
fbe5bcce893c65d0de9fbe54faebde4410ae5695 | 5ac726f23d0490d3d43819578cca590b62d0ff02 | /wise_intern/Interviews/admin.py | 259b7df34ca5d091e9f4021328f27e7c247f6b8f | []
| no_license | youssriaboelseod/Software-HumanResources | 52ab324bf43479d8bea20690f71690615c68ef25 | 821fdf195915af3f6b6ec16ef2fb6d9f70d986f7 | refs/heads/master | 2022-12-28T01:29:52.310176 | 2020-10-10T05:05:22 | 2020-10-10T05:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from django.contrib import admin
from .models import Interview
# Register your models here.
admin.site.register(Interview) | [
"[email protected]"
]
| |
6ba2eaaf0441bd9cbe54bd95bd0f0810f655a902 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02999/s283934284.py | 6cd487c30f50e638f9be2239e26427ad2bcd0a27 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | x,a = list(map(int,input().strip().split()))
s = (0)
if x >= a:
s = (10)
print(s) | [
"[email protected]"
]
| |
5ce998a8d321fbb8c92ffc3515a595137019c013 | 222d525f50f0c955ba6e8af0b41a9bd9c04d99a9 | /venv/Lib/site-packages/pandas/_version.py | 560c9c69332156b1bd4362ce45c1cffa515f362a | []
| no_license | Sakthi-zebra/Rest_RCI | 943c4652a239808b71d0d2ba5c28acca7435cf68 | 9a716860986a3d1fafee70f0c6339810fce152f2 | refs/heads/master | 2021-01-05T16:44:41.126142 | 2020-02-17T10:22:16 | 2020-02-17T10:22:16 | 241,074,751 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | # This file was generated by 'versioneer.py' (0.15) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
from warnings import catch_warnings
with catch_warnings(record=True):
import json
import sys
version_json = '''
{
"dirty": false,
"error": null,
"full-revisionid": "29d6b0232aab9576afa896ff5bab0b994760495a",
"version": "1.0.1"
}
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
| [
"[email protected]"
]
| |
7132a6efe79998e97391fede2615e22427a1242a | 5ffed81ced523b6e417b4e48d20380b6f16f8f42 | /pre_exam/vacantion.py | 10a4829eea56d31166c28138daf1f4126ed1418f | []
| no_license | Nikoletazl/Basics-Python | 0f3f095bd51f9546c681e3cdd268232de88749ab | 17aef1b95814f13a02053681aae3e617e56f2fe6 | refs/heads/main | 2023-08-14T15:48:48.450249 | 2021-10-08T15:02:35 | 2021-10-08T15:02:35 | 415,027,622 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | count_people = int(input())
count_nights = int(input())
count_cards = int(input())
count_tickets = int(input())
sum_one_person = count_nights * 20
sum_cards = count_cards * 1.60
tickets = count_tickets * 6
total_sum_one = sum_one_person + sum_cards + tickets
group_sum = total_sum_one * count_people
end_sum = group_sum + 0.25 * group_sum
print(f"{end_sum:.2f}") | [
"[email protected]"
]
| |
e6fc546651d2205d4808a4a327045054eda8451d | 7db0883137d119565540f2d071638c4016f39213 | /Note/Project_Play/BaiduBaike/SpiderMan.py | eb7543c12a969940ffb61d81059fa69f378fe5f0 | []
| no_license | PhilHuang-d/python--- | cf22a4cc00d4beaaf75ef7ca87a4c5d31a9d5efe | 152c18f51838ce652b79a0cd24765b1a1c237eee | refs/heads/master | 2021-09-13T05:32:53.754865 | 2018-04-25T13:36:40 | 2018-04-25T13:36:40 | 108,812,447 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,344 | py | #coding:utf-8
import DataOutput
import HTMLDownloader
import URLManger
import HtmlParser
class SpiderMan(object):
def __init__(self):
self.manager = URLManger.UrlManager()
self.downloader = HTMLDownloader.HtmlDownloader()
self.parser = HtmlParser.HhmlParser()
self.output = DataOutput.DataOutput()
    def crawl(self, root_url):
        # Seed the URL manager with the entry-point URL
        self.manager.add_new_url(root_url)
        while(self.manager.has_new_url() and self.manager.old_url_size() < 100):
            try:
                # Pull the next URL from the URL manager
                new_url = self.manager.get_new_url()
                # Download the page with the HTML downloader
                html = self.downloader.download(new_url)
                # Extract the page data and outgoing links with the HTML parser
                new_urls, data = self.parser.parser(new_url, html)
                # Feed the extracted URLs back into the URL manager
                self.manager.add_new_urls(new_urls)
                # Persist the extracted data
                self.output.store_data(data)
                print("crawled %s links so far" % self.manager.old_url_size())
            except Exception as e:
                print("crawl failed: %s" % e)
        self.output.output_html()
if __name__=="__main__":
spider_man = SpiderMan()
spider_man.crawl("http://baike.baidu.com/view/284853.htm") | [
"[email protected]"
]
| |
4ab2d1b2588ed24e75a8fd18060032ba278250ad | bd792a49f21d901f14165993d45e114e4df60340 | /venv/bin/futurize | 2a1ba3df470dca8741f0b0ce6790bd1dcee24bdc | []
| no_license | Cynthrial/Serial-read | 82d0883ad4de01fe48e58523d2d0f4bcf97b3835 | eb706094b95b09cfc8870bff0f6385d04d807996 | refs/heads/master | 2020-04-13T13:37:32.380790 | 2018-12-27T02:35:04 | 2018-12-27T02:35:04 | 163,236,705 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 417 | #!/root/PycharmProjects/Serial_read/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.17.1','console_scripts','futurize'
__requires__ = 'future==0.17.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('future==0.17.1', 'console_scripts', 'futurize')()
)
| [
"[email protected]"
]
| ||
28d939d002a35bc02c16215e3fe153a32445d91b | 8726a58628e1d6c8e6e8cba0bb67de80bad72a51 | /wizard/create_data_template.py | 5cfc1efe6cda7e3d4c77f4974c579767e7e5f7b0 | []
| no_license | cgsoftware/jasper_reports | 54a612a44cd94963794b16ab4266026b233b8ba4 | 207bdea1b8738dff88260f4ea76da8b627e05375 | refs/heads/master | 2021-01-10T19:33:22.921656 | 2011-09-06T15:10:39 | 2011-09-06T15:10:39 | null | 0 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 5,766 | py | # encoding: iso-8859-15
from xml.dom.minidom import getDOMImplementation
import wizard
import pooler
import base64
import osv
import string
from tools.translate import _
view_form_start = """<?xml version="1.0"?>
<form string="Create Data Template">
<group colspan="2">
<field name="model"/>
<field name="depth"/>
</group>
</form>"""
view_fields_start = {
'model': { 'string': 'Model', 'type': 'many2one', 'relation': 'ir.model', 'required': True },
'depth': { 'string':'Depth', 'type':'integer', 'required': True },
}
view_form_end = """<?xml version="1.0"?>
<form string="Create Data Template">
<group colspan="2">
<field name="model"/>
<field name="data" filename="filename"/>
<field name="filename" invisible="1"/>
</group>
</form>"""
view_fields_end = {
'model': { 'string': 'Model', 'type': 'char', 'readonly': True },
'data': { 'string': 'XML', 'type': 'binary', 'relation': 'ir.model', 'readonly': True },
'filename': { 'string': 'File Name', 'type': 'char' },
}
src_chars = """àáäâÀÁÄÂèéëêÈÉËÊìíïîÌÍÏÎòóöôÒÓÖÔùúüûÙÚÜÛçñºª·¤ '"()/*-+?¿!&$[]{}@#`'^:;<>=~%,\\"""
src_chars = unicode( src_chars, 'iso-8859-1' )
dst_chars = """aaaaAAAAeeeeEEEEiiiiIIIIooooOOOOuuuuUUUUcnoa_e________________________________"""
dst_chars = unicode( dst_chars, 'iso-8859-1' )
class create_data_template(wizard.interface):
def _action_start(self, cr, uid, data, context):
res = {
'depth': 1
}
return res
def normalize(self, text):
if isinstance( text, unicode ):
text = text.encode('utf-8')
return text
def unaccent(self, text):
if isinstance( text, str ):
text = unicode( text, 'utf-8' )
output = text
for c in xrange(len(src_chars)):
output = output.replace( src_chars[c], dst_chars[c] )
return output.strip('_').encode( 'utf-8' )
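    # Illustrative example of unaccent (added note, assuming the
    # src_chars/dst_chars mappings above): u'Año nuevo!' -> 'Ano_nuevo'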
def generate_xml(self, cr, uid, context, pool, modelName, parentNode, document, depth, first_call):
# First of all add "id" field
fieldNode = document.createElement('id')
parentNode.appendChild( fieldNode )
valueNode = document.createTextNode( '1' )
fieldNode.appendChild( valueNode )
language = context.get('lang')
if language == 'en_US':
language = False
# Then add all fields in alphabetical order
model = pool.get(modelName)
fields = model._columns.keys()
fields.sort()
for field in fields:
name = False
if language:
# Obtain field string for user's language.
name = pool.get('ir.translation')._get_source(cr, uid, modelName + ',' + field, 'field', language)
#name = self.unaccent( name )
#name = self.normalize( name )
#help = pool.get('ir.translation')._get_source(cr, uid, modelName + ',' + field, 'help', language)
#help = self.normalize( help )
if not name:
# If there's not description in user's language, use default (english) one.
name = pool.get(modelName)._columns[field].string
#help = pool.get(modelName)._columns[field].help
if name:
name = self.unaccent( name )
# After unaccent the name might result in an empty string
if name:
name = '%s-%s' % (self.unaccent( name ), field )
else:
name = field
fieldNode = document.createElement( name )
#if name:
#fieldNode.setAttribute( 'name', name )
#if help:
#fieldNode.setAttribute( 'help', help )
parentNode.appendChild( fieldNode )
fieldType = model._columns[field]._type
if fieldType in ('many2one','one2many','many2many'):
if depth <= 1:
continue
newName = model._columns[field]._obj
self.generate_xml(cr, uid, context, pool, newName, fieldNode, document, depth-1, False)
continue
if fieldType == 'float':
value = '12345.67'
elif fieldType == 'integer':
value = '12345'
elif fieldType == 'date':
value = '2009-12-31 00:00:00'
elif fieldType == 'time':
value = '12:34:56'
elif fieldType == 'datetime':
value = '2009-12-31 12:34:56'
else:
value = field
valueNode = document.createTextNode( value )
fieldNode.appendChild( valueNode )
if depth > 1 and modelName != 'Attachments':
# Create relation with attachments
fieldNode = document.createElement( '%s-Attachments' % _('Attachments') )
parentNode.appendChild( fieldNode )
self.generate_xml(cr, uid, context, pool, 'ir.attachment', fieldNode, document, depth-1, False)
if first_call:
# Create relation with user
fieldNode = document.createElement( '%s-User' % _('User') )
parentNode.appendChild( fieldNode )
self.generate_xml(cr, uid, context, pool, 'res.users', fieldNode, document, depth-1, False)
def _action_create_xml(self, cr, uid, data, context):
pool = pooler.get_pool(cr.dbname)
form = data['form']
values = pool.get('ir.model').read(cr, uid, form['model'], ['name','model'], context)
name = values['name']
model = values['model']
document = getDOMImplementation().createDocument(None, 'data', None)
topNode = document.documentElement
recordNode = document.createElement('record')
topNode.appendChild( recordNode )
self.generate_xml( cr, uid, context, pool, model, recordNode, document, form['depth'], True )
topNode.toxml()
res = {
'model': name,
'data': base64.encodestring( topNode.toxml() ),
'filename': 'jasper.xml',
}
return res
states = {
'init': {
'actions': [_action_start],
'result': {
'type': 'form',
'arch': view_form_start,
'fields': view_fields_start,
'state': [('end','Cancel','gtk-cancel'),('create','Create','gtk-ok')]
}
},
'create': {
'actions': [_action_create_xml],
'result': {
'type': 'form',
'arch': view_form_end,
'fields': view_fields_end,
'state': [('end','Accept','gtk-ok')]
}
}
}
create_data_template('jasper_create_data_template')
| [
"[email protected]"
]
| |
958e6cc962347253a2d2217e8fb7795f660d2001 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Mkt3yqQMsw9e3Jmjq_4.py | 02def150323524f87c2640e1f03e1201eb361dcd | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,137 | py | """
Write a function which, given a permutation of `[0, 1, ..., n-1]` ( `n>0`)
represented by a shuffled list, returns the permutation in _disjoint cycle
form_ as a set of tuples.
A **permutation** is a particular (re)ordering of a set of objects. For
example, `[1,3,0,4]` is a permutation on the 4 objects `[0,1,2,3]`. In this
problem, we represent permutations on `n` objects as lists containing the
numbers in `list(range(n)) == [0, ..., n-1]`.
A **cycle** or **cyclic permutation** is a particular kind of permutation
whereby all elements are sent to one another in a cyclic fashion. In this
problem, we represent cycles as tuples.
* For example, the permutation `[1,2,3,0]` is a cyclic permutation of `[0,1,2,3]` because it can be made from `[0,1,2,3]` by applying the mapping `{0:1, 1:2, 2:3, 3:0}`, which maps elements in the _cycle_ `0➞1➞2➞3➞0`. We represent this cycle by the tuple `(0,1,2,3)`, where each element gets sent to the one on the right, and the last is sent to the first.
* The cycles `(0,1,2,3)`, `(1,2,3,0)`, `(2,3,0,1)` and `(3,0,1,2)` all represent the same cycle; namely `0➞1➞2➞3➞0` . We always choose the cycle to have the lowest element first: `(0,1,2,3)`.
Finally, any permutation can be written in **disjoint cycle form** , or as an
unordered set of cyclic permutations. _Disjoint_ means none of the cycles have
any elements in common. This form is unique up to the order of the cycles and
up to the cycle representation.
* The cyclic permutation `[0,1,3,2,4,5]` can be written as `(2,3)`—since 2 an 3 are swapped—and so the disjoint cycle form is `{(2,3)}`.
* `[1,0,3,2]` is the mapping `{0:1, 1:0, 2:3, 3:2}` and has disjoint cycle form`{(0, 1), (2, 3)}` .
Your function takes a list (the permutation) and returns a set of tuples (the
set of cyclic permutations).
### Examples
disjoint_cycle_form([1, 0]) ➞ {(0, 1)}
# 0 and 1 are swapped, but lowest is listed first.
disjoint_cycle_form([0, 1, 2, 3]) ➞ set()
# Permutation is already in order.
disjoint_cycle_form([0, 1, 3, 2]) ➞ {(2, 3)}
disjoint_cycle_form([1, 0, 3, 2]) ➞ {(0, 1), (2, 3)}
# or {(2, 3), (0, 1)}; the cycle order in a set doesn't matter.
disjoint_cycle_form([1, 3, 0, 2]) ➞ {(0, 1, 3, 2)}
### Notes
Look up "disjoint cycle notation" or "cycle decomposition" for more
information about permutations. This is the kind of thing you learn in a first
course in Group Theory. Note that the given permutations will always have at
least one element (the only such permutation is `[0]`), and a permutation of
length `n` will always contain the elements of `range(n)` (that is, `0` to
`n-1` inclusive).
"""
def cycles(perm):
remain = set(perm)
result = []
while len(remain) > 0:
n = remain.pop()
cycle = [n]
while True:
n = perm[n]
if n not in remain:
break
remain.remove(n)
cycle.append(n)
result.append(cycle)
return result
def disjoint_cycle_form(perm):
ans = set()
for cycle in cycles(perm):
if len(cycle) > 1:
ans.add(tuple(cycle))
return ans
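if __name__ == '__main__':
    # Added usage sketch (not part of the original solution): reproduces the
    # examples from the problem statement above.
    assert disjoint_cycle_form([1, 0]) == {(0, 1)}
    assert disjoint_cycle_form([0, 1, 2, 3]) == set()
    assert disjoint_cycle_form([1, 0, 3, 2]) == {(0, 1), (2, 3)}
    assert disjoint_cycle_form([1, 3, 0, 2]) == {(0, 1, 3, 2)}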
| [
"[email protected]"
]
| |
5606b5c260655962cf20f5832309e6fa1fba193e | 077beb02d73045eb97261a1c5e7021bfe709e55c | /tests/new_item/test_invalid_new_item_class.py | 0412c3d225cd16df9a5cc57f7d49be2a1a918cd8 | [
"MIT"
]
| permissive | zcutlip/pyonepassword | a91d8491d807c2cede2c483a66872b7913ad3aac | 3ced5acf3667f1af73cad26ae0ef31e8c4b19585 | refs/heads/main | 2023-09-04T03:16:49.170698 | 2023-06-26T19:51:32 | 2023-06-26T19:51:32 | 201,505,055 | 48 | 13 | MIT | 2023-09-05T01:44:18 | 2019-08-09T16:29:56 | Python | UTF-8 | Python | false | false | 619 | py | import pytest
from pyonepassword.api.exceptions import OPInvalidItemException
from pyonepassword.op_items._new_item import OPNewItemMixin
class OPInvalidLoginItemTemplate(OPNewItemMixin):
def __init__(self, title: str, fields=[], sections=[], extra_data={}):
super().__init__(title, fields, sections, extra_data)
def test_invalid_new_item_class_01():
with pytest.raises(OPInvalidItemException):
OPNewItemMixin("invalid-new-item")
def test_invalid_login_item_template_01():
with pytest.raises(OPInvalidItemException):
OPInvalidLoginItemTemplate("invalid login item template")
| [
"[email protected]"
]
| |
9c151b3a6ea5ad2faf547932fcbb58f8c96ed5ea | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/Control/GaudiSequencer/share/test_athretrysequencer.py | 019c631baffe73d52c8ae1a6ebcba84383c65e5e | []
| no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,634 | py | ###############################################################
#
# Job options file
#
#==============================================================
import AthenaCommon.Constants as Lvl
from AthenaCommon.AppMgr import theApp
from AthenaCommon.AppMgr import ServiceMgr as svcMgr
# get a handle on the job main sequence
from AthenaCommon.AlgSequence import AlgSequence, AthSequencer
job = AlgSequence()
#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
if not 'EVTMAX' in dir():
EVTMAX = 10
pass
theApp.EvtMax = EVTMAX
#--------------------------------------------------------------
# Sequence(s)
#--------------------------------------------------------------
## Sequencer configuration ##
job += CfgMgr.AthRetrySequencer ('seq', MaxRetries = 5)
import AthenaPython.PyAthena as PyAthena
class PyPush(PyAthena.Alg):
def __init__(self, name='PyPush', **kw):
## init base class
kw['name'] = name
super(PyPush, self).__init__(**kw)
def initialize(self):
self.evts = 0
return PyAthena.StatusCode.Success
def execute(self):
self.setFilterPassed(True)
self.evts += 1
self.msg.info("events seen: %s" % self.evts)
return PyAthena.StatusCode.Success
def finalize(self):
self.msg.info("finalize ==> total events: %s" % self.evts)
return PyAthena.StatusCode.Success
pass
class PyPull(PyAthena.Alg):
def __init__(self, name='PyPull', **kw):
## init base class
kw['name'] = name
super(PyPull, self).__init__(**kw)
def initialize(self):
self.evts = 0
return PyAthena.StatusCode.Success
def execute(self):
self.evts += 1
self.setFilterPassed(True)
import random
if random.random() < 0.8:
self.msg.info("requesting more events!!")
self.setFilterPassed(False)
else:
self.msg.info("event quite satisfying...")
self.msg.info("seen %s event(s)" % self.evts)
return PyAthena.StatusCode.Success
def finalize(self):
self.msg.info("finalize ==> total events: %s" % self.evts)
return PyAthena.StatusCode.Success
pass
job.seq += PyPush("push", OutputLevel = Lvl.INFO)
job.seq += PyPull("pull", OutputLevel = Lvl.INFO)
job += CfgMgr.AthEventCounter("counter")
#svcMgr.MessageSvc.OutputLevel = Lvl.INFO
#==============================================================
#
# End of job options file
#
###############################################################
| [
"[email protected]"
]
| |
aebf3cbd105f56502484732cbb959833a049352b | 6bce631b869a8717eed29eae186688a7fdb7f5c8 | /venv/Lib/site-packages/test/test_stock_price.py | 4ce7f9435eb697392c8e98c7711ab22e0976e446 | []
| no_license | singhd3101/CS5100-Stock-Market-Prediction | 6d43bd39633dd80bb1141dc550302874a5bc0939 | 2804a6270a05155e168d0f2518bcd97f1c9bcb3e | refs/heads/master | 2020-11-26T03:56:02.613630 | 2019-12-19T02:22:13 | 2019-12-19T02:22:13 | 228,958,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | # coding: utf-8
"""
Intrinio API
Welcome to the Intrinio API! Through our Financial Data Marketplace, we offer a wide selection of financial data feed APIs sourced by our own proprietary processes as well as from many data vendors. For a complete API request / response reference please view the [Intrinio API documentation](https://intrinio.com/documentation/api_v2). If you need additional help in using the API, please visit the [Intrinio website](https://intrinio.com) and click on the chat icon in the lower right corner. # noqa: E501
OpenAPI spec version: 2.2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import intrinio_sdk
from intrinio_sdk.models.stock_price import StockPrice # noqa: E501
from intrinio_sdk.rest import ApiException
class TestStockPrice(unittest.TestCase):
"""StockPrice unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testStockPrice(self):
"""Test StockPrice"""
# FIXME: construct object with mandatory attributes with example values
# model = intrinio_sdk.models.stock_price.StockPrice() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
ac68c34a9df77b38ee0be71b8c371854aa47da18 | 142fd48d2c09bc83ba31b96553fc6d27fad596a3 | /v1/202.happy-number.132775164.ac.py | 76ae51a59bdadd727573d185296fe6de77a038ba | []
| no_license | goalong/lc | baaa8ecc55ecdb136271687d21609832f32ccf6e | 7b45d500e65c759cc2e278d33d9d21925a713017 | refs/heads/master | 2021-10-28T03:40:23.534592 | 2019-04-21T14:29:47 | 2019-04-21T14:29:47 | 111,088,996 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | #
# [202] Happy Number
#
# https://leetcode.com/problems/happy-number/description/
#
# algorithms
# Easy (41.39%)
# Total Accepted: 153.8K
# Total Submissions: 371.6K
# Testcase Example: '1'
#
# Write an algorithm to determine if a number is "happy".
#
# A happy number is a number defined by the following process: Starting with
# any positive integer, replace the number by the sum of the squares of its
# digits, and repeat the process until the number equals 1 (where it will
# stay), or it loops endlessly in a cycle which does not include 1. Those
# numbers for which this process ends in 1 are happy numbers.
#
# Example: 19 is a happy number
#
#
# 12 + 92 = 82
# 82 + 22 = 68
# 62 + 82 = 100
# 12 + 02 + 02 = 1
#
#
# Credits:Special thanks to @mithmatt and @ts for adding this problem and
# creating all test cases.
#
class Solution(object):
def isHappy(self, n):
"""
:type n: int
:rtype: bool
"""
# 3 star.
memo = set()
while n not in memo:
memo.add(n)
n = self.get_next(n)
if n == 1:
return True
return False
def get_next(self, num):
num_list = list(str(num))
rs = sum([int(i)*int(i) for i in num_list])
return rs
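if __name__ == '__main__':
    # Added usage sketch (not part of the original submission):
    # 19 -> 82 -> 68 -> 100 -> 1, so 19 is happy; 2 enters a cycle and is not.
    assert Solution().isHappy(19)
    assert not Solution().isHappy(2)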
| [
"[email protected]"
]
| |
2c8f96dfd60e771a4512c4b9b459a21ff197f9ae | e04c3af194afacf7e454eb63a1f917c0df46698d | /MAST/test/workflow_test/workflow_setup.py | 4fecd61563ecda63785f435c50709a593de50be3 | [
"MIT"
]
| permissive | kcantosh/MAST | 050716de2580fe53cf241b0d281a84f13175b542 | 4138b87e5a1038eb65023232f80907333d3196f2 | refs/heads/dev | 2021-01-20T16:51:22.759949 | 2017-01-31T16:40:45 | 2017-01-31T16:40:45 | 82,833,665 | 0 | 1 | null | 2017-02-22T17:34:13 | 2017-02-22T17:34:13 | null | UTF-8 | Python | false | false | 4,053 | py | ##############################################################
# This code is part of the MAterials Simulation Toolkit (MAST)
#
# Maintainer: Tam Mayeshiba
# Last updated: 2016-02-08
##############################################################
##############################################################
# Requirements:
# 1. Home directory access from where the test will be run
# 2. MAST installation
##############################################################
import os
import time
import shutil
import numpy as np
from MAST.utility import MASTError
from MAST.utility import dirutil
from MAST.utility import MASTFile
import MAST
import subprocess
testname ="workflow_test"
testdir = dirutil.get_test_dir(testname)
checkname = os.path.join(testdir, "WORKFLOW_CONFIG")
def verify_checks():
checkfile=MASTFile(checkname)
for myline in checkfile.data:
if "Check" in myline:
checkresult = myline.split(":")[1].strip()[0].lower()
if checkresult == 'y':
print "Checks okay"
else:
raise MASTError("verify checks","Checks for workflow setup not verified. Check %s" % checkname)
return
def get_variables():
verify_checks()
myvars=dict()
checkfile=MASTFile(checkname)
for myline in checkfile.data:
if myline[0:9] == "workflow_":
mykey = myline.split("=")[0].strip()
myval = myline.split("=")[1].strip()
myvars[mykey] = myval
return myvars
def create_workflow_test_script(inputfile):
myvars = get_variables()
# set up testing directory tree
wtdir=myvars['workflow_test_directory']
mast_test_dir=os.path.join(wtdir,"no_directory_yet")
while not (os.path.isdir(mast_test_dir)):
timestamp=time.strftime("%Y%m%dT%H%M%S")
mast_test_dir = os.path.join(wtdir,"output_test_%s" % timestamp)
if not (os.path.isdir(mast_test_dir)):
shutil.copytree("%s/mini_mast_tree" % wtdir, mast_test_dir)
# set up output file and submission script
shortname = inputfile.split(".")[0]
output="%s/output_%s" % (wtdir, shortname)
submitscript="%s/submit_%s.sh" % (wtdir, shortname)
generic_script="%s/generic_mast_workflow.sh" % wtdir
bashcommand="bash %s %s %s %s %s %s >> %s" % (generic_script,
mast_test_dir,
myvars["workflow_examples_located"],
inputfile,
myvars["workflow_activate_command"],
myvars["workflow_testing_environment"],
output)
submitfile=MASTFile()
submitfile.data.append(bashcommand + "\n")
submitfile.to_file(submitscript)
return [mast_test_dir, submitscript, output]
def generic_submit(inputfile):
[mast_test_dir, submitscript, outputname] = create_workflow_test_script(inputfile)
mygsub = "bash %s" % submitscript
gproc = subprocess.Popen(mygsub, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
gproc.wait()
if not (os.path.isfile(outputname)):
print "Sleep 5"
time.sleep(5)
if not (os.path.isfile(outputname)):
raise OSError("Test did not create output %s" % outputname)
print "Output %s created" % outputname
waitct=0
tailcmd = "tail -n 3 %s" % outputname
maxwait=502
while waitct < maxwait:
tail3proc=subprocess.Popen(tailcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
tail3=tail3proc.communicate()[0]
tail3proc.wait()
for tailline in tail3.split("\n"):
if "Workflow completed" in tailline:
return ["Completed", mast_test_dir]
time.sleep(30)
waitct = waitct + 1
print "Output not complete. Attempt %i/%i" % (waitct, maxwait)
return ["Unfinished", mast_test_dir]
def get_finished_recipe_dir(mast_test_dir):
trydirs=os.listdir(os.path.join(mast_test_dir,"ARCHIVE"))
for trydir in trydirs:
trypath=os.path.join(mast_test_dir,"ARCHIVE",trydir)
if (os.path.isdir(trypath)):
return trypath
return ""
| [
"[email protected]"
]
| |
389ce0bd3e07869ffa7d5d82fc97f0e6114b317e | 1740075fca5d99eee47d8ab10e918be07f544d55 | /catalog/migrations/0002_auto_20191107_1239.py | bd20f9edcd6d8711f45f088ad0c948df3acd2e3a | []
| no_license | Grayw0lf/local_library | 0933bd5d35ef64ee4dc90dd0cdd83686a8eeed3a | 652f0260bfd153138eaee24810685c52f4063b07 | refs/heads/master | 2023-04-30T10:23:38.048841 | 2019-11-13T21:10:09 | 2019-11-13T21:10:09 | 221,551,305 | 1 | 0 | null | 2023-04-21T20:40:05 | 2019-11-13T21:03:49 | Python | UTF-8 | Python | false | false | 688 | py | # Generated by Django 2.2.7 on 2019-11-07 09:39
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('catalog', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='author',
old_name='date_of_died',
new_name='date_of_death',
),
migrations.AlterField(
model_name='bookinstance',
name='id',
field=models.UUIDField(default=uuid.UUID('976d9b8b-7c2f-4e07-9879-78d7f1d2fe11'), help_text='Unique ID for this particular book across whole library', primary_key=True, serialize=False),
),
]
| [
"[email protected]"
]
| |
2a500a425eb1abbc023f928e0a265bbc37889d78 | 64fc5dfec9a6f7b31c224286321899f5103d3983 | /duckworthd/mining.py | f2127b917aeb1fa70482dce7c25ce5b13176311f | []
| no_license | espoirMur/public-DKHQ_GlobalWitness | 68aaaaef52a1b05773ded143060a0c5f45c14c6a | e0b0d2b669faa1cb6b3cc86791ff5ce306c1cfcb | refs/heads/master | 2020-04-17T17:22:23.323979 | 2018-10-11T19:50:42 | 2018-10-11T19:50:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,678 | py | """Utilities for working with satellite imagery for mining."""
import json
import os
import re
import string
import tempfile
import urllib
import zipfile
from matplotlib import pyplot as plt
import bcolz
import ee as earth_engine
earth_engine.Initialize()
import gdal
import h5py
import numpy as np
import pandas as pd
import geopandas as gpd
# Default directory containing images.
DEFAULT_IMAGE_ROOT = '/workspace/lab/data_s3/mines_ipis'
# Fusion Table ID containing polygons around mining sites.
DEFAULT_IPIS_MINING_POLYGONS = 'ft:1HG3R3cebqMp2yK0cOimTL7wLnh41c1DH24GyWQg1'
# Images with 4 axes. The first two are typical for images -- x, y. The
# third is color band (traditionally RGB, but Landsat captures more). The
# fourth is time, representing when the image was captured.
X_AXIS = 0
Y_AXIS = 1
BAND_AXIS = 2
TIME_AXIS = 3
def load_ipis_mining_sites_dataset():
"""Load all mining sites annotated by IPIS from FusionTable as GeoJSON."""
return earth_engine.FeatureCollection('ft:1P1f-A2Sl44YJEqtD1FvA1z7QtDFsRut1QziMD-nV').getInfo()
def _get_metadata_file_path(image_root):
"""Get absolute path to metadata.json file in a given directory.
If there are more than one metadata.json files, pick the last one after
sorting.
"""
if not os.path.exists(image_root):
raise ValueError(
u'%s does not exist. No metadata files found.' % image_root)
filenames = os.listdir(image_root)
metadata_filenames = [name for name in filenames if 'metadata' in name]
if not metadata_filenames:
raise ValueError(
u'No files with "metadata" in name found under %s' % image_root)
metadata_filename = list(sorted(metadata_filenames))[-1]
return os.path.join(image_root, metadata_filename)
def load_metadata(image_root=None):
"""Load JSON file storing image metadata from disk.
If no JSON file can be found, an empty DataFrame is returned.
"""
image_root = image_root or DEFAULT_IMAGE_ROOT
try:
fpath = _get_metadata_file_path(image_root)
except ValueError:
return pd.DataFrame(
columns=["bands", "collection", "dates", "dim", "fpath", "id"])
with open(fpath) as f:
return pd.DataFrame(json.load(f))
def save_metadata(image_root, metadata):
"""Store DataFrame containing image metadata to disk."""
if not os.path.exists(image_root):
os.makedirs(image_root)
with open(os.path.join(image_root, "metadata4.json"), "w") as f:
return metadata.to_json(f)
def merge_metadata(old_metadata, new_metadata):
"""Merge two metadata DataFrames."""
# Remove all rows from 'old_metadata' that have the same path as in 'new_metadata'
old_metadata = old_metadata[~old_metadata['fpath'].isin(
new_metadata['fpath'])]
# Concatenate new and old together.
return pd.concat([old_metadata, new_metadata], ignore_index=True)
def load_image(img_metadata, image_root=None):
"""Load a single image from disk."""
image_root = image_root or DEFAULT_IMAGE_ROOT
fname = os.path.join(image_root, img_metadata['fpath'])
return bcolz.open(fname)[:]
def geodataframe_to_earthengine(geodataframe):
"""Converts a GeoDataFrame to an ee.FeatureCollection."""
geojson_str = geodataframe.to_json()
geojson = json.loads(geojson_str)
return geojson_to_earthengine(geojson)
def geojson_to_earthengine(geojson):
"""Converts a GeoJSON dict to an Earth Engine type.
Args:
geojson: GeoJSON-supported object as a nested dict/list/tuple.
Returns:
A matching type that Earth Engine understands (e.g. ee.FeatureCollection, ee.Geometry.Point).
"""
if isinstance(geojson, dict):
if 'type' not in geojson:
raise ValueError("Not 'type' attribute in geojson: %s" % (geojson,))
if geojson['type'] == 'FeatureCollection':
return earth_engine.FeatureCollection(
geojson_to_earthengine(geojson['features']))
elif geojson['type'] == 'Feature':
return earth_engine.Feature(
geojson_to_earthengine(geojson['geometry']),
geojson['properties'])
elif geojson['type'] == 'Point':
return earth_engine.Geometry.Point(coords=geojson['coordinates'])
elif geojson['type'] == 'Polygon':
return earth_engine.Geometry.Polygon(
coords=geojson['coordinates'],
geodesic=geojson.get('geodesic', None))
raise ValueError("Unsupported GeoJSON dict type: %s" % geojson['type'])
elif isinstance(geojson, list):
return [geojson_to_earthengine(element) for element in geojson]
elif isinstance(geojson, tuple):
return tuple(geojson_to_earthengine(element) for element in geojson)
elif type(geojson) in [int, float, str, unicode]:
return geojson
else:
raise ValueError("Unable to parse type: %s" % type(geojson))
def to_earthengine_featurecollection(obj):
"""Converts an object to an ee.FeatureCollection.
'obj' can be one of:
- str: a Fusion Table ID ("ft:xxx")
- GeoDataFrame
- GeoJSON dict of type 'FeatureCollection'
"""
# If string, load FeatureCollection using Earth Engine.
if isinstance(obj, basestring):
return earth_engine.FeatureCollection(obj)
# If GeoDataFrame, convert to ee.FeatureCollection.
if isinstance(obj, gpd.GeoDataFrame):
return geodataframe_to_earthengine(obj)
# If GeoJSON, convert to ee.FeatureCollection.
if isinstance(obj, dict):
assert 'type' in obj
assert obj['type'] == 'FeatureCollection'
return geojson_to_earthengine(obj)
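# Illustrative call patterns for to_earthengine_featurecollection (added note;
# the inputs below are assumed examples, not values from this module):
#   to_earthengine_featurecollection('ft:1HG3...')          # Fusion Table ID
#   to_earthengine_featurecollection(gpd.read_file(fname))  # GeoDataFrame
#   to_earthengine_featurecollection(json.load(f))          # GeoJSON FeatureCollection dict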
def load_image_mask(img_metadata, ipis_mining_sites=None, ipis_mining_polygons=None, image_root=None):
"""Load binary mask labeling pixels as "mining" or "not mining".
Args:
img_metadata: pd.Series from a metadata.json file.
ipis_mining_sites: FeatureCollection GeoJSON dict containing all IPIS
mining site locations as Points.
ipis_mining_polygons: Object that can be converted to an
ee.FeatureCollection. See to_earthengine_featurecollection() for
            available options. Defaults to Sina's Fusion Table.
        image_root: string. Unused.
Returns:
numpy array of shape [100, 100] with values {0, 1}, where 0.0 == no mine
and 1.0 == mine, centered at the location described by img_metadata.
"""
    # If None, use the Fusion Table of mining-site polygons that Sina created.
    if ipis_mining_polygons is None:
        ipis_mining_polygons = DEFAULT_IPIS_MINING_POLYGONS
    ipis_mining_polygons = to_earthengine_featurecollection(ipis_mining_polygons)
ipis_mining_image = ipis_mining_polygons.reduceToImage(
properties=['mine'],
reducer=earth_engine.Reducer.first()) # earth_engine.Image() type
# Get Point corresponding to this image from IPIS dataset.
roi_id = img_metadata['id']
if ipis_mining_sites is None:
ipis_mining_sites = load_ipis_mining_sites_dataset()
roi = ipis_mining_sites['features'][roi_id]['geometry']
assert roi['type'] == 'Point'
# Create a circle around the point with a given buffer size (in meters).
buff = 1500 # radius of 1500 meters about the point.
roi_point = earth_engine.Geometry.Point(roi['coordinates'])
roi_buff = earth_engine.Geometry.buffer(roi_point, buff) # ee.Geometry()
roi_buff = roi_buff.getInfo() # GeoJSON dict
# Download image containing circle from Earth Engine.
scale = 30 # 30 meters/pixel --> circle with 100 pixel diameter.
mask = load_map_tile_containing_roi(
ipis_mining_image, roi_buff['coordinates'], scale=scale)
# Some images are 101 x 101, some are 100 x 100. Let's ensure they're all
# 100 x 100.
mask = mask[:100, :100]
assert mask.shape[2] == 1, 'Mask has > 1 band.'
return mask.reshape(mask.shape[0], mask.shape[1])
def load_map_tile_containing_roi(image, roi, scale=30):
"""Get rasterized image containing ROI from Earth Engine.
Constructs a rasterized image tile subsetting 'image'. The image is large
enough to fully contain the polygon described by 'roi', and will contain
one pixel per 'scale' m^2 area.
Args:
image: ee.Image instance. To be used as mask. Must have exactly 1 band.
roi: Triple-nested list of floats, where lowest level is [longitude,
latitude] pairs from 'coordinates' of a GeoJSON polygon.
scale: int. Number of squared meters per pixel.
Returns:
numpy array of shape [N x M x K], where N is width, M is height, and K is
number of bands.
"""
# Generate a random filename.
filename = ''.join(np.random.choice(list(string.ascii_letters), size=10))
# Download image containing ROI.
url = earth_engine.data.makeDownloadUrl(
earth_engine.data.getDownloadId({
'image': image.serialize(),
'scale': '%d' % scale,
'filePerBand': 'false',
'name': filename,
'region': roi
}))
local_zip, headers = urllib.urlretrieve(url)
with zipfile.ZipFile(local_zip) as local_zipfile:
local_tif_filename = local_zipfile.extract(
filename + '.tif', tempfile.mkdtemp())
# Read image into memory. Result has shape [x, y, color bands].
dataset = gdal.Open(local_tif_filename, gdal.GA_ReadOnly)
bands = [dataset.GetRasterBand(i + 1).ReadAsArray()
for i in range(dataset.RasterCount)]
return np.stack(bands, axis=2)
def save_images(image_root, images, metadata):
"""Store a list of images to disk."""
assert len(images) == len(metadata)
if not os.path.exists(image_root):
os.makedirs(image_root)
for (img, (_, img_metadata)) in zip(images, metadata.iterrows()):
save_image(image_root, img, img_metadata)
def save_image(image_root, img, img_metadata):
"""Store a single image to disk."""
if not os.path.exists(image_root):
os.makedirs(image_root)
fname = os.path.join(image_root, img_metadata['fpath'])
dname = os.path.dirname(fname)
if not os.path.exists(dname):
os.makedirs(dname)
c = bcolz.carray(img, rootdir=fname, mode='w')
c.flush()
def save_images_with_hdf5(image_root, images, metadata):
assert len(images) > 0, "Must have 1+ images to write."
# Make directory if necessary.
if not os.path.exists(image_root):
os.makedirs(image_root)
# Construct an empty HDF5 dataset on disk.
image_shape = images[0].shape
initial_images_shape = (len(images),) + image_shape
max_images_shape = (None,) + image_shape
with h5py.File(os.path.join(image_root, "images.h5"), "w") as h5f:
dataset = h5f.create_dataset(
"images", initial_images_shape, maxshape=max_images_shape)
# Write images into space.
for i, image in enumerate(images):
dataset[i] = image
def save_images_with_bcolz(image_root, imgs, metadata):
assert len(imgs) == len(metadata)
# Make directory if necessary.
if not os.path.exists(image_root):
os.makedirs(image_root)
# Construct a bcolz array with the first image only.
assert len(imgs) > 0, "Must have 1+ images to write."
output_shape = (1, ) + imgs[0].shape
with bcolz.carray(imgs[0].reshape(output_shape), rootdir=os.path.join(image_root, "images"), mode="w") as array:
# Add all other images.
for i, img in enumerate(imgs):
if i == 0:
continue
array.append(img.reshape(output_shape))
def load_images_with_hdf5(image_root):
"""Load all images from HDF5 array."""
with h5py.File(os.path.join(image_root, "images.h5")) as h5f:
return h5f['images'][:]
def load_image_with_hdf5(image_root, img_metadata):
"""Load all images from HDF5 array."""
with h5py.File(os.path.join(image_root, "images.h5")) as h5f:
return h5f['images'][int(img_metadata.name)]
def load_images_with_bcolz(image_root):
"""Load all images from bcolz array."""
with bcolz.open(os.path.join(image_root, "images")) as array:
return array[:]
def load_image_with_bcolz(image_root, img_metadata):
"""Load a single image from bcolz array."""
with bcolz.open(os.path.join(image_root, "images")) as array:
return array[int(img_metadata.name)]
def plot_image(image, metadata=None, band=None, ax=None, cmap='gray'):
ax = ax or plt.gca()
# Aggregate over time.
if len(image.shape) == 4:
image = np.nanmedian(image, axis=TIME_AXIS)
# Select only the bands requested.
if len(image.shape) == 3:
assert band is not None, "You must choose a band to plot."
assert metadata is not None, "metadata required to select color band."
band_index = metadata['bands'].index(band)
image = image[:, :, band_index]
ax.imshow(image, cmap=cmap)
ax.xaxis.set_ticks([])
ax.yaxis.set_ticks([])
return ax
def canonicalize_image(img, img_metadata):
"""Canonicalize image for machine learning models.
- Aggregates across 2016/06 to 2017/06
- Drops all bands but B1...B11.
"""
img_metadata = img_metadata.copy()
# Get all dates in a 12 month span.
dates = [date for date in img_metadata['dates']
if date >= '20160601' and date < '20170601']
if len(dates) < 12:
raise ValueError(
"Found %d dates for the following image when 12 were expected. %s"
% (len(dates), img_metadata))
img_metadata['dates'] = dates
# Aggregate across 12 month span (hides cloud cover). Only keep the start
# date in the metadata, as there's exactly one date dimension.
img = np.nanmedian(img, axis=TIME_AXIS, keepdims=True)
img_metadata['dates'] = [dates[0]]
# Only keep raw bands. All others bands are simple functions of these.
bands = [band for band in img_metadata['bands']
if re.search('^B\d+$', band) is not None]
band_indices = [img_metadata['bands'].index(band) for band in bands]
img = img[:, :, band_indices]
img_metadata['bands'] = bands
img_metadata["dim"] = img.shape
return img, img_metadata
def canonicalize_image_by_month(img, img_metadata, band=None):
"""Canonicalize an image by taking its median pixel value per month.
Args:
img: numpy array, shape [height, width, num color bands, num dates].
img_metadata: pandas Series. Contains 'bands' and 'dates' entries.
band: None, string, or list of strings. If None, output all color
bands. If string, output a single color band, if list of strings,
output one color band per string.
"""
    assert len(img.shape) == 4, "img must be [width, height, color band, time]."
# Select bands to process.
if band is None:
bands = img_metadata["bands"]
    elif isinstance(band, basestring):
bands = [band]
elif isinstance(band, list):
bands = band
else:
raise ValueError("Unrecognized type for argument 'band': %s" % band)
band_idxs = [img_metadata["bands"].index(b) for b in bands]
img_band = img[:, :, band_idxs, :]
# Extract month out of each date (YYYYMMDD string)
dates = pd.DataFrame({"dates": img_metadata['dates']})
dates["month"] = dates["dates"].str.slice(4, 6)
# Construct result image. There will be 12 months.
width, height, _, _ = img.shape
result_img = np.full((width, height, len(bands), 12), np.nan)
for month, group in dates.groupby("month"):
# Select the appropriate time, color bands.
time_idxs = list(group.index)
img_month = img_band[:, :, :, time_idxs]
# Take median pixel intensity over time.
result_img[:, :, :, int(month) - 1] = np.nanmedian(
img_month, axis=[TIME_AXIS])
# Construct new metadata. We'll use the first date for each month in the
# grouping.
result_metadata = img_metadata.copy()
result_metadata["dim"] = result_img.shape
result_metadata["bands"] = bands
result_metadata["dates"] = list(dates.groupby("month").first()["dates"])
return result_img, result_metadata
def merge_canonical_image_and_mask(canonical_img, mask, img_metadata):
"""Combine canonical_image and mask into a single array."""
# Ensure canonical_img and mask have the same shape.
assert len(canonical_img.shape) == 4
mask = np.reshape(mask, [mask.shape[0], mask.shape[1], 1, 1])
# Copy time dim as many times as necessary to match 'canonical_img'.
mask = np.tile(mask, [1, 1, 1, canonical_img.shape[3]])
# Concatenate mask as the final band.
canonical_img = np.concatenate([canonical_img, mask], axis=BAND_AXIS)
# Add 'mask' as the final band to the metadata.
img_metadata = img_metadata.copy()
img_metadata['bands'] = img_metadata['bands'] + ['mask']
return canonical_img, img_metadata
def plot_monthly_image(img, img_metadata):
assert len(
img.shape) == 4, "img shape must be [height, width, color band, month]"
assert img.shape[3] == 12, "img must have 1 entry per month for every color band."
months = ["Jan", "Feb", "Mar", "Apr", "May", "June",
"July", "Aug", "Sept", "Oct", "Nov", "Dec"]
num_cols = len(img_metadata["bands"])
num_rows = len(months)
plt.figure(figsize=(2 * num_cols, 2 * num_rows))
for i in range(num_rows):
for j in range(num_cols):
ax = plt.subplot(num_rows, num_cols, i * num_cols + j + 1)
ax.set_title("%s/%s" % (months[i], img_metadata["bands"][j]))
plot_image(img[:, :, j, i])
| [
"[email protected]"
]
| |
61e7ac4e48aa441ddac70c7a136199bc95ef0cb8 | 97be97cfc56fb2170b60b91063dbfe5f1449e3c0 | /python/ABC189/D.py | 7e2f46bfedc29c348c5d23cf98f1faf6718dbc94 | []
| no_license | iWonder118/atcoder | 73d965a0a9ade189733808e47634f2b7776aad4b | 3ab7271e838a2903ff0e07f94015ef13c59577e1 | refs/heads/master | 2022-01-25T10:10:55.007340 | 2021-12-31T14:04:54 | 2021-12-31T14:04:54 | 245,155,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | n = int(input())
logics = [input() for _ in range(n)]
# DP: count the assignments that make the prefix expression True (t) / False (f).
t, f = 1, 1
for i in range(n):
    if logics[i] == "AND":
        t, f = t, t + 2 * f
    else:  # "OR"
        t, f = 2 * t + f, f
print(t)
| [
"[email protected]"
]
| |
46e425071b72856e84300bad5e705cc2c7dff76d | 800b5cd8c3d58b60d80aca551e54af28ec3c9f18 | /code/chapter_05_example_14.py | 81334fc4e4cc158d144cc5ba91bcb59c006f0045 | []
| no_license | CyberLight/two-scoops-of-django-1.8 | 6591347cb20f3c16e252943c04f0f524f8e8b235 | 423971ad609ec9a552617fc4f7424e701295c09b | refs/heads/master | 2021-01-21T03:02:52.704822 | 2015-05-11T16:32:31 | 2015-05-11T16:32:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,626 | py | """
Using This Code Example
=========================
The code examples provided are provided by Daniel Greenfeld and Audrey Roy of
Two Scoops Press to help you reference Two Scoops of Django: Best Practices
for Django 1.8. Code samples follow PEP-0008, with exceptions made for the
purposes of improving book formatting. Example code is provided "as is", and
is not intended to be, and should not be considered or labeled as "tutorial code".
Permissions
============
In general, you may use the code we've provided with this book in your programs
and documentation. You do not need to contact us for permission unless you're
reproducing a significant portion of the code or using it in commercial
distributions. Examples:
* Writing a program that uses several chunks of code from this course does not require permission.
* Selling or distributing a digital package from material taken from this book does require permission.
* Answering a question by citing this book and quoting example code does not require permission.
* Incorporating a significant amount of example code from this book into your product's documentation does require permission.
Attributions usually include the title, author, publisher and an ISBN. For
example, "Two Scoops of Django: Best Practices for Django 1.8, by Daniel
Roy Greenfeld and Audrey Roy Greenfeld. Copyright 2015 Two Scoops Press (ISBN-GOES-HERE)."
If you feel your use of code examples falls outside fair use of the permission
given here, please contact us at [email protected]."""
# Top of settings/production.py
import os
SOME_SECRET_KEY = os.environ["SOME_SECRET_KEY"]
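# A common defensive variant (added illustration, not from the book excerpt):
# fail with a clearer error when the environment variable is missing.
#
# from django.core.exceptions import ImproperlyConfigured
#
# def get_env_variable(var_name):
#     try:
#         return os.environ[var_name]
#     except KeyError:
#         raise ImproperlyConfigured("Set the %s environment variable" % var_name)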
| [
"[email protected]"
]
| |
6f2edb09e5c1f151145ab5c1adacec423009c475 | e452f89c51180487f2ed68c33ca2fed54e14a967 | /1-Python-Programming-Basics (Sep 2020)/Course-Exercises-and-Exams/03_Conditional-Statements-Advanced/01.Lab-04-Personal-Titles.py | 72a970d05c0e96713bf60476264312a5d9ccd0bc | [
"MIT"
]
| permissive | karolinanikolova/SoftUni-Software-Engineering | c996f18eea9fb93164ab674614e90b357ef4858a | 7891924956598b11a1e30e2c220457c85c40f064 | refs/heads/main | 2023-06-21T23:24:55.224528 | 2021-07-22T16:15:59 | 2021-07-22T16:15:59 | 367,432,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | # 4. Обръщение според възраст и пол
# Да се напише конзолна програма, която прочита възраст (реално число) и пол ('m' или 'f'), въведени от потребителя, и отпечатва обръщение измежду следните:
# • "Mr." – мъж (пол 'm') на 16 или повече години
# • "Master" – момче (пол 'm') под 16 години
# • "Ms." – жена (пол 'f') на 16 или повече години
# • "Miss" – момиче (пол 'f') под 16 години
age = float(input())
sex = input()
if sex == 'f':
if age >= 16:
print('Ms.')
elif age < 16:
print('Miss')
elif sex == 'm':
if age >= 16:
print('Mr.')
elif age < 16:
print('Master')
| [
"[email protected]"
]
| |
18a62f5f58f3eacf0f4b6e83ac4fda4770a77484 | a2e638cd0c124254e67963bda62c21351881ee75 | /Extensions/StructuredProductsDealPackage/FPythonCode/SP_ModuleReload.py | ed019b05682e9d07250ac27a96aa65a7a6824bdd | []
| no_license | webclinic017/fa-absa-py3 | 1ffa98f2bd72d541166fdaac421d3c84147a4e01 | 5e7cc7de3495145501ca53deb9efee2233ab7e1c | refs/heads/main | 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,569 | py |
# Need to add
# Additional Info
# - Participation (DealPackage)
# - CapitalProtection (DealPackage)
# - StrikePricePct (Instrument)
# - BarrierLevelPct (Instrument)
# - ProductQuantity (Deal Package)
# - AccumulatorLeverage
# Exotic Events
# - Initial Fixing
# ChoiceLists
# - AccDec (Val Group)
# - accDecModelDesc (Valuation Extension)
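# Reload order matters here: base/helper modules are reloaded first so that
# the dependent modules further down pick up the refreshed definitions.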
import SP_DealPackageHelper
import importlib
importlib.reload(SP_DealPackageHelper)
import SP_BusinessCalculations
importlib.reload(SP_BusinessCalculations)
import CompositeComponentBase
importlib.reload(CompositeComponentBase)
import CompositeExoticEventComponents
importlib.reload(CompositeExoticEventComponents)
import CompositeExoticComponents
importlib.reload(CompositeExoticComponents)
import CompositeOptionAdditionComponents
importlib.reload(CompositeOptionAdditionComponents)
import CompositeCashFlowComponents
importlib.reload(CompositeCashFlowComponents)
import CompositeOptionComponents
importlib.reload(CompositeOptionComponents)
import CompositeBasketComponents
importlib.reload(CompositeBasketComponents)
import CompositeBasketOptionComponents
importlib.reload(CompositeBasketOptionComponents)
import CompositeTradeComponents
importlib.reload(CompositeTradeComponents)
import StructuredProductBase
importlib.reload(StructuredProductBase)
import Validation_BarrierReverseConvertible
importlib.reload(Validation_BarrierReverseConvertible)
import SP_BarrierReverseConvertible
importlib.reload(SP_BarrierReverseConvertible)
import SP_CapitalProtectedNote
importlib.reload(SP_CapitalProtectedNote)
import SP_EqStraddle
importlib.reload(SP_EqStraddle)
import SP_CallPutSpread
importlib.reload(SP_CallPutSpread)
import SP_DualCurrencyDeposit
importlib.reload(SP_DualCurrencyDeposit)
import SP_WeddingCakeDeposit
importlib.reload(SP_WeddingCakeDeposit)
import SP_AccumulatorSetup
importlib.reload(SP_AccumulatorSetup)
import SP_AccumulatorCustomInsDef
importlib.reload(SP_AccumulatorCustomInsDef)
import SP_AccumulatorValuation
importlib.reload(SP_AccumulatorValuation)
import SP_AccumulatorModel
importlib.reload(SP_AccumulatorModel)
import SP_AccumulatorDealPackage
importlib.reload(SP_AccumulatorDealPackage)
import SP_Autocall
importlib.reload(SP_Autocall)
import SP_CapitalProtectedCertificate
importlib.reload(SP_CapitalProtectedCertificate)
import SP_CustomTradeActions
importlib.reload(SP_CustomTradeActions)
import SP_InvokeTradeActions
importlib.reload(SP_InvokeTradeActions)
import CustomLifeCycleEvents
importlib.reload(CustomLifeCycleEvents)
| [
"[email protected]"
]
| |
d60cd1bfe7525f7f1d1505b330008095c64c52b2 | 5e59252778f8b6465f6e9c4a1890297624cab8f8 | /shell.py | 15b5a123b00f2886e529971c6a178f4639a69ac8 | []
| no_license | tazjel/rpathcmd | fa62dfed77d56ea100c8f76a035486b2761058ee | 0ebffe639f329665824fdd94d8b5c89ce695f153 | refs/heads/master | 2021-01-16T20:03:25.225459 | 2012-11-05T16:09:17 | 2012-11-05T16:09:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,104 | py | #
# Licensed under the GNU General Public License Version 3
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2012 James Tanner <[email protected]>
#
# NOTE: the 'self' variable is an instance of RpathShell
import atexit, logging, os, readline, re, sys
from cmd import Cmd
from pwd import getpwuid
from rpathcmd.utils import *
import pdb
class RpathShell(Cmd):
__module_list = [ 'api', 'projects', 'groups', 'systems', 'images', 'platforms', 'targets', 'packages']
# a SyntaxError is thrown if we don't wrap this in an 'exec'
for module in __module_list:
exec 'from %s import *' % module
# maximum length of history file
HISTORY_LENGTH = 1024
cmdqueue = []
completekey = 'tab'
stdout = sys.stdout
#prompt_template = 'rpathcmd {SSM:##}> '
prompt_template = 'rpathcmd> '
current_line = ''
# do nothing on an empty line
emptyline = lambda self: None
def __init__(self, options):
self.session = ''
self.username = ''
self.server = ''
self.ssm = {}
self.postcmd(False, '')
# make the options available everywhere
self.options = options
#pdb.set_trace()
userinfo = getpwuid(os.getuid())
self.conf_dir = os.path.join(userinfo[5], '.spacecmd')
try:
if not os.path.isdir(self.conf_dir):
os.mkdir(self.conf_dir, 0700)
except OSError:
logging.error('Could not create directory %s' % self.conf_dir)
self.history_file = os.path.join(self.conf_dir, 'history')
try:
# don't split on hyphens or colons during tab completion
newdelims = readline.get_completer_delims()
newdelims = re.sub(':|-|/', '', newdelims)
readline.set_completer_delims(newdelims)
if not options.nohistory:
try:
if os.path.isfile(self.history_file):
readline.read_history_file(self.history_file)
readline.set_history_length(self.HISTORY_LENGTH)
# always write the history file on exit
atexit.register(readline.write_history_file,
self.history_file)
except IOError:
logging.error('Could not read history file')
except:
pass
# handle commands that exit the shell
def precmd(self, line):
# remove leading/trailing whitespace
line = re.sub('^\s+|\s+$', '', line)
# don't do anything on empty lines
if line == '':
return ''
# terminate the shell
if re.match('quit|exit|eof', line, re.I):
print
sys.exit(0)
# don't attempt to login for some commands
if re.match('help|login|logout|whoami|history|clear', line, re.I):
return line
# login before attempting to run a command
#if not self.session:
#pdb.set_trace()
#self.do_login('')
#if self.session == '': return ''
parts = line.split()
if len(parts):
command = parts[0]
else:
return ''
if len(parts[1:]):
args = ' '.join(parts[1:])
else:
args = ''
# print the help message if the user passes '--help'
if re.search('--help', line):
return 'help %s' % command
# should we look for an item in the history?
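        # Bash-style history expansion is handled below:
        #   !!         repeat the previous command
        #   !<number>  run the history item with that number
        #   !<prefix>  run the most recent command starting with <prefix>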
if command[0] != '!' or len(command) < 2:
return line
# remove the '!*' line from the history
self.remove_last_history_item()
history_match = False
if command[1] == '!':
# repeat the last command
line = readline.get_history_item(
readline.get_current_history_length())
if line:
history_match = True
else:
logging.warning('%s: event not found' % command)
return ''
# attempt to find a numbered history item
if not history_match:
try:
number = int(command[1:])
line = readline.get_history_item(number)
if line:
history_match = True
else:
raise Exception
except IndexError:
pass
except ValueError:
pass
# attempt to match the beginning of the string with a history item
if not history_match:
history_range = range(1, readline.get_current_history_length())
history_range.reverse()
for i in history_range:
item = readline.get_history_item(i)
if re.match(command[1:], item):
line = item
history_match = True
break
# append the arguments to the substituted command
if history_match:
line += ' %s' % args
readline.add_history(line)
print line
return line
else:
logging.warning('%s: event not found' % command)
return ''
# update the prompt with the SSM size
def postcmd(self, stop, line):
self.prompt = re.sub('##', str(len(self.ssm)), self.prompt_template)
# vim:ts=4:expandtab:
| [
"[email protected]"
]
| |
a98c0f87c5e54efc98415dca9576d0bcecc3346f | aae551baa369fda031f363c2afbdf1984467f16d | /Machine_Learning/Programming_Assignments/CS15B001_PA3/Code/q2/bernoulli.py | 59000649f234d836785dc85871bffe40b30ef448 | []
| no_license | ameet-1997/Course_Assignments | 37f7d4115baec383ccf029772efcf9c33beb2a23 | 629e9d5cfc6fa6cf37a96c5fcc33bc669cbdc59d | refs/heads/master | 2021-05-16T16:23:32.731296 | 2018-02-03T05:57:01 | 2018-02-03T05:57:01 | 119,939,202 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,114 | py | import pandas as pd
import numpy as np
from scipy import sparse
import os
import functions
import time
from sklearn.model_selection import KFold
from sklearn.metrics import precision_recall_fscore_support
from tabulate import tabulate
from sklearn.naive_bayes import BernoulliNB, MultinomialNB
from sklearn.metrics import precision_recall_curve
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
# Load the data
data_matrix = functions.load_sparse_csr('data_sparse').todense()
labels_matrix = np.loadtxt('labels.csv', delimiter=',')
# Cross Validation
kf = KFold(n_splits=5)
counter = 0
[avr_prec, avr_rec, avr_fsc] = [.0,.0,.0]
for train_index, test_index in kf.split(data_matrix):
counter += 1
data_train, data_test = data_matrix[train_index], data_matrix[test_index]
labels_train, labels_test = labels_matrix[train_index], labels_matrix[test_index]
b = BernoulliNB()
b.fit(data_train, labels_train)
predicted_labels = b.predict(data_test)
# # Estimate the class priors
# spam_prior = float(np.count_nonzero(labels_train == 0))/labels_train.shape[0]
# ham_prior = float(np.count_nonzero(labels_train == 1))/labels_train.shape[0]
# # Estimate the conditional probabilities
# # Get all spam articles and get the column sum
# # Do the same for all ham articles
# # Add-1 smoothing is performed here
# cond_ham = ((np.count_nonzero(data_train[labels_train==1], axis=0)+1).astype(dtype=float))/(data_train[labels_train==1].shape[0]+2)
# cond_spam = ((np.count_nonzero(data_train[labels_train==0], axis=0)+1).astype(dtype=float))/(data_train[labels_train==0].shape[0]+2)
# # Using log so that there are no underflow problems
# predicted_labels = np.ones(shape=labels_test.shape, dtype=float)
# for i in range(predicted_labels.shape[0]):
# score_ham = np.sum(np.multiply(np.log(cond_ham), data_test[i,:]))+np.log(ham_prior)
# score_spam = np.sum(np.multiply(np.log(cond_spam), data_test[i,:]))+np.log(spam_prior)
# if score_spam > score_ham:
# predicted_labels[i] = 0
# else:
# predicted_labels[i] = 1
# print("Fold Number "+str(counter))
[prec,rec,fsc,sup] = precision_recall_fscore_support(labels_test, predicted_labels)
avr_prec += prec[1]
avr_rec += rec[1]
avr_fsc += fsc[1]
# print tabulate([prec, rec, fsc], headers=['Spam', 'Ham'])
# print("")
print("")
print("Average Scores for Spam Class")
print("Precision: "+str(avr_prec/5))
print("Recall: "+str(avr_rec/5))
print("FScore: "+str(avr_fsc/5))
# Plot the PR Curves
train_data, test_data, train_labels, test_labels = train_test_split(data_matrix, labels_matrix, test_size=0.33, random_state=42)
m = BernoulliNB()
m.fit(train_data, train_labels)
probab = m.predict_proba(test_data)
precision_, recall_, threshold_ = precision_recall_curve(test_labels, probab[:,1])
fig = plt.figure()
fig.suptitle('Precision Recall Curve')
ax = fig.add_subplot(111)
ax.set_xlabel('Precision')
ax.set_ylabel('Recall')
# ax.fill(precision_,np.zeros(shape=precision_.shape),'b')
p = [0]
r = [1]
p.extend(list(precision_))
r.extend(list(recall_))
ax.fill(p, r,'b', zorder=5)
plt.plot(p, r)
plt.show() | [
"[email protected]"
]
| |
2241916c7d68776e94af575a2559596e236b1ca4 | 6c298f03496560276fb9f478cbefc218ecd24e9a | /VoiceInput/program/lib/voiceinput.py | 7f661347d3c4a859be5930192ef02c22284a2b7f | []
| no_license | koenschepens/OldPhone | 1f3fccd6018e14e779373243a0e90a759a7425f9 | 5ac9247d0c9e08d6af8fb384479c53b48c174aa6 | refs/heads/master | 2021-01-10T08:31:43.368378 | 2016-03-26T19:06:07 | 2016-03-26T19:06:07 | 43,725,567 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,048 | py | import sys
import xbmc, xbmcgui, xbmcaddon
try:
import simplejson
except ImportError:
import json as simplejson
import httplib
__addon__ = xbmcaddon.Addon()
__cwd__ = __addon__.getAddonInfo('path').decode("utf-8")
__language__ = __addon__.getLocalizedString
# LOWER is referenced below but was never defined in the original file;
# it is assumed here to be a case-mode constant for the on-screen keyboard.
LOWER = 0
class InputWindow(xbmcgui.WindowXMLDialog):
def __init__( self, *args, **kwargs ):
self.Kodi14 = False
self.CTL_NUM_START = 48
self.CTL_NUM_END = 57
self.CTL_LABEL_EDIT = 310
self.strEdit = kwargs.get("default").decode('utf-8') or u""
self.strHeading = kwargs.get("heading") or ""
self.bIsConfirmed = False
self.oldPhone = True
self.keyType = LOWER
self.words = []
self.hzcode = ''
self.pos = 0
self.num = 0
xbmcgui.WindowXMLDialog.__init__(self)
xbmc.log(msg="HEE HALLO@!!", level=xbmc.LOGDEBUG)
def initControl(self):
pEdit = self.getControl(self.CTL_LABEL_EDIT)
px = pEdit.getX()
py = pEdit.getY()
pw = pEdit.getWidth()
ph = pEdit.getHeight()
self.listw = pw - 95
self.CTL_HZCODE = xbmcgui.ControlLabel(px, py + ph, 90, 30, '')
self.CTL_HZLIST = xbmcgui.ControlLabel(px + 95, py + ph, pw - 95, 30, '')
self.addControl(self.CTL_HZCODE)
self.addControl(self.CTL_HZLIST)
    def isConfirmed(self):
        # added: Keyboard.doModal() calls this, but it was missing from the class
        return self.bIsConfirmed
    def getText(self):
        return "MONGOL!"
class Keyboard:
def __init__( self, default='', heading='' ):
self.bIsConfirmed = False
self.strEdit = default
self.strHeading = heading
    def doModal(self):
self.win = InputWindow("DialogKeyboard.xml", __cwd__, heading=self.strHeading, default=self.strEdit )
self.win.doModal()
self.bIsConfirmed = self.win.isConfirmed()
self.strEdit = self.win.getText()
del self.win
def setHeading(self, heading):
self.strHeading = "WHOWHOWWWWOOOOO"
def isConfirmed(self):
return self.bIsConfirmed
def getText(self):
return "youtube" | [
"[email protected]"
]
| |
5957fddb7c3f6e6aa3a69b0ba94279abc367d105 | d09b14a13e05adcd3d0f1714384b3ab65be4aa7c | /controller/UserRoleManagementDialog.py | 88b9e2a569b3cea650f01750526b2b90f2bedf97 | []
| no_license | ankhbold/lm2 | bd61a353b95d6d8e351cf4b0af48b1b936db8b9f | 30dfbeced57f123d39a69cb4d643a15429b8bfde | refs/heads/master | 2021-07-24T20:57:16.534659 | 2017-11-03T16:33:43 | 2017-11-03T16:33:43 | 90,375,636 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64,172 | py | __author__ = 'ankhaa'
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from sqlalchemy import exc, or_
from sqlalchemy.exc import DatabaseError, SQLAlchemyError
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import func, or_, and_, desc,extract
from inspect import currentframe
from ..view.Ui_UserRoleManagementDialog import *
from ..model.SetRole import *
from ..model.AuLevel1 import *
from ..model.AuLevel2 import *
from ..model.LM2Exception import LM2Exception
from ..model.DialogInspector import DialogInspector
from ..model.ClPositionType import *
from ..model.ClGroupRole import *
from ..model.SetPositionGroupRole import *
from ..model.SetUserPosition import *
from ..model.SetUserGroupRole import *
from ..utils.PluginUtils import *
from ..controller.UserRoleManagementDetialDialog import *
from uuid import getnode as get_mac
import commands
import datetime
import socket
import sys
import struct
INTERFACE_NAME = "eth0"
class UserRoleManagementDialog(QDialog, Ui_UserRoleManagementDialog):
GROUP_SEPARATOR = '-----'
PW_PLACEHOLDER = '0123456789'
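    # PW_PLACEHOLDER is shown in the password fields when an existing user is
    # selected; the role's password is only ALTERed if the field content no
    # longer matches this placeholder.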
def __init__(self, has_privilege , user, parent=None):
super(UserRoleManagementDialog, self).__init__(parent)
self.setupUi(self)
self.db_session = SessionHandler().session_instance()
self.has_privilege = has_privilege
self.__username = user
self.__privilage()
self.__setup_combo_boxes()
self.__populate_user_role_lwidget()
self.__populate_group_lwidget()
self.__populate_au_level1_cbox()
self.close_button.clicked.connect(self.reject)
        # usernames must be 4 lowercase letters followed by 6 digits;
        # passwords may contain lowercase letters, digits and underscores
reg_ex = QRegExp(u"[a-z]{4}[0-9]{6}")
validator = QRegExpValidator(reg_ex, None)
reg_ex = QRegExp(u"[a-z_0-9]+")
validator_pass = QRegExpValidator(reg_ex, None)
database = QSettings().value(SettingsConstants.DATABASE_NAME)
self.username_edit.setText('user'+ database[-4:])
self.username_edit.setValidator(validator)
self.password_edit.setValidator(validator_pass)
self.retype_password_edit.setValidator(validator_pass)
self.__setup_validators()
self.selected_user = None
# self.mac_address = self.get_mac_address()
# self.mac_address_edit.setText(self.mac_address)
self.__setup_twidget()
        self.__load_default_right_grud()
def __setup_twidget(self):
self.user_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.user_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.user_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.user_twidget.setSortingEnabled(True)
self.position_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.position_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.position_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.position_twidget.setSortingEnabled(True)
self.settings_position_twidget.setAlternatingRowColors(True)
self.settings_position_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.settings_position_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.settings_right_grud_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.settings_right_grud_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.settings_right_grud_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.settings_right_grud_twidget.setSortingEnabled(True)
self.settings_right_grud_twidget.setColumnWidth(0, 170)
self.settings_right_grud_twidget.setColumnWidth(1, 170)
self.settings_right_grud_twidget.setColumnWidth(2, 45)
self.settings_right_grud_twidget.setColumnWidth(3, 45)
self.settings_right_grud_twidget.setColumnWidth(4, 45)
self.settings_right_grud_twidget.setColumnWidth(5, 45)
self.right_grud_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.right_grud_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.right_grud_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.right_grud_twidget.setSortingEnabled(True)
self.right_grud_twidget.setColumnWidth(0, 170)
self.right_grud_twidget.setColumnWidth(1, 45)
self.right_grud_twidget.setColumnWidth(2, 45)
self.right_grud_twidget.setColumnWidth(3, 45)
self.right_grud_twidget.setColumnWidth(4, 45)
@pyqtSlot(int)
def on_get_mac_checkbox_stateChanged(self, state):
if state == Qt.Checked:
self.mac_address = self.get_mac_address()
self.mac_address_edit.setText(self.mac_address)
else:
self.mac_address_edit.clear()
def __setup_validators(self):
self.mac_validator = QRegExpValidator(
QRegExp("[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}"),
None)
self.mac_address_edit.setValidator(self.mac_validator)
def get_mac_address(self):
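        # Scrapes the MAC address from `ipconfig /all` (Windows) or
        # `/sbin/ifconfig` output; note the Windows branch assumes an
        # English-locale 'Physical Address' label.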
if sys.platform == 'win32':
for line in os.popen("ipconfig /all"):
if line.lstrip().startswith('Physical Address'):
mac = line.split(':')[1].strip().replace('-', ':')
if len(mac) == 17:
mac = line.split(':')[1].strip().replace('-', ':')
break
else:
for line in os.popen("/sbin/ifconfig"):
if line.find('Ether') > -1:
mac = line.split()[4]
if len(mac) == 17:
mac = line.split(':')[1].strip().replace('-', ':')
break
return mac
def get_macaddress(self, host):
""" Returns the MAC address of a network host, requires >= WIN2K. """
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/347812
import ctypes
import socket
import struct
# Check for api availability
try:
SendARP = ctypes.windll.Iphlpapi.SendARP
except:
raise NotImplementedError('Usage only on Windows 2000 and above')
# Doesn't work with loopbacks, but let's try and help.
if host == '127.0.0.1' or host.lower() == 'localhost':
host = socket.gethostname()
# gethostbyname blocks, so use it wisely.
try:
inetaddr = ctypes.windll.wsock32.inet_addr(host)
if inetaddr in (0, -1):
raise Exception
except:
hostip = socket.gethostbyname(host)
inetaddr = ctypes.windll.wsock32.inet_addr(hostip)
buffer = ctypes.c_buffer(6)
addlen = ctypes.c_ulong(ctypes.sizeof(buffer))
if SendARP(inetaddr, 0, ctypes.byref(buffer), ctypes.byref(addlen)) != 0:
            raise WindowsError('Retrieval of mac address (%s) failed' % host)
# Convert binary data into a string.
macaddr = ''
for intval in struct.unpack('BBBBBB', buffer):
if intval > 15:
replacestr = '0x'
else:
replacestr = 'x'
if macaddr != '':
macaddr = ':'.join([macaddr, hex(intval).replace(replacestr, '')])
else:
macaddr = ''.join([macaddr, hex(intval).replace(replacestr, '')])
return macaddr.upper()
def __privilage(self):
if not self.has_privilege:
self.groupBox_2.setEnabled(False)
self.add_button.setEnabled(False)
self.delete_button.setEnabled(False)
self.username_edit.setEnabled(False)
self.phone_edit.setEnabled(False)
self.surname_edit.setEnabled(False)
self.firstname_edit.setEnabled(False)
self.email_edit.setEnabled(False)
self.position_cbox.setEnabled(False)
self.mac_address_edit.setEnabled(False)
self.groupBox_3.setEnabled(False)
def __setup_combo_boxes(self):
try:
positions = self.db_session.query(ClPositionType).all()
for position in positions:
self.position_cbox.addItem(position.description, position.code)
except SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("Query Error"), self.tr("Error in line {0}: {1}").format(currentframe().f_lineno, e.message))
return
# def set_username(self, username):
#
# self.__username = username
def __populate_user_role_lwidget(self):
self.user_role_lwidget.clear()
if self.has_privilege:
users = self.db_session.query(SetRole.user_name).order_by(SetRole.user_name).group_by(SetRole.user_name)
else:
users = self.db_session.query(SetRole.user_name).filter(SetRole.user_name == self.__username).group_by(SetRole.user_name).all()
try:
for user in users:
item = QListWidgetItem(QIcon(":/plugins/lm2/person.png"), user.user_name)
# if user.user_name == self.__logged_on_user():
item.setForeground(Qt.blue)
# if self.__is_db_role(user.user_name):
self.user_role_lwidget.addItem(item)
except (DatabaseError, SQLAlchemyError), e:
PluginUtils.show_error(self, self.tr("Database Error"), e.message)
def __is_db_role(self, user_name):
try:
sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
count = self.db_session.execute(sql).fetchone()
return True if count[0] == 1 else False
except SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
def __populate_group_lwidget(self):
self.group_lwidget.clear()
self.member_lwidget.clear()
QListWidgetItem("land_office_administration", self.group_lwidget)
QListWidgetItem("db_creation", self.group_lwidget)
QListWidgetItem("role_management", self.group_lwidget)
QListWidgetItem(self.GROUP_SEPARATOR, self.group_lwidget)
QListWidgetItem("application_view", self.group_lwidget)
QListWidgetItem("application_update", self.group_lwidget)
QListWidgetItem("cadastre_view", self.group_lwidget)
QListWidgetItem("cadastre_update", self.group_lwidget)
QListWidgetItem("contracting_view", self.group_lwidget)
QListWidgetItem("contracting_update", self.group_lwidget)
QListWidgetItem("reporting", self.group_lwidget)
QListWidgetItem("log_view", self.member_lwidget)
def __populate_au_level1_cbox(self):
try:
PluginUtils.populate_au_level1_cbox(self.aimag_cbox, True, False, False)
except DatabaseError, e:
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
@pyqtSlot()
def on_aimag_lwidget_itemSelectionChanged(self):
try:
self.soum_cbox.clear()
self.soum_cbox.addItem("*", "*")
if self.aimag_lwidget.currentItem() is None:
return
# if self.aimag_lwidget.count() > 1:
# return
au_level1_code = self.aimag_lwidget.currentItem().data(Qt.UserRole)
PluginUtils.populate_au_level2_cbox(self.soum_cbox, au_level1_code, True, False, False)
except DatabaseError, e:
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
@pyqtSlot()
def on_user_role_lwidget_itemSelectionChanged(self):
self.selected_user = self.user_role_lwidget.currentItem().text()
user_name = self.user_role_lwidget.currentItem().text()
try:
user_c = self.db_session.query(SetRole). \
filter(SetRole.user_name == user_name).count()
if user_c == 1:
user = self.db_session.query(SetRole). \
filter(SetRole.user_name == user_name).one()
else:
user = self.db_session.query(SetRole).\
filter(SetRole.user_name == user_name).\
filter(SetRole.is_active == True).one()
except NoResultFound:
return
self.username_real_lbl.setText(user.user_name_real)
self.username_edit.setText(user.user_name)
self.surname_edit.setText(user.surname)
self.firstname_edit.setText(user.first_name)
self.email_edit.setText(user.email)
self.position_cbox.setCurrentIndex(self.position_cbox.findData(user.position))
# self.position_edit.setText(user.position)
self.phone_edit.setText(user.phone)
self.mac_address_edit.setText(user.mac_addresses)
self.password_edit.setText(self.PW_PLACEHOLDER)
self.retype_password_edit.setText(self.PW_PLACEHOLDER)
self.register_edit.setText(user.user_register)
# populate groups
self.__populate_group_lwidget()
groups = self.__groupsByUser(user_name)
for group in groups:
group_name = group[0]
items = self.group_lwidget.findItems(group_name, Qt.MatchExactly)
if len(items) > 0:
item = items[0]
self.member_lwidget.addItem(item.text())
self.group_lwidget.takeItem(self.group_lwidget.row(item))
# populate admin units
self.aimag_lwidget.clear()
self.soum_lwidget.clear()
restriction_au_level1 = user.restriction_au_level1
aimag_codes = restriction_au_level1.split(',')
try:
if len(aimag_codes) == self.db_session.query(AuLevel1).count(): # all Aimags
item = QListWidgetItem("*")
item.setData(Qt.UserRole, "*")
self.aimag_lwidget.addItem(item)
self.soum_lwidget.addItem(item)
else:
for code in aimag_codes:
code = code.strip()
aimag = self.db_session.query(AuLevel1).filter(AuLevel1.code == code).one()
item = QListWidgetItem(aimag.name)
item.setData(Qt.UserRole, aimag.code)
self.aimag_lwidget.addItem(item)
restriction_au_level2 = user.restriction_au_level2
soum_codes = restriction_au_level2.split(',')
# Find districts among the Aimags:
l1_district_entries = filter(lambda x: x.startswith('1') or x.startswith('01'), aimag_codes)
l2_district_entries = filter(lambda x: x.startswith('1') or x.startswith('01'), soum_codes)
true_aimags = filter(lambda x: not x.startswith('1') and not x.startswith('01'), aimag_codes)
if len(aimag_codes)-len(l1_district_entries) == 1 and \
len(soum_codes)-len(l2_district_entries) == self.db_session.query(AuLevel2)\
.filter(AuLevel2.code.startswith(true_aimags[0]))\
.count():
item = QListWidgetItem("*")
item.setData(Qt.UserRole, "*")
self.soum_lwidget.addItem(item)
else:
for code in soum_codes:
code = code.strip()
soum = self.db_session.query(AuLevel2).filter(AuLevel2.code == code).one()
item = QListWidgetItem(soum.name+'_'+soum.code)
item.setData(Qt.UserRole, soum.code)
self.soum_lwidget.addItem(item)
except NoResultFound:
pass
def reject(self):
SessionHandler().destroy_session()
QDialog.reject(self)
@pyqtSlot()
def on_add_button_clicked(self):
try:
if self.__add_or_update_role():
PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('New user created.'))
except DatabaseError, e:
self.db_session.rollback()
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
@pyqtSlot()
def on_update_button_clicked(self):
try:
if self.__add_or_update_role('UPDATE'):
PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('User information updated.'))
except DatabaseError, e:
self.db_session.rollback()
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
def __add_or_update_role(self, mode='ADD'):
if not self.__validate_user_input(mode):
return False
user_name = self.username_edit.text().strip()
surname = self.surname_edit.text().strip()
first_name = self.firstname_edit.text().strip()
user_register = self.register_edit.text().strip()
phone = self.phone_edit.text().strip()
# position = self.position_edit.text().strip()
position = self.position_cbox.itemData(self.position_cbox.currentIndex())
mac_addresses = self.mac_address_edit.text().strip()
password = self.password_edit.text().strip()
email = ''
if self.email_edit.text():
email = self.email_edit.text().strip()
if self.has_privilege:
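            # Role DDL below (CREATE/ALTER ROLE, GRANT/REVOKE) is executed under
            # the privileged 'role_management' group role and reset afterwards.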
try:
self.db_session.execute("SET ROLE role_management")
except DatabaseError, e:
self.db_session.rollback()
PluginUtils.show_error(self, self.tr("Database Query Error"),
self.tr("You must login different username with member of role management"))
return
if mode == 'ADD':
sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
count = self.db_session.execute(sql).fetchone()
if count[0] == 0:
self.db_session.execute(u"CREATE ROLE {0} login PASSWORD '{1}'".format(user_name, password))
else:
message_box = QMessageBox()
message_box.setText(self.tr("Could not execute: {0} already exists. Do you want to connect selected soums?").format(user_name))
yes_button = message_box.addButton(self.tr("Yes"), QMessageBox.ActionRole)
message_box.addButton(self.tr("Cancel"), QMessageBox.ActionRole)
message_box.exec_()
if not message_box.clickedButton() == yes_button:
return
else:
if password != self.PW_PLACEHOLDER:
self.db_session.execute(u"ALTER ROLE {0} PASSWORD '{1}'".format(user_name, password))
groups = self.__groupsByUser(user_name)
for group in groups:
self.db_session.execute(u"REVOKE {0} FROM {1}".format(group[0], user_name))
for index in range(self.member_lwidget.count()):
item = self.member_lwidget.item(index)
sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
count = self.db_session.execute(sql).fetchone()
if count[0] == 0:
self.db_session.execute(u"CREATE ROLE {0} login PASSWORD '{1}'".format(user_name, password))
self.db_session.execute(u"GRANT {0} TO {1}".format(item.text(), user_name))
self.db_session.execute("RESET ROLE")
restriction_au_level1 = ''
restriction_au_level2 = ''
is_first = 0
for index in range(self.aimag_lwidget.count()):
item = self.aimag_lwidget.item(index)
if item.text() == '*': # all Aimags
for index2 in range(self.aimag_cbox.count()):
au_level1_code = str(self.aimag_cbox.itemData(index2, Qt.UserRole))
if au_level1_code != '*':
restriction_au_level1 += au_level1_code + ','
# Special treatment for UB's districts:
if au_level1_code.startswith('1') or au_level1_code.startswith('01'):
restriction_au_level2 += au_level1_code + '00' + ','
self.db_session.execute("SET ROLE role_management")
self.db_session.execute(u"GRANT s{0}00 TO {1}".format(au_level1_code, user_name))
self.db_session.execute("RESET ROLE")
for au_level2 in self.db_session.query(AuLevel2).filter(AuLevel2.code.startswith(au_level1_code))\
.order_by(AuLevel2.code):
restriction_au_level2 += au_level2.code + ','
self.db_session.execute("SET ROLE role_management")
self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
self.db_session.execute("RESET ROLE")
break
else:
au_level1_code = str(item.data(Qt.UserRole))
restriction_au_level1 += au_level1_code + ','
# Special treatment for UB's districts:
# if au_level1_code.startswith('1') or au_level1_code.startswith('01'):
# restriction_au_level2 += au_level1_code + '00' + ','
# self.db_session.execute("SET ROLE role_management")
# self.db_session.execute(u"GRANT s{0}00 TO {1}".format(au_level1_code, user_name))
# self.db_session.execute("RESET ROLE")
if is_first == 0:
is_first = 1
for index2 in range(self.soum_lwidget.count()):
item = self.soum_lwidget.item(index2)
if item.text() == '*':
for au_level2 in self.db_session.query(AuLevel2).filter(AuLevel2.code.startswith(au_level1_code))\
.order_by(AuLevel2.code):
restriction_au_level2 += au_level2.code + ','
self.db_session.execute("SET ROLE role_management")
self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
self.db_session.execute("RESET ROLE")
else:
try:
au_level2_code = str(item.data(Qt.UserRole))
restriction_au_level2 += au_level2_code + ','
self.db_session.execute("SET ROLE role_management")
self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2_code, user_name))
self.db_session.execute("RESET ROLE")
except DatabaseError, e:
self.db_session.rollback()
PluginUtils.show_error(self, self.tr("Database Query Error"),
self.tr("You must login different username with member of role management"))
return
restriction_au_level1 = restriction_au_level1[:len(restriction_au_level1)-1]
restriction_au_level2 = restriction_au_level2[:len(restriction_au_level2)-1]
pa_from = datetime.datetime.today()
pa_till = datetime.date.max
role_c = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).count()
        if not self.register_edit.text():
            PluginUtils.show_message(None, self.tr("None register"),
                                     self.tr("Register must not be empty!"))
            return
if mode == 'ADD':
if role_c != 0:
role_count = self.db_session.query(SetRole).filter(
SetRole.user_register == self.register_edit.text()).count()
# if role_count > 0:
# PluginUtils.show_message(None, self.tr("Duplicate user"),
# self.tr("This user already registered!"))
# return
is_active_user = False
if role_c == 0:
is_active_user = True
else:
active_role_count = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.is_active == True).count()
if active_role_count == 0:
is_active_user = True
else:
is_active_user = False
try:
count = self.db_session.query(SetRole) \
.filter(SetRole.user_name == user_name) \
.order_by(func.substr(SetRole.user_name_real, 11, 12).desc()).count()
except SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("File Error"),
self.tr("Error in line {0}: {1}").format(currentframe().f_lineno, e.message))
return
if count > 0:
try:
max_number_user = self.db_session.query(SetRole) \
.filter(SetRole.user_name == user_name) \
.order_by(func.substr(SetRole.user_name_real, 11, 12).desc()).first()
except SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("File Error"),
self.tr("Error in line {0}: {1}").format(currentframe().f_lineno,
e.message))
return
user_numbers = max_number_user.user_name_real[-2:]
                # increment the full two-digit suffix (the original only incremented its last digit)
                new_user_number = str(int(user_numbers) + 1).zfill(2)
last_user_name = user_name[:10] + new_user_number
user_name_real = last_user_name
employee_type = 2
else:
user_name_real = self.username_edit.text()+'01'
employee_type = 1
role = SetRole(user_name=user_name, surname=surname, first_name=first_name, phone=phone, user_register=user_register,
mac_addresses=mac_addresses, position=position, restriction_au_level1=restriction_au_level1, user_name_real = user_name_real,
employee_type = employee_type, restriction_au_level2=restriction_au_level2, pa_from=pa_from, pa_till=pa_till, is_active=is_active_user, email=email)
self.db_session.add(role)
else:
active_role_count = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(
SetRole.is_active == True).count()
if active_role_count == 1:
role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.is_active == True).one()
else:
role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.user_name_real == self.username_real_lbl.text()).one()
# for role in roles:
# print role.user_name_real
role.surname = surname
role.first_name = first_name
role.phone = phone
role.user_register = user_register
role.mac_addresses = mac_addresses
if active_role_count == 0:
role.is_active = True
role.position = position
role.restriction_au_level1 = restriction_au_level1
role.restriction_au_level2 = restriction_au_level2
role.email = email
self.db_session.commit()
self.__populate_user_role_lwidget()
item = self.user_role_lwidget.findItems(user_name, Qt.MatchExactly)[0]
row = self.user_role_lwidget.row(item)
self.user_role_lwidget.setCurrentRow(row)
return True
else:
if password != self.PW_PLACEHOLDER:
self.db_session.execute(u"ALTER ROLE {0} PASSWORD '{1}'".format(user_name, password))
self.db_session.commit()
self.__populate_user_role_lwidget()
item = self.user_role_lwidget.findItems(user_name, Qt.MatchExactly)[0]
row = self.user_role_lwidget.row(item)
self.user_role_lwidget.setCurrentRow(row)
return True
def __validate_user_input(self, mode='ADD'):
if mode == 'UPDATE':
if self.username_edit.text().strip() != self.selected_user:
PluginUtils.show_message(None, self.tr("Username can't be modified"),
self.tr("The username of an existing user cannot be modified!"))
self.username_edit.setText(self.selected_user)
return False
if self.username_edit.text().strip() == 'role_manager' \
and not self.member_lwidget.findItems('role_management', Qt.MatchExactly):
PluginUtils.show_message(self, self.tr("Required group"),
self.tr("The user 'role_manager' must be member of group 'role_management'."))
return False
if len(self.username_edit.text().strip()) == 0:
PluginUtils.show_message(self, self.tr("No Username"), self.tr("Provide a valid username!"))
return False
if len(self.password_edit.text().strip()) < 8:
PluginUtils.show_message(self, self.tr("Invalid Password"),
self.tr("Provide a valid password that consists of 8 characters or more!"))
return False
if self.password_edit.text().strip() != self.retype_password_edit.text().strip():
PluginUtils.show_message(self, self.tr("Passwords Not Matching"),
self.tr("Password and retyped password are not identical!"))
return False
if len(self.surname_edit.text().strip()) == 0:
PluginUtils.show_message(self, self.tr("No Surname"), self.tr("Provide a valid surname!"))
return False
if len(self.firstname_edit.text().strip()) == 0:
PluginUtils.show_message(self, self.tr("No First Name"), self.tr("Provide a valid first name!"))
return False
if len(self.email_edit.text().strip()) == 0:
PluginUtils.show_message(self, self.tr("No Email"), self.tr("Provide a valid email!"))
return False
        if self.position_cbox.currentIndex() < 0:  # fixed: the original re-checked the first name field here
PluginUtils.show_message(self, self.tr("No Position"), self.tr("Provide a valid position!"))
return False
if self.member_lwidget.count() == 0:
PluginUtils.show_message(self, self.tr("No Group Membership"),
self.tr("The user must be member of at least one group!"))
return False
if not self.member_lwidget.findItems('role_management', Qt.MatchExactly) \
and not self.member_lwidget.findItems('db_creation', Qt.MatchExactly):
if self.aimag_lwidget.count() == 0:
PluginUtils.show_message(self, self.tr("No Aimag/Duureg"),
self.tr("The user must be granted at least one Aimag/Duureg!"))
return False
if self.soum_lwidget.count() == 0:
PluginUtils.show_message(self, self.tr("No Soum"),
self.tr("The user must granted at least one Soum!"))
return False
return True
@pyqtSlot()
def on_down_groups_button_clicked(self):
if not self.group_lwidget.currentItem():
return
group = self.group_lwidget.currentItem().text()
if group.find(self.GROUP_SEPARATOR) != -1:
return
self.group_lwidget.takeItem(self.group_lwidget.row(self.group_lwidget.currentItem()))
self.member_lwidget.addItem(group)
if group == 'land_office_administration':
item_list = self.member_lwidget.findItems('contracting_update', Qt.MatchExactly)
if len(item_list) == 0:
contracting_update_item = self.group_lwidget.findItems('contracting_update', Qt.MatchExactly)[0]
self.group_lwidget.takeItem(self.group_lwidget.row(contracting_update_item))
self.member_lwidget.addItem(contracting_update_item.text())
# elif group == 'contracting_update':
# item_list = self.member_lwidget.findItems('cadastre_update', Qt.MatchExactly)
# if len(item_list) == 0:
# cadastre_update_item = self.group_lwidget.findItems('cadastre_update', Qt.MatchExactly)[0]
# self.group_lwidget.takeItem(self.group_lwidget.row(cadastre_update_item))
# self.member_lwidget.addItem(cadastre_update_item.text())
@pyqtSlot()
def on_up_groups_button_clicked(self):
if not self.member_lwidget.currentItem():
return
group = self.member_lwidget.currentItem().text()
if group == 'log_view': # cannot be removed from member widget
return
self.member_lwidget.takeItem(self.member_lwidget.row(self.member_lwidget.currentItem()))
if group == 'role_management' or group == 'db_creation' or group == 'land_office_administration':
self.group_lwidget.insertItem(0, group)
else:
self.group_lwidget.addItem(group)
# if group == 'contracting_update':
# item_list = self.group_lwidget.findItems('land_office_administration', Qt.MatchExactly)
# if len(item_list) == 0:
# land_office_admin_item = self.member_lwidget.findItems('land_office_administration', Qt.MatchExactly)[0]
# self.member_lwidget.takeItem(self.member_lwidget.row(land_office_admin_item))
# self.group_lwidget.insertItem(0, land_office_admin_item.text())
# elif group == 'cadastre_update':
# item_list = self.group_lwidget.findItems('contracting_update', Qt.MatchExactly)
# if len(item_list) == 0:
# contracting_update_item = self.member_lwidget.findItems('contracting_update', Qt.MatchExactly)[0]
# self.member_lwidget.takeItem(self.member_lwidget.row(contracting_update_item))
# self.group_lwidget.addItem(contracting_update_item.text())
@pyqtSlot()
def on_down_aimag_button_clicked(self):
au_level1_name = self.aimag_cbox.currentText()
au_level1_code = self.aimag_cbox.itemData(self.aimag_cbox.currentIndex(), Qt.UserRole)
if len(self.aimag_lwidget.findItems(au_level1_name, Qt.MatchExactly)) == 0:
if len(self.aimag_lwidget.findItems("*", Qt.MatchExactly)) == 0:
if au_level1_name == '*':
self.aimag_lwidget.clear()
self.soum_lwidget.clear()
item = QListWidgetItem("*")
item.setData(Qt.UserRole, "*")
self.soum_lwidget.addItem(item)
item = QListWidgetItem(au_level1_name)
item.setData(Qt.UserRole, au_level1_code)
self.aimag_lwidget.addItem(item)
self.aimag_lwidget.setCurrentItem(item)
if self.aimag_lwidget.count() > 1:
self.soum_lwidget.clear()
item = QListWidgetItem("*")
item.setData(Qt.UserRole, "*")
self.soum_lwidget.addItem(item)
@pyqtSlot()
def on_up_aimag_button_clicked(self):
self.aimag_lwidget.takeItem(self.aimag_lwidget.row(self.aimag_lwidget.currentItem()))
if self.aimag_lwidget.count() > 0:
self.aimag_lwidget.setItemSelected(self.aimag_lwidget.item(0), False)
self.aimag_lwidget.setCurrentItem(self.aimag_lwidget.item(0))
self.soum_lwidget.clear()
@pyqtSlot()
def on_down_soum_button_clicked(self):
au_level2_name = self.soum_cbox.currentText()
au_level2_code = self.soum_cbox.itemData(self.soum_cbox.currentIndex(), Qt.UserRole)
itemsList = self.aimag_lwidget.selectedItems()
if len(self.soum_lwidget.findItems(au_level2_name +'_'+ au_level2_code, Qt.MatchExactly)) == 0:
if len(self.soum_lwidget.findItems("*", Qt.MatchExactly)) == 0:
if au_level2_name == '*':
self.soum_lwidget.clear()
item = QListWidgetItem(au_level2_name +'_'+ au_level2_code)
item.setData(Qt.UserRole, au_level2_code)
self.soum_lwidget.addItem(item)
@pyqtSlot()
def on_up_soum_button_clicked(self):
self.soum_lwidget.takeItem(self.soum_lwidget.row(self.soum_lwidget.currentItem()))
@pyqtSlot()
def on_delete_button_clicked(self):
item = self.user_role_lwidget.currentItem()
if item is None:
return
user_name = item.text()
if user_name == 'role_manager':
PluginUtils.show_message(self, self.tr("Delete User"),
self.tr("The user 'role_manager' is a required role and cannot be deleted."))
return
# The user logged on must not delete himself:
if self.__logged_on_user() == user_name:
PluginUtils.show_message(self, self.tr("Delete User"),
self.tr("The user currently logged on cannot be deleted."))
return
message = "Delete user role {0}".format(user_name)
if QMessageBox.No == QMessageBox.question(self, self.tr("Delete User Role"),
message, QMessageBox.Yes | QMessageBox.No, QMessageBox.No):
return
try:
user_role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).one()
self.db_session.delete(user_role)
self.db_session.execute("SET ROLE role_management")
self.db_session.execute(u"DROP ROLE {0}".format(user_name))
self.db_session.execute("RESET ROLE")
self.db_session.commit()
self.__populate_user_role_lwidget()
PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('User role deleted.'))
except DatabaseError, e:
self.db_session.rollback()
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
def __groupsByUser(self, user_name):
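        # Lists the group roles the given login role belongs to by joining
        # pg_user -> pg_auth_members -> pg_roles in the system catalogs.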
sql = "select rolname from pg_user join pg_auth_members on (pg_user.usesysid=pg_auth_members.member) " \
"join pg_roles on (pg_roles.oid=pg_auth_members.roleid) where pg_user.usename=:bindName"
result = self.db_session.execute(sql, {'bindName': user_name}).fetchall()
return result
def __logged_on_user(self):
result = self.db_session.execute("SELECT USER")
current_user = result.fetchone()
return current_user[0]
@pyqtSlot()
def on_help_button_clicked(self):
os.system("hh.exe "+ str(os.path.dirname(os.path.realpath(__file__))[:-10]) +"help\output\help_lm2.chm::/html/user_role_management.htm")
@pyqtSlot(QListWidgetItem)
def on_user_role_lwidget_itemDoubleClicked(self, item):
username = item.text()
dlg = UserRoleManagementDetialDialog(username)
dlg.exec_()
@pyqtSlot()
def on_settings_button_clicked(self):
if not self.user_role_lwidget.currentItem():
return
username = self.user_role_lwidget.currentItem().text()
dlg = UserRoleManagementDetialDialog(username)
dlg.exec_()
    def __load_default_right_grud(self):
aa = self.db_session.query(ClGroupRole).all()
positions = self.db_session.query(ClPositionType).all()
for position in positions:
# right_grud = self.db_session.query(SetPositionGroupRole)
row = self.settings_position_twidget.rowCount()
self.settings_position_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(position.description))
item.setData(Qt.UserRole, position.code)
self.settings_position_twidget.setItem(row, 0, item)
@pyqtSlot()
def on_load_users_button_clicked(self):
self.__load_user_roles()
def __load_user_roles(self):
self.user_twidget.setRowCount(0)
user_start = "user" + "%"
users = self.db_session.query(SetRole).filter(SetRole.user_name.like(user_start)).all()
for user in users:
row = self.user_twidget.rowCount()
self.user_twidget.insertRow(row)
full_name = '('+ user.user_name_real +') '+ user.surname[:1] + '.' + user.first_name
item = QTableWidgetItem(u'{0}'.format(full_name))
item.setData(Qt.UserRole, user.user_name_real)
self.user_twidget.setItem(row, 0, item)
@pyqtSlot()
def on_load_position_button_clicked(self):
self.__load_all_positions()
def __load_all_positions(self):
self.position_twidget.setRowCount(0)
selected_items = self.user_twidget.selectedItems()
if len(selected_items) == 0:
PluginUtils.show_message(self, self.tr("Selection"), self.tr("Please select user."))
return
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
positions = self.db_session.query(ClPositionType).all()
for position in positions:
row = self.position_twidget.rowCount()
self.position_twidget.insertRow(row)
user_positions_count = self.db_session.query(SetUserPosition).\
filter(SetUserPosition.user_name_real == user_name_real).\
filter(SetUserPosition.position == position.code).count()
item = QTableWidgetItem(u'{0}'.format(position.description))
item.setData(Qt.UserRole, position.code)
if user_positions_count == 0:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.position_twidget.setItem(row, 0, item)
@pyqtSlot(QTableWidgetItem)
def on_user_twidget_itemClicked(self, item):
self.position_twidget.setRowCount(0)
self.right_grud_twidget.setRowCount(0)
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
self.__load_user_positions(user_name_real)
self.__load_user_right_types(user_name_real)
def __load_user_right_types(self, user_name_real):
right_types = self.db_session.query(ClGroupRole).all()
for right_type in right_types:
user_right_types_count = self.db_session.query(SetUserGroupRole). \
filter(SetUserGroupRole.user_name_real == user_name_real).\
filter(SetUserGroupRole.group_role == right_type.code).count()
if user_right_types_count == 1:
user_right_type = self.db_session.query(SetUserGroupRole). \
filter(SetUserGroupRole.user_name_real == user_name_real). \
filter(SetUserGroupRole.group_role == right_type.code).one()
row = self.right_grud_twidget.rowCount()
self.right_grud_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(right_type.description))
item.setData(Qt.UserRole, right_type.code)
self.right_grud_twidget.setItem(row, 0, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, right_type.code)
if user_right_types_count == 0:
item.setCheckState(Qt.Unchecked)
else:
if not user_right_type.r_view:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 1, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, right_type.code)
if user_right_types_count == 0:
item.setCheckState(Qt.Unchecked)
else:
if not user_right_type.r_add:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 2, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, right_type.code)
if user_right_types_count == 0:
item.setCheckState(Qt.Unchecked)
else:
if not user_right_type.r_remove:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 3, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, right_type.code)
if user_right_types_count == 0:
item.setCheckState(Qt.Unchecked)
else:
if not user_right_type.r_update:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 4, item)
def __load_user_positions(self, user_name_real):
user_positions = self.db_session.query(SetUserPosition). \
filter(SetUserPosition.user_name_real == user_name_real).all()
set_role = self.db_session.query(SetRole).filter(SetRole.user_name_real == user_name_real).one()
position = self.db_session.query(ClPositionType). \
filter(ClPositionType.code == set_role.position).one()
user_positions_count = self.db_session.query(SetUserPosition). \
filter(SetUserPosition.user_name_real == user_name_real). \
filter(SetUserPosition.position == position.code).count()
if user_positions_count == 0:
row = self.position_twidget.rowCount()
self.position_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(position.description))
item.setData(Qt.UserRole, position.code)
item.setCheckState(Qt.Checked)
self.position_twidget.setItem(row, 0, item)
for user_position in user_positions:
position = self.db_session.query(ClPositionType). \
filter(ClPositionType.code == user_position.position).one()
row = self.position_twidget.rowCount()
self.position_twidget.insertRow(row)
user_positions_count = self.db_session.query(SetUserPosition). \
filter(SetUserPosition.user_name_real == user_name_real). \
filter(SetUserPosition.position == position.code).count()
item = QTableWidgetItem(u'{0}'.format(position.description))
item.setData(Qt.UserRole, position.code)
if user_positions_count == 0:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.position_twidget.setItem(row, 0, item)
@pyqtSlot()
def on_load_default_settings_button_clicked(self):
self.right_grud_twidget.setRowCount(0)
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
user = self.db_session.query(SetRole).filter_by(user_name_real = user_name_real).one()
position_code = user.position
position_gruds = self.db_session.query(SetPositionGroupRole). \
filter(SetPositionGroupRole.position == position_code).all()
for position_grud in position_gruds:
group_role = self.db_session.query(ClGroupRole).filter(ClGroupRole.code == position_grud.group_role).one()
row = self.right_grud_twidget.rowCount()
self.right_grud_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(group_role.description))
item.setData(Qt.UserRole, group_role.code)
self.right_grud_twidget.setItem(row, 0, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_view:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 1, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_add:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 2, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_remove:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 3, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_update:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.right_grud_twidget.setItem(row, 4, item)
@pyqtSlot(QTableWidgetItem)
def on_settings_position_twidget_itemClicked(self, item):
self.settings_right_grud_twidget.setRowCount(0)
cur_row = self.settings_position_twidget.currentRow()
item = self.settings_position_twidget.item(cur_row, 0)
position_code = item.data(Qt.UserRole)
position_gruds = self.db_session.query(SetPositionGroupRole).\
filter(SetPositionGroupRole.position == position_code).all()
group_roles = self.db_session.query(ClGroupRole).all()
for group_role in group_roles:
position_grud_c = self.db_session.query(SetPositionGroupRole). \
filter(SetPositionGroupRole.position == position_code). \
filter(SetPositionGroupRole.group_role == group_role.code).count()
if position_grud_c == 1:
position_grud = self.db_session.query(SetPositionGroupRole). \
filter(SetPositionGroupRole.position == position_code).\
filter(SetPositionGroupRole.group_role == group_role.code).one()
row = self.settings_right_grud_twidget.rowCount()
self.settings_right_grud_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(group_role.description_en))
item.setData(Qt.UserRole, group_role.code)
self.settings_right_grud_twidget.setItem(row, 0, item)
item = QTableWidgetItem(u'{0}'.format(group_role.description))
item.setData(Qt.UserRole, group_role.code)
self.settings_right_grud_twidget.setItem(row, 1, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_view:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.settings_right_grud_twidget.setItem(row, 2, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_add:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.settings_right_grud_twidget.setItem(row, 3, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_remove:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.settings_right_grud_twidget.setItem(row, 4, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
if not position_grud.r_update:
item.setCheckState(Qt.Unchecked)
else:
item.setCheckState(Qt.Checked)
self.settings_right_grud_twidget.setItem(row, 5, item)
else:
row = self.settings_right_grud_twidget.rowCount()
self.settings_right_grud_twidget.insertRow(row)
item = QTableWidgetItem(u'{0}'.format(group_role.description_en))
item.setData(Qt.UserRole, group_role.code)
self.settings_right_grud_twidget.setItem(row, 0, item)
item = QTableWidgetItem(u'{0}'.format(group_role.description))
item.setData(Qt.UserRole, group_role.code)
self.settings_right_grud_twidget.setItem(row, 1, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
item.setCheckState(Qt.Unchecked)
self.settings_right_grud_twidget.setItem(row, 2, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
item.setCheckState(Qt.Unchecked)
self.settings_right_grud_twidget.setItem(row, 3, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
item.setCheckState(Qt.Unchecked)
self.settings_right_grud_twidget.setItem(row, 4, item)
item = QTableWidgetItem()
item.setData(Qt.UserRole, group_role.code)
item.setCheckState(Qt.Unchecked)
self.settings_right_grud_twidget.setItem(row, 5, item)
def __start_fade_out_timer(self):
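        # Fades the status label out: a QTimer fires every 10 ms and
        # __fade_status_message re-styles the label with an alpha value
        # derived from time_counter, which counts down from 500.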
self.timer = QTimer()
self.timer.timeout.connect(self.__fade_status_message)
self.time_counter = 500
self.timer.start(10)
def __fade_status_message(self):
opacity = int(self.time_counter * 0.5)
self.status_label.setStyleSheet("QLabel {color: rgba(255,0,0," + str(opacity) + ");}")
self.status_label.setText(self.tr('Changes applied successfully.'))
if self.time_counter == 0:
self.timer.stop()
self.time_counter -= 1
def __save_settings(self):
try:
self.__save_right_settings()
self.__save_user_positions()
self.__save_user_right_type()
return True
except exc.SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("SQL Error"), e.message)
return False
def __save_user_right_type(self):
selected_items = self.user_twidget.selectedItems()
if len(selected_items) == 0:
return
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
for row in range(self.right_grud_twidget.rowCount()):
check_item = self.right_grud_twidget.item(row, 0)
group_role = check_item.data(Qt.UserRole)
user_right_count = self.db_session.query(SetUserGroupRole).\
filter(SetUserGroupRole.group_role == group_role) .\
filter(SetUserGroupRole.user_name_real == user_name_real).count()
check_view_item = self.right_grud_twidget.item(row, 1)
check_add_item = self.right_grud_twidget.item(row, 2)
check_delete_item = self.right_grud_twidget.item(row, 3)
check_update_item = self.right_grud_twidget.item(row, 4)
if user_right_count == 0:
user_right = SetUserGroupRole()
user_right.user_name_real = user_name_real
user_right.group_role = group_role
                user_right.r_view = check_view_item.checkState() == Qt.Checked
                user_right.r_add = check_add_item.checkState() == Qt.Checked
                user_right.r_remove = check_delete_item.checkState() == Qt.Checked
                user_right.r_update = check_update_item.checkState() == Qt.Checked
self.db_session.add(user_right)
else:
if user_right_count == 1:
user_right = self.db_session.query(SetUserGroupRole). \
filter(SetUserGroupRole.group_role == group_role). \
filter(SetUserGroupRole.user_name_real == user_name_real).one()
                    user_right.r_view = check_view_item.checkState() == Qt.Checked
                    user_right.r_add = check_add_item.checkState() == Qt.Checked
                    user_right.r_remove = check_delete_item.checkState() == Qt.Checked
                    user_right.r_update = check_update_item.checkState() == Qt.Checked
def __save_user_positions(self):
selected_items = self.user_twidget.selectedItems()
if len(selected_items) == 0:
return
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
for row in range(self.position_twidget.rowCount()):
check_item = self.position_twidget.item(row, 0)
position_code = check_item.data(Qt.UserRole)
user_positions_count = self.db_session.query(SetUserPosition).\
filter(SetUserPosition.position == position_code) .\
filter(SetUserPosition.user_name_real == user_name_real).count()
if check_item.checkState() == Qt.Checked:
if user_positions_count == 0:
user_position = SetUserPosition()
user_position.user_name_real = user_name_real
user_position.position = position_code
self.db_session.add(user_position)
else:
if user_positions_count == 1:
self.db_session.query(SetUserPosition). \
filter(SetUserPosition.position == position_code). \
filter(SetUserPosition.user_name_real == user_name_real).delete()
def __save_right_settings(self):
selected_items = self.settings_position_twidget.selectedItems()
if len(selected_items) == 0:
return
cur_row = self.settings_position_twidget.currentRow()
item = self.settings_position_twidget.item(cur_row, 0)
position_code = item.data(Qt.UserRole)
for row in range(self.settings_right_grud_twidget.rowCount()):
group_role = self.settings_right_grud_twidget.item(row, 0).data(Qt.UserRole)
position_gruds_c = self.db_session.query(SetPositionGroupRole). \
filter(SetPositionGroupRole.position == position_code). \
filter(SetPositionGroupRole.group_role == group_role).count()
if position_gruds_c == 1:
position_gruds = self.db_session.query(SetPositionGroupRole).\
filter(SetPositionGroupRole.position == position_code). \
filter(SetPositionGroupRole.group_role == group_role).one()
check_view_item = self.settings_right_grud_twidget.item(row, 2)
check_add_item = self.settings_right_grud_twidget.item(row, 3)
check_delete_item = self.settings_right_grud_twidget.item(row, 4)
check_update_item = self.settings_right_grud_twidget.item(row, 5)
                position_gruds.r_view = check_view_item.checkState() == Qt.Checked
                position_gruds.r_add = check_add_item.checkState() == Qt.Checked
                position_gruds.r_remove = check_delete_item.checkState() == Qt.Checked
                position_gruds.r_update = check_update_item.checkState() == Qt.Checked
else:
position_gruds = SetPositionGroupRole()
position_gruds.group_role = group_role
position_gruds.position = position_code
check_view_item = self.settings_right_grud_twidget.item(row, 2)
check_add_item = self.settings_right_grud_twidget.item(row, 3)
check_delete_item = self.settings_right_grud_twidget.item(row, 4)
check_update_item = self.settings_right_grud_twidget.item(row, 5)
                position_gruds.r_view = check_view_item.checkState() == Qt.Checked
                position_gruds.r_add = check_add_item.checkState() == Qt.Checked
                position_gruds.r_remove = check_delete_item.checkState() == Qt.Checked
                position_gruds.r_update = check_update_item.checkState() == Qt.Checked
self.db_session.add(position_gruds)
@pyqtSlot()
def on_apply_button_clicked(self):
if not self.__save_settings():
return
self.db_session.commit()
self.__start_fade_out_timer()
| [
"[email protected]"
]
| |
5505cd4011c837c9e22cf9e9d81addb8442e050d | 11cd362cdd78c2fc48042ed203614b201ac94aa6 | /apps/oozie/src/oozie/migrations/0005_initial.py | 2688a433ed8dcc89995fc5f9b23a9defb2088449 | [
"CC-BY-3.0",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"ZPL-2.0",
"Unlicense",
"LGPL-3.0-only",
"CC0-1.0",
"LicenseRef-scancode-other-permissive",
"CNRI-Python",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-2.0-or-later",
"Python-2.0",
"GPL-3.0-only",
"CC-BY-4.0",
"LicenseRef-scancode-jpython-1.1",
"AFL-2.1",
"JSON",
"WTFPL",
"MIT",
"LicenseRef-scancode-generic-exception",
"LicenseRef-scancode-jython",
"GPL-3.0-or-later",
"LicenseRef-scancode-python-cwi",
"BSD-3-Clause",
"LGPL-3.0-or-later",
"Zlib",
"LicenseRef-scancode-free-unknown",
"Classpath-exception-2.0",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"MPL-2.0",
"ISC",
"GPL-2.0-only",
"ZPL-2.1",
"BSL-1.0",
"Apache-2.0",
"LGPL-2.0-only",
"LicenseRef-scancode-public-domain",
"Xnet",
"BSD-2-Clause"
]
| permissive | cloudera/hue | b42343d0e03d2936b5a9a32f8ddb3e9c5c80c908 | dccb9467675c67b9c3399fc76c5de6d31bfb8255 | refs/heads/master | 2023-08-31T06:49:25.724501 | 2023-08-28T20:45:00 | 2023-08-28T20:45:00 | 732,593 | 5,655 | 2,244 | Apache-2.0 | 2023-09-14T03:05:41 | 2010-06-21T19:46:51 | JavaScript | UTF-8 | Python | false | false | 1,402 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-06 18:55
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oozie', '0004_initial'),
]
operations = [
migrations.AddField(
model_name='link',
name='child',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parent_node', to='oozie.Node', verbose_name=b''),
),
migrations.AddField(
model_name='link',
name='parent',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='child_node', to='oozie.Node'),
),
migrations.AddField(
model_name='job',
name='owner',
field=models.ForeignKey(help_text='Person who can modify the job.', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Owner'),
),
migrations.AddField(
model_name='history',
name='job',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oozie.Job'),
),
]
| [
"[email protected]"
]
| |
5ba300fb8fe455146525b436819e316a5e780da1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2811/61132/294777.py | 4cbb3ce02f1703d0fb35813ef04ff2bc5e50a6e3 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | p,n=map(int,input().split())
l=[]
for i in range(n):
l.append(int(input()))
dic={}
for pos,i in enumerate(l):
key=i%p
if dic.get(key,'')=='':
print(pos+1)
break
else:
dic[key]=i
else:
print(-1) | [
"[email protected]"
]
| |
e400e3f7cfee1b0808a278fe8e94120ceb12437e | 692b907d07eee8ce3ee32a1fda74b6d92fd6c548 | /tests/models/validators/v1_3_0/jsd_d9bdb9034df99dba.py | 4f3ce36a7f1b34bd26fe19e07e1dc62094323ae1 | [
"MIT"
]
| permissive | AltusConsulting/dnacentercli | 04c9c7d00b25753a26c643994388dd4e23bf4c54 | 26ea46fdbd40fc30649ea1d8803158655aa545aa | refs/heads/master | 2022-12-16T04:50:30.076420 | 2020-07-17T22:12:39 | 2020-07-17T22:12:39 | 212,206,213 | 0 | 0 | MIT | 2022-12-08T06:39:49 | 2019-10-01T21:50:42 | Python | UTF-8 | Python | false | false | 2,451 | py | # -*- coding: utf-8 -*-
"""DNA Center Get Site Count data model.
Copyright (c) 2019 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import fastjsonschema
import json
from dnacentersdk.exceptions import MalformedRequest
from builtins import *
class JSONSchemaValidatorD9BdB9034Df99Dba(object):
"""Get Site Count request schema definition."""
def __init__(self):
super(JSONSchemaValidatorD9BdB9034Df99Dba, self).__init__()
self._validator = fastjsonschema.compile(json.loads(
'''{
"properties": {
"response": {
"description":
"Response",
"type": [
"string",
"null"
]
},
"version": {
"description":
"Version",
"type": [
"string",
"null"
]
}
},
"type": "object"
}'''.replace("\n" + ' ' * 16, '')
))
def validate(self, request):
try:
self._validator(request)
except fastjsonschema.exceptions.JsonSchemaException as e:
raise MalformedRequest(
'{} is invalid. Reason: {}'.format(request, e.message)
)
| [
"[email protected]"
]
| |
038769006e9dcbff4aa1248ab9f5b7c86a38959a | 5cd04ee165edb98c80fdfab4ca2ceaf3352f3a60 | /cflearn/models/ddr/loss.py | 4ae8915ad1fab6995fceed631a5eb62fe2106b0f | [
"MIT"
]
| permissive | adbmd/carefree-learn | f99e620ead71e15d7e91c0a74bb564e05afa8ba5 | 10970de9e9b96673f56104bf410bbd4927e86334 | refs/heads/master | 2022-12-21T07:48:28.780174 | 2020-08-01T02:37:23 | 2020-08-01T02:37:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,539 | py | import torch
import logging
import torch.nn as nn
from typing import *
from cftool.ml import Anneal
from cftool.misc import LoggingMixin
from ...misc.toolkit import tensor_dict_type
from ...modules.auxiliary import MTL
class DDRLoss(nn.Module, LoggingMixin):
def __init__(self,
config: Dict[str, Any],
device: torch.device):
super().__init__()
self._joint_training = config["joint_training"]
self._use_dynamic_dual_loss_weights = config["use_dynamic_weights"]
self._use_anneal, self._anneal_step = config["use_anneal"], config["anneal_step"]
self._median_pressure = config.setdefault("median_pressure", 3.)
self._median_pressure_inv = 1. / self._median_pressure
self.mtl = MTL(16, config["mtl_method"])
self._target_loss_warned = False
self._zero = torch.zeros([1], dtype=torch.float32).to(device)
if self._use_anneal:
anneal_config = config.setdefault("anneal_config", {})
anneal_methods = anneal_config.setdefault("methods", {})
anneal_ratios = anneal_config.setdefault("ratios", {})
anneal_floors = anneal_config.setdefault("floors", {})
anneal_ceilings = anneal_config.setdefault("ceilings", {})
default_anneal_methods = {
"median_anneal": "linear", "main_anneal": "linear",
"monotonous_anneal": "sigmoid", "anchor_anneal": "linear",
"dual_anneal": "sigmoid", "recover_anneal": "sigmoid", "pressure_anneal": "sigmoid"
}
default_anneal_ratios = {
"median_anneal": 0.25, "main_anneal": 0.25,
"monotonous_anneal": 0.2, "anchor_anneal": 0.2,
"dual_anneal": 0.75, "recover_anneal": 0.75, "pressure_anneal": 0.5
}
default_anneal_floors = {
"median_anneal": 1., "main_anneal": 0.,
"monotonous_anneal": 0., "anchor_anneal": 0.,
"dual_anneal": 0., "recover_anneal": 0., "pressure_anneal": 0.
}
default_anneal_ceilings = {
"median_anneal": 2.5, "main_anneal": 0.8,
"monotonous_anneal": 2.5, "anchor_anneal": 2.,
"dual_anneal": 0.1, "recover_anneal": 0.1, "pressure_anneal": 1.,
}
for anneal in default_anneal_methods:
anneal_methods.setdefault(anneal, default_anneal_methods[anneal])
anneal_ratios.setdefault(anneal, default_anneal_ratios[anneal])
anneal_floors.setdefault(anneal, default_anneal_floors[anneal])
anneal_ceilings.setdefault(anneal, default_anneal_ceilings[anneal])
for anneal in default_anneal_methods:
attr = f"_{anneal}"
if anneal_methods[anneal] is None:
setattr(self, attr, None)
else:
setattr(self, attr, Anneal(
anneal_methods[anneal], round(self._anneal_step * anneal_ratios[anneal]),
anneal_floors[anneal], anneal_ceilings[anneal]
))
def forward(self,
predictions: tensor_dict_type,
target: torch.Tensor,
*,
check_monotonous_only: bool = False) -> Tuple[torch.Tensor, tensor_dict_type]:
# anneal
if not self._use_anneal or not self.training or check_monotonous_only:
main_anneal = median_anneal = None
monotonous_anneal = anchor_anneal = None
dual_anneal = recover_anneal = pressure_anneal = None
else:
main_anneal = None if self._main_anneal is None else self._main_anneal.pop()
median_anneal = None if self._median_anneal is None else self._median_anneal.pop()
monotonous_anneal = None if self._monotonous_anneal is None else self._monotonous_anneal.pop()
            anchor_anneal = None if self._anchor_anneal is None else self._anchor_anneal.pop()
            dual_anneal = None if self._dual_anneal is None else self._dual_anneal.pop()
            recover_anneal = None if self._recover_anneal is None else self._recover_anneal.pop()
pressure_anneal = None if self._pressure_anneal is None else self._pressure_anneal.pop()
self._last_main_anneal, self._last_pressure_anneal = main_anneal, pressure_anneal
if self._use_anneal and check_monotonous_only:
main_anneal, pressure_anneal = self._last_main_anneal, self._last_pressure_anneal
# median
median = predictions["predictions"]
median_loss = nn.functional.l1_loss(median, target)
if median_anneal is not None:
median_loss = median_loss * median_anneal
# get
anchor_batch, cdf_raw = map(predictions.get, ["anchor_batch", "cdf_raw"])
sampled_anchors, sampled_cdf_raw = map(predictions.get, ["sampled_anchors", "sampled_cdf_raw"])
quantile_batch, median_residual, quantile_residual, quantile_sign = map(
predictions.get, ["quantile_batch", "median_residual", "quantile_residual", "quantile_sign"])
sampled_quantiles, sampled_quantile_residual = map(
predictions.get, ["sampled_quantiles", "sampled_quantile_residual"])
cdf_gradient, quantile_residual_gradient = map(
predictions.get, ["cdf_gradient", "quantile_residual_gradient"])
dual_quantile, quantile_cdf_raw = map(predictions.get, ["dual_quantile", "quantile_cdf_raw"])
dual_cdf, cdf_quantile_residual = map(predictions.get, ["dual_cdf", "cdf_quantile_residual"])
# cdf
fetch_cdf = cdf_raw is not None
cdf_anchor_loss = cdf_monotonous_loss = None
if not fetch_cdf or check_monotonous_only:
cdf_loss = cdf_losses = None
else:
cdf_losses = self._get_cdf_loss(target, cdf_raw, anchor_batch, False)
if main_anneal is not None:
cdf_losses = cdf_losses * main_anneal
cdf_loss = cdf_losses.mean()
if sampled_cdf_raw is not None:
cdf_anchor_loss = self._get_cdf_loss(target, sampled_cdf_raw, sampled_anchors, True)
if anchor_anneal is not None:
cdf_anchor_loss = cdf_anchor_loss * anchor_anneal
# cdf monotonous
if cdf_gradient is not None:
cdf_monotonous_loss = nn.functional.relu(-cdf_gradient).mean()
if anchor_anneal is not None:
cdf_monotonous_loss = cdf_monotonous_loss * monotonous_anneal
# quantile
fetch_quantile = quantile_residual is not None
quantile_anchor_loss = quantile_monotonous_loss = None
if not fetch_quantile or check_monotonous_only:
median_residual_loss = quantile_loss = quantile_losses = None
else:
target_median_residual = target - predictions["median_detach"]
median_residual_loss = self._get_median_residual_loss(
target_median_residual, median_residual, quantile_sign)
if anchor_anneal is not None:
median_residual_loss = median_residual_loss * anchor_anneal
quantile_losses = self._get_quantile_residual_loss(
target_median_residual, quantile_residual, quantile_batch, False)
quantile_loss = quantile_losses.mean() + median_residual_loss
if main_anneal is not None:
quantile_loss = quantile_loss * main_anneal
if sampled_quantile_residual is not None:
quantile_anchor_loss = self._get_quantile_residual_loss(
target_median_residual, sampled_quantile_residual,
sampled_quantiles, True
)
if anchor_anneal is not None:
quantile_anchor_loss = quantile_anchor_loss * anchor_anneal
# median pressure
if not fetch_quantile:
median_pressure_loss = None
else:
median_pressure_loss = self._get_median_pressure_loss(predictions)
if pressure_anneal is not None:
median_pressure_loss = median_pressure_loss * pressure_anneal
# quantile monotonous
quantile_monotonous_losses = []
if quantile_residual_gradient is not None:
quantile_monotonous_losses.append(nn.functional.relu(-quantile_residual_gradient).mean())
if median_residual is not None and quantile_sign is not None:
quantile_monotonous_losses.append(
self._get_median_residual_monotonous_loss(median_residual, quantile_sign))
if quantile_monotonous_losses:
quantile_monotonous_loss = sum(quantile_monotonous_losses)
if anchor_anneal is not None:
quantile_monotonous_loss = quantile_monotonous_loss * monotonous_anneal
# dual
if not self._joint_training or not fetch_cdf or not fetch_quantile or check_monotonous_only:
dual_cdf_loss = dual_quantile_loss = None
cdf_recover_loss = quantile_recover_loss = None
else:
# dual cdf (cdf -> quantile [recover loss] -> cdf [dual loss])
quantile_recover_loss, quantile_recover_losses, quantile_recover_loss_weights = \
self._get_dual_recover_loss(dual_quantile, anchor_batch, cdf_losses)
if quantile_cdf_raw is None:
dual_quantile_loss = None
else:
dual_quantile_losses = self._get_cdf_loss(target, quantile_cdf_raw, anchor_batch, False)
if quantile_recover_losses is None or not self._use_dynamic_dual_loss_weights:
dual_quantile_loss_weights = 1.
else:
quantile_recover_losses_detach = quantile_recover_losses.detach()
dual_quantile_loss_weights = 0.5 * (
quantile_recover_loss_weights + 1 / (1 + 2 * torch.tanh(quantile_recover_losses_detach)))
dual_quantile_loss = (dual_quantile_losses * dual_quantile_loss_weights).mean()
# dual quantile (quantile -> cdf [recover loss] -> quantile [dual loss])
cdf_recover_loss, cdf_recover_losses, cdf_recover_loss_weights = \
self._get_dual_recover_loss(dual_cdf, quantile_batch, quantile_losses)
if cdf_quantile_residual is None:
dual_cdf_loss = None
else:
dual_cdf_losses = self._get_quantile_residual_loss(
target, cdf_quantile_residual, quantile_batch, False)
if cdf_recover_losses is None or not self._use_dynamic_dual_loss_weights:
dual_cdf_loss_weights = 1.
else:
cdf_recover_losses_detach = cdf_recover_losses.detach()
dual_cdf_loss_weights = 0.5 * (
cdf_recover_loss_weights + 1 / (1 + 10 * cdf_recover_losses_detach))
dual_cdf_loss = (dual_cdf_losses * dual_cdf_loss_weights).mean() + median_residual_loss
if dual_anneal is not None:
if dual_cdf_loss is not None:
dual_cdf_loss = dual_cdf_loss * dual_anneal
if dual_quantile_loss is not None:
dual_quantile_loss = dual_quantile_loss * dual_anneal
if recover_anneal is not None:
if cdf_recover_loss is not None:
cdf_recover_loss = cdf_recover_loss * recover_anneal
if quantile_recover_loss is not None:
quantile_recover_loss = quantile_recover_loss * recover_anneal
# combine
if check_monotonous_only:
losses = {}
else:
losses = {"median": median_loss}
if not self._joint_training:
if cdf_anchor_loss is not None:
losses["cdf_anchor"] = cdf_anchor_loss
if quantile_anchor_loss is not None:
losses["quantile_anchor"] = quantile_anchor_loss
else:
if fetch_cdf:
losses["cdf"] = cdf_loss
if cdf_anchor_loss is not None:
losses["cdf_anchor"] = cdf_anchor_loss
if fetch_quantile:
losses["quantile"] = quantile_loss
if quantile_anchor_loss is not None:
losses["quantile_anchor"] = quantile_anchor_loss
if fetch_cdf and fetch_quantile:
losses["quantile_recover"], losses["cdf_recover"] = quantile_recover_loss, cdf_recover_loss
losses["dual_quantile"], losses["dual_cdf"] = dual_quantile_loss, dual_cdf_loss
if median_residual_loss is not None:
losses["median_residual_loss"] = median_residual_loss
if median_pressure_loss is not None:
key = "synthetic_median_pressure_loss" if check_monotonous_only else "median_pressure_loss"
losses[key] = median_pressure_loss
if cdf_monotonous_loss is not None:
key = "synthetic_cdf_monotonous" if check_monotonous_only else "cdf_monotonous"
losses[key] = cdf_monotonous_loss
if quantile_monotonous_loss is not None:
key = "synthetic_quantile_monotonous" if check_monotonous_only else "quantile_monotonous"
losses[key] = quantile_monotonous_loss
if not losses:
return self._zero, {"loss": self._zero}
if not self.mtl.registered:
self.mtl.register(losses.keys())
return self.mtl(losses), losses
def _get_dual_recover_loss(self, dual_prediction, another_input_batch, another_losses):
if dual_prediction is None:
recover_loss = recover_losses = recover_loss_weights = None
else:
recover_losses = torch.abs(another_input_batch - dual_prediction)
if not self._use_dynamic_dual_loss_weights:
recover_loss_weights = 1.
else:
another_losses_detach = another_losses.detach()
recover_loss_weights = 1 / (1 + 2 * torch.tanh(another_losses_detach))
recover_loss = (recover_losses * recover_loss_weights).mean()
return recover_loss, recover_losses, recover_loss_weights
@staticmethod
def _get_cdf_loss(target, cdf_raw, anchor_batch, reduce):
indicative = (target <= anchor_batch).to(torch.float32)
cdf_losses = -indicative * cdf_raw + nn.functional.softplus(cdf_raw)
return cdf_losses if not reduce else cdf_losses.mean()
@staticmethod
def _get_median_residual_monotonous_loss(median_residual, quantile_sign):
return nn.functional.relu(-median_residual * quantile_sign).mean()
@staticmethod
def _get_quantile_residual_loss(target_residual, quantile_residual, quantile_batch, reduce):
quantile_error = target_residual - quantile_residual
quantile_losses = torch.max(quantile_batch * quantile_error, (quantile_batch - 1) * quantile_error)
return quantile_losses if not reduce else quantile_losses.mean()
def _get_median_residual_loss(self, target_median_residual, median_residual, quantile_sign):
same_sign_mask = quantile_sign * torch.sign(target_median_residual) > 0
tmr, mr = map(lambda tensor: tensor[same_sign_mask], [target_median_residual, median_residual])
median_residual_mae = self._median_pressure * torch.abs(tmr - mr).mean()
residual_monotonous_loss = DDRLoss._get_median_residual_monotonous_loss(median_residual, quantile_sign)
return median_residual_mae + residual_monotonous_loss
def _get_median_pressure_loss(self, predictions):
pressure_pos_dict, pressure_neg_dict = map(
predictions.get, map(lambda attr: f"pressure_sub_quantile_{attr}_dict", ["pos", "neg"]))
additive_pos, additive_neg = pressure_pos_dict["add"], pressure_neg_dict["add"]
multiply_pos, multiply_neg = pressure_pos_dict["mul"], pressure_neg_dict["mul"]
        # the additive net & multiply net should tend to zero here
        # because the median pressure batch receives 0.5 as input
return sum(
torch.max(
-self._median_pressure * sub_quantile,
self._median_pressure_inv * sub_quantile
).mean()
for sub_quantile in [
additive_pos, -additive_neg,
multiply_pos, multiply_neg
]
)
__all__ = ["DDRLoss"]
| [
"[email protected]"
]
| |
0628946d4e9a280e8355cd0413d75bd4a43845dc | 84e5297e214dd94105df7bbe627a506773d70224 | /Assignment2/dnn_tf.py | 478f858ded57e45f0034d15cb734f6130922bf28 | []
| no_license | toannguyen1904/VietAI-ML-Foundation-5 | b02b1463d0b820088fa7400112d41d4291357172 | 5adcd49c88e4c886b15973254d56c07c15a8660d | refs/heads/master | 2022-05-16T10:27:27.570181 | 2020-03-16T05:37:58 | 2020-03-16T05:37:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,419 | py | """dnn_tf_sol.py
Solution of deep neural network implementation using tensorflow
Author: Kien Huynh
"""
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from util import *
from dnn_np import test
def bat_classification():
# Load data from file
# Make sure that bat.dat is in data/
train_x, train_y, test_x, test_y = get_bat_data()
train_x, _, test_x = normalize(train_x, train_x, test_x)
test_y = test_y.flatten().astype(np.int32)
train_y = train_y.flatten().astype(np.int32)
num_class = (np.unique(train_y)).shape[0]
# DNN parameters
hidden_layers = [100, 100, 100]
learning_rate = 0.01
batch_size = 200
steps = 2000
# Specify that all features have real-value data
feature_columns = [tf.feature_column.numeric_column("x", shape=[train_x.shape[1]])]
# Available activition functions
# https://www.tensorflow.org/api_guides/python/nn#Activation_Functions
# tf.nn.relu
# tf.nn.elu
# tf.nn.sigmoid
# tf.nn.tanh
activation = tf.nn.relu
# [TODO 1.7] Create a neural network and train it using estimator
# Some available gradient descent optimization algorithms
# https://www.tensorflow.org/api_docs/python/tf/train#classes
# tf.train.GradientDescentOptimizer
# tf.train.AdadeltaOptimizer
# tf.train.AdagradOptimizer
# tf.train.AdagradDAOptimizer
# tf.train.MomentumOptimizer
# tf.train.AdamOptimizer
# tf.train.FtrlOptimizer
# tf.train.ProximalGradientDescentOptimizer
# tf.train.ProximalAdagradOptimizer
# tf.train.RMSPropOptimizer
# Create optimizer
# optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
# optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.005)
# build a deep neural network
# https://www.tensorflow.org/api_docs/python/tf/estimator/DNNClassifier
classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
hidden_units=hidden_layers,
n_classes=num_class,
activation_fn=activation,
optimizer=optimizer)
# Define the training inputs
# https://www.tensorflow.org/api_docs/python/tf/estimator/inputs/numpy_input_fn
train_input_fn = tf.estimator.inputs.numpy_input_fn(x = {"x": train_x},
y = train_y,
batch_size=batch_size,
shuffle=True,
num_epochs=None)
# Train model.
classifier.train(
input_fn=train_input_fn,
steps=steps)
# Define the test inputs
test_input_fn = tf.estimator.inputs.numpy_input_fn(
x={"x": test_x},
y=test_y,
num_epochs=1,
shuffle=False)
# Evaluate accuracy.
predict_input_fn = tf.estimator.inputs.numpy_input_fn(
x={"x": test_x},
num_epochs=1,
shuffle=False)
y_hat = classifier.predict(input_fn=predict_input_fn)
y_hat = list(y_hat)
y_hat = np.asarray([int(x['classes'][0]) for x in y_hat])
test(y_hat, test_y)
def mnist_classification():
# Load data from file
# Make sure that fashion-mnist/*.gz is in data/
train_x, train_y, val_x, val_y, test_x, test_y = get_mnist_data(1)
    train_x, val_x, test_x = normalize(train_x, val_x, test_x)
train_y = train_y.flatten().astype(np.int32)
val_y = val_y.flatten().astype(np.int32)
test_y = test_y.flatten().astype(np.int32)
num_class = (np.unique(train_y)).shape[0]
# DNN parameters
hidden_layers = [100, 100, 100]
learning_rate = 0.01
batch_size = 200
steps = 500
# Specify that all features have real-value data
feature_columns = [tf.feature_column.numeric_column("x", shape=[train_x.shape[1]])]
# Choose activation function
activation = tf.nn.sigmoid
# Some available gradient descent optimization algorithms
# TODO: [YC1.7] Create optimizer
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.005)
# build a deep neural network
classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
hidden_units=hidden_layers,
n_classes=num_class,
activation_fn=activation,
optimizer=optimizer)
# Define the training inputs
# https://www.tensorflow.org/api_docs/python/tf/estimator/inputs/numpy_input_fn
train_input_fn = tf.estimator.inputs.numpy_input_fn(x = {"x": train_x},
y = train_y,
batch_size=batch_size,
shuffle=True,
num_epochs=None)
# Train model.
classifier.train(
input_fn=train_input_fn,
steps=steps)
# Define the test inputs
test_input_fn = tf.estimator.inputs.numpy_input_fn(
x={"x": test_x},
y=test_y,
num_epochs=1,
shuffle=False)
# Evaluate accuracy.
predict_input_fn = tf.estimator.inputs.numpy_input_fn(
x={"x": test_x},
num_epochs=1,
shuffle=False)
y_hat = classifier.predict(input_fn=predict_input_fn)
y_hat = list(y_hat)
y_hat = np.asarray([int(x['classes'][0]) for x in y_hat])
test(y_hat, test_y)
if __name__ == '__main__':
np.random.seed(2017)
plt.ion()
bat_classification()
mnist_classification()
| [
"[email protected]"
]
| |
6e8f312ce8d26da7d371c9bd295ee0598f010704 | 5cc1296f10af0d65691fd01a23221d6d85f4deff | /cotizacion/migrations/0009_auto_20150805_1400.py | f213b7ccb1a13cf363c1195baf3b10f04e54fea3 | []
| no_license | yusnelvy/mtvmcotizacion | e52b58fe8c50d3921d36490084de328c52e4e9ea | 07d2bd5f36350b149c16a0aa514bb610b0cd3e18 | refs/heads/master | 2016-09-05T23:31:15.800940 | 2015-11-07T13:12:30 | 2015-11-07T13:12:30 | 35,440,629 | 0 | 0 | null | 2015-12-18T16:16:23 | 2015-05-11T18:01:47 | JavaScript | UTF-8 | Python | false | false | 1,033 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cotizacion', '0008_auto_20150727_1207'),
]
operations = [
migrations.AlterField(
model_name='tiempo_carga',
name='peso_max',
field=models.DecimalField(blank=True, default=0.0, max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='tiempo_carga',
name='peso_min',
field=models.DecimalField(blank=True, default=0.0, max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='vehiculo',
name='capacidad_peso',
field=models.DecimalField(max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='vehiculo',
name='capacidad_volumen',
field=models.DecimalField(max_digits=8, decimal_places=3),
),
]
| [
"[email protected]"
]
| |
d510a984109e30d272424766c0f4ceedc20d77e2 | ec5c35ac5163c4e81262a81a6a6c46667c01733d | /server/api.py | dfdfa338713c8c53b8fe3fb180871a407ed32b13 | []
| no_license | kotawiw/bytedance-exercise-2 | 27b32d81aa7e8040c1c8448acbe9c4ff20ff5b26 | 8db190487a6490ec852d8418d93ba62251a5437f | refs/heads/master | 2022-12-24T00:04:53.047395 | 2020-09-23T11:48:13 | 2020-09-23T11:48:13 | 297,948,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | from flask import Blueprint, request, abort, jsonify
from flask import g
from server.auth import login_required
from server.models.users import User
from server.models.events import Event
from server.models.events import EventRegistration
bp = Blueprint("api", __name__, url_prefix="/api")
@bp.route("/events", methods=("GET",))
def query_events():
offset = request.args.get("offset", default=0, type=int)
limit = request.args.get("limit", default=10, type=int)
total_count, events = Event.query_events(offset=offset, limit=limit)
return jsonify(
totalCount=total_count,
values=[event_output(e) for e in events])
@bp.route("/events", methods=("POST",))
@login_required
def create_event():
user = g.user
event = Event.create(
user, request.json
)
return event_output(event)
@bp.route("/event/<string:event_id>", methods=("GET",))
def get_event(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
return event_output(event)
@bp.route("/event/<string:event_id>/registrations", methods=("GET",))
def get_registrations(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
registrations = EventRegistration.by_event(event)
return jsonify([registration_output(r) for r in registrations])
@bp.route("/event/<string:event_id>/registrations", methods=("PUT",))
def register_event(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
user = g.user
if not user:
return abort(401, 'Please login to register for an event')
register = EventRegistration.register(event, user)
return registration_output(register)
@bp.route("/event/<string:event_id>/registrations", methods=("DELETE",))
def unregister_event(event_id):
user = g.user
if not user:
return abort(401, 'Please login to unregister for an event')
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
register = EventRegistration.by_event_user(event, user)
if not register:
return abort(404, 'Event registration not found')
EventRegistration.unregister(register)
return registration_output(register)
def event_output(event: Event):
return dict(
id=event.identifier,
name=event.name,
location=event.location,
description=event.description,
startTimestamp=event.start_timestamp,
endTimestamp=event.end_timestamp)
def registration_output(registration: EventRegistration):
# Todo: De-normalize registration info to include user email
user = User.query.get(registration.user_id)
return dict(
email=user.email
)
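# Illustrative request/response shape (all values hypothetical):
#   GET /api/events?offset=0&limit=10
#   -> {"totalCount": 1, "values": [{"id": "...", "name": "...",
#       "location": "...", "description": "...",
#       "startTimestamp": 1600000000, "endTimestamp": 1600003600}]}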
| [
"[email protected]"
]
| |
b9950dd4f6bb688de78a9a92c88f0ae70755ed6e | 8f6a9ff4c63fd24d145088077d5da1c3e4caaa3a | /programming trade/easyhistory - download 备份修改/easyhistroy/history.py | ea5e4c058c768ff89e5b70d20e111adb96f0d2fc | []
| no_license | liaofuwei/pythoncoding | 6fd2afba0d27c4a4bbb4b2d321b3fa402a60d6fe | 966bd99459be933cf48287412a40e0c7a3d0b8e5 | refs/heads/master | 2021-07-15T10:34:57.701528 | 2017-10-10T05:27:13 | 2017-10-10T05:27:13 | 107,651,470 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,621 | py | # coding:utf-8
import os
import pandas as pd
import talib
class Indicator(object):
def __init__(self, stock_code, history):
self.stock_code = stock_code
self.history = history
    def load_csv_files(self, path):
        # builds a stock_code -> DataFrame map; mirrors History.load_csv_files below
        self.market = {}
        file_list = [f for f in os.listdir(path) if f.endswith('.csv')]
        for stock_csv in file_list:
            csv_ext_index_start = -4
            stock_code = stock_csv[:csv_ext_index_start]
            csv_path = os.path.join(path, stock_csv)
            self.market[stock_code] = pd.read_csv(csv_path, index_col='date')
def __getattr__(self, item):
def talib_func(*args, **kwargs):
str_args = ''.join(map(str, args))
if self.history.get(item + str_args) is not None:
return self.history
func = getattr(talib, item)
res_arr = func(self.history['close'].values, *args, **kwargs)
self.history[item + str_args] = res_arr
return self.history
return talib_func
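# Illustrative use of the dynamic talib dispatch above (stock code hypothetical):
#   History()['600000'].SMA(5) calls talib.SMA on the 'close' column, caches
#   the result in a new 'SMA5' column, and returns the enriched DataFrame.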
class History(object):
def __init__(self, dtype='D', path='history'):
self.market = dict()
data_path = os.path.join(path, 'day', 'data')
self.load_csv_files(data_path)
def load_csv_files(self, path):
file_list = [f for f in os.listdir(path) if f.endswith('.csv')]
for stock_csv in file_list:
csv_ext_index_start = -4
stock_code = stock_csv[:csv_ext_index_start]
csv_path = os.path.join(path, stock_csv)
self.market[stock_code] = Indicator(stock_code, pd.read_csv(csv_path, index_col='date'))
def __getitem__(self, item):
return self.market[item]
| [
"[email protected]"
]
| |
c02a678107f5e807bc54b95fb1bc038e46931756 | f338eb32c45d8d5d002a84798a7df7bb0403b3c4 | /DQM/DTMonitorModule/test/DTkFactValidation_1_TEMPL_cfg.py | 28873b4aebd3900356c5f720350f92f2c2e3d464 | []
| permissive | wouf/cmssw | 0a8a8016e6bebc611f1277379e12bef130464afb | 60da16aec83a0fc016cca9e2a5ed0768ba3b161c | refs/heads/CMSSW_7_3_X | 2022-06-30T04:35:45.380754 | 2015-05-08T17:40:17 | 2015-05-08T17:40:17 | 463,028,972 | 0 | 0 | Apache-2.0 | 2022-02-24T06:05:30 | 2022-02-24T06:05:26 | null | UTF-8 | Python | false | false | 3,607 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("PROD")
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('resolutionTest_step1',
'resolutionTest_step2',
'resolutionTest_step3'),
cout = cms.untracked.PSet(
threshold = cms.untracked.string('ERROR'),
default = cms.untracked.PSet(
limit = cms.untracked.int32(0)
),
resolution = cms.untracked.PSet(
limit = cms.untracked.int32(10000000)
),
noLineBreaks = cms.untracked.bool(True)
),
categories = cms.untracked.vstring('resolution'),
destinations = cms.untracked.vstring('cout')
)
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = "GLOBALTAGTEMPLATE"
process.load("CondCore.DBCommon.CondDBSetup_cfi")
process.load("DQMServices.Core.DQM_cfg")
process.load("RecoLocalMuon.Configuration.RecoLocalMuonCosmics_cff")
process.source = cms.Source("PoolSource",
debugFlag = cms.untracked.bool(True),
debugVebosity = cms.untracked.uint32(10),
fileNames = cms.untracked.vstring()
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.calibDB = cms.ESSource("PoolDBESSource",
process.CondDBSetup,
timetype = cms.string('runnumber'),
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTTtrigRcd'),
tag = cms.string('ttrig')
)),
connect = cms.string('sqlite_file:/afs/cern.ch/cms/CAF/CMSALCA/ALCA_MUONCALIB/DTCALIB/RUNPERIODTEMPLATE/ttrig/ttrig_DUMPDBTEMPL_RUNNUMBERTEMPLATE.db'),
authenticationMethod = cms.untracked.uint32(0)
)
process.es_prefer_calibDB = cms.ESPrefer('PoolDBESSource','calibDB')
# if read from RAW
process.load("EventFilter.DTRawToDigi.dtunpacker_cfi")
process.eventInfoProvider = cms.EDFilter("EventCoordinatesSource",
eventInfoFolder = cms.untracked.string('EventInfo/')
)
process.DTkFactValidation = cms.EDAnalyzer("DTCalibValidation",
# Write the histos on file
OutputMEsInRootFile = cms.bool(True),
# Lable to retrieve 2D segments from the event
segment2DLabel = cms.untracked.string('dt2DSegments'),
OutputFileName = cms.string('residuals.root'),
# Lable to retrieve 4D segments from the event
segment4DLabel = cms.untracked.string('dt4DSegments'),
debug = cms.untracked.bool(False),
# Lable to retrieve RecHits from the event
recHits1DLabel = cms.untracked.string('dt1DRecHits')
)
process.FEVT = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring('drop *',
'keep *_MEtoEDMConverter_*_*'),
fileName = cms.untracked.string('DQM.root')
)
process.load("DQMServices.Components.MEtoEDMConverter_cff")
process.dummyProducer = cms.EDProducer("ThingWithMergeProducer")
# if read from RAW
#process.firstStep = cms.Sequence(process.muonDTDigis*process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation)
process.firstStep = cms.Sequence(process.dummyProducer + process.muonDTDigis*process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation*process.MEtoEDMConverter)
#process.firstStep = cms.Sequence(process.dummyProducer + process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation*process.MEtoEDMConverter)
process.p = cms.Path(process.firstStep)
process.outpath = cms.EndPath(process.FEVT)
process.DQM.collectorHost = ''
| [
"[email protected]"
]
| |
5d009ec1750156835ab05bd369cef58aeaed239e | b4c93bad8ccc9007a7d3e7e1d1d4eb8388f6e988 | /farmercoupon/migrations/0048_auto_20210322_1046.py | 45f4e7b616e00e32a923afc76da686935d36cabb | []
| no_license | flashdreiv/fis | 39b60c010d0d989a34c01b39ea88f7fc3be0a87d | b93277785d6ad113a90a011f7c43b1e3e9209ec5 | refs/heads/main | 2023-04-02T12:46:32.249800 | 2021-03-31T00:27:29 | 2021-03-31T00:27:29 | 343,431,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | # Generated by Django 3.1.7 on 2021-03-22 02:46
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
dependencies = [
('farmercoupon', '0047_auto_20210321_1524'),
]
operations = [
migrations.AddField(
model_name='farmer',
name='crop',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[(1, 'Item title 2.1'), (2, 'Item title 2.2'), (3, 'Item title 2.3'), (4, 'Item title 2.4'), (5, 'Item title 2.5')], max_length=9, null=True),
),
migrations.AddField(
model_name='farmer',
name='land_area',
field=models.IntegerField(default=0),
),
]
| [
"[email protected]"
]
| |
cccbb148040f217b8a624f39a07f85f4fb552de4 | 433ada0b349e8a68dd85a5af047b90d23aee44c9 | /include/ClientCaches.py | f4a6eb45028de815aa1b2763dfac4061d03724d5 | [
"WTFPL"
]
| permissive | 3wayHimself/hydrus | 7ddfe3507ad2b3e9dc4ab69cb9c6e25efc06c5aa | 804ffe8cecfe01bdb9518070d31dbf826b72e8ef | refs/heads/master | 2020-03-23T04:37:53.849078 | 2018-07-11T20:23:51 | 2018-07-11T20:23:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106,375 | py | import ClientDefaults
import ClientDownloading
import ClientParsing
import ClientPaths
import ClientRendering
import ClientSearch
import ClientServices
import ClientThreading
import HydrusConstants as HC
import HydrusExceptions
import HydrusFileHandling
import HydrusPaths
import HydrusSerialisable
import HydrusSessions
import HydrusThreading
import itertools
import json
import os
import random
import requests
import threading
import time
import urllib
import wx
import HydrusData
import ClientData
import ClientConstants as CC
import HydrusGlobals as HG
import collections
import HydrusTags
import traceback
# The important thing here, and the reason why it is recursive, is that we want to preserve the parent-grandparent interleaving
def BuildServiceKeysToChildrenToParents( service_keys_to_simple_children_to_parents ):
def AddParents( simple_children_to_parents, children_to_parents, child, parents ):
for parent in parents:
if parent not in children_to_parents[ child ]:
children_to_parents[ child ].append( parent )
if parent in simple_children_to_parents:
grandparents = simple_children_to_parents[ parent ]
AddParents( simple_children_to_parents, children_to_parents, child, grandparents )
service_keys_to_children_to_parents = collections.defaultdict( HydrusData.default_dict_list )
for ( service_key, simple_children_to_parents ) in service_keys_to_simple_children_to_parents.items():
children_to_parents = service_keys_to_children_to_parents[ service_key ]
for ( child, parents ) in simple_children_to_parents.items():
AddParents( simple_children_to_parents, children_to_parents, child, parents )
return service_keys_to_children_to_parents
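# Illustrative walk-through (hypothetical tags): given simple mappings
# { 'a' : [ 'b' ], 'b' : [ 'c' ] }, AddParents also pulls in b's own parents
# while processing a, so children_to_parents[ 'a' ] becomes [ 'b', 'c' ] --
# parent first, then grandparent, preserving the interleaving mentioned above.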
def BuildServiceKeysToSimpleChildrenToParents( service_keys_to_pairs_flat ):
service_keys_to_simple_children_to_parents = collections.defaultdict( HydrusData.default_dict_set )
for ( service_key, pairs ) in service_keys_to_pairs_flat.items():
service_keys_to_simple_children_to_parents[ service_key ] = BuildSimpleChildrenToParents( pairs )
return service_keys_to_simple_children_to_parents
def BuildSimpleChildrenToParents( pairs ):
simple_children_to_parents = HydrusData.default_dict_set()
for ( child, parent ) in pairs:
if child == parent:
continue
if LoopInSimpleChildrenToParents( simple_children_to_parents, child, parent ): continue
simple_children_to_parents[ child ].add( parent )
return simple_children_to_parents
def CollapseTagSiblingPairs( groups_of_pairs ):
# This now takes 'groups' of pairs in descending order of precedence
# This allows us to mandate that local tags take precedence
# a pair is invalid if:
# it causes a loop (a->b, b->c, c->a)
# there is already a relationship for the 'bad' sibling (a->b, a->c)
valid_chains = {}
for pairs in groups_of_pairs:
pairs = list( pairs )
pairs.sort()
for ( bad, good ) in pairs:
if bad == good:
# a->a is a loop!
continue
if bad not in valid_chains:
we_have_a_loop = False
current_best = good
while current_best in valid_chains:
current_best = valid_chains[ current_best ]
if current_best == bad:
we_have_a_loop = True
break
if not we_have_a_loop:
valid_chains[ bad ] = good
# now we collapse the chains, turning:
# a->b, b->c ... e->f
# into
# a->f, b->f ... e->f
siblings = {}
for ( bad, good ) in valid_chains.items():
# given a->b, want to find f
if good in siblings:
# f already calculated and added
best = siblings[ good ]
else:
# we don't know f for this chain, so let's figure it out
current_best = good
while current_best in valid_chains:
current_best = valid_chains[ current_best ] # pursue endpoint f
best = current_best
# add a->f
siblings[ bad ] = best
return siblings
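# Illustrative collapse (hypothetical tags): a single group of pairs
# [ ( 'a', 'b' ), ( 'b', 'c' ), ( 'c', 'a' ) ] discards ( 'c', 'a' ) as a loop,
# then collapses the surviving chain a->b->c into { 'a' : 'c', 'b' : 'c' }.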
def LoopInSimpleChildrenToParents( simple_children_to_parents, child, parent ):
potential_loop_paths = { parent }
while len( potential_loop_paths.intersection( simple_children_to_parents.keys() ) ) > 0:
new_potential_loop_paths = set()
for potential_loop_path in potential_loop_paths.intersection( simple_children_to_parents.keys() ):
new_potential_loop_paths.update( simple_children_to_parents[ potential_loop_path ] )
potential_loop_paths = new_potential_loop_paths
if child in potential_loop_paths: return True
return False
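# Illustrative check (hypothetical tags): with simple_children_to_parents set to
# { 'a' : { 'b' } }, testing child = 'b', parent = 'a' walks a's ancestors,
# finds 'b' again and returns True, so the looping pair would be skipped.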
class ClientFilesManager( object ):
def __init__( self, controller ):
self._controller = controller
self._lock = threading.Lock()
self._prefixes_to_locations = {}
self._bad_error_occured = False
self._missing_locations = set()
self._Reinit()
def _GenerateExpectedFilePath( self, hash, mime ):
hash_encoded = hash.encode( 'hex' )
prefix = 'f' + hash_encoded[:2]
location = self._prefixes_to_locations[ prefix ]
path = os.path.join( location, prefix, hash_encoded + HC.mime_ext_lookup[ mime ] )
return path
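    # e.g. (hypothetical) a jpeg whose hash starts 'ab' is expected at <location>/fab/ab....jpg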
def _GenerateExpectedFullSizeThumbnailPath( self, hash ):
hash_encoded = hash.encode( 'hex' )
prefix = 't' + hash_encoded[:2]
location = self._prefixes_to_locations[ prefix ]
path = os.path.join( location, prefix, hash_encoded ) + '.thumbnail'
return path
def _GenerateExpectedResizedThumbnailPath( self, hash ):
hash_encoded = hash.encode( 'hex' )
prefix = 'r' + hash_encoded[:2]
location = self._prefixes_to_locations[ prefix ]
path = os.path.join( location, prefix, hash_encoded ) + '.thumbnail.resized'
return path
def _GenerateFullSizeThumbnail( self, hash, mime = None ):
if mime is None:
try:
file_path = self._LookForFilePath( hash )
except HydrusExceptions.FileMissingException:
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was missing. It could not be regenerated because the original file was also missing. This event could indicate hard drive corruption or an unplugged external drive. Please check everything is ok.' )
mime = HydrusFileHandling.GetMime( file_path )
else:
file_path = self._GenerateExpectedFilePath( hash, mime )
try:
percentage_in = self._controller.new_options.GetInteger( 'video_thumbnail_percentage_in' )
thumbnail = HydrusFileHandling.GenerateThumbnail( file_path, mime, percentage_in = percentage_in )
except Exception as e:
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was missing. It could not be regenerated from the original file for the above reason. This event could indicate hard drive corruption. Please check everything is ok.' )
full_size_path = self._GenerateExpectedFullSizeThumbnailPath( hash )
try:
HydrusPaths.MakeFileWritable( full_size_path )
with open( full_size_path, 'wb' ) as f:
f.write( thumbnail )
except Exception as e:
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was missing. It was regenerated from the original file, but hydrus could not write it to the location ' + full_size_path + ' for the above reason. This event could indicate hard drive corruption, and it also suggests that hydrus does not have permission to write to its thumbnail folder. Please check everything is ok.' )
def _GenerateResizedThumbnail( self, hash, mime ):
full_size_path = self._GenerateExpectedFullSizeThumbnailPath( hash )
thumbnail_dimensions = self._controller.options[ 'thumbnail_dimensions' ]
if mime in ( HC.IMAGE_GIF, HC.IMAGE_PNG ):
fullsize_thumbnail_mime = HC.IMAGE_PNG
else:
fullsize_thumbnail_mime = HC.IMAGE_JPEG
try:
thumbnail_resized = HydrusFileHandling.GenerateThumbnailFromStaticImage( full_size_path, thumbnail_dimensions, fullsize_thumbnail_mime )
except:
try:
ClientPaths.DeletePath( full_size_path, always_delete_fully = True )
except:
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was found, but it would not render. An attempt to delete it was made, but that failed as well. This event could indicate hard drive corruption, and it also suggests that hydrus does not have permission to write to its thumbnail folder. Please check everything is ok.' )
self._GenerateFullSizeThumbnail( hash, mime )
thumbnail_resized = HydrusFileHandling.GenerateThumbnailFromStaticImage( full_size_path, thumbnail_dimensions, fullsize_thumbnail_mime )
resized_path = self._GenerateExpectedResizedThumbnailPath( hash )
try:
HydrusPaths.MakeFileWritable( resized_path )
with open( resized_path, 'wb' ) as f:
f.write( thumbnail_resized )
except Exception as e:
HydrusData.ShowException( e )
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was found, but the resized version would not save to disk. This event suggests that hydrus does not have permission to write to its thumbnail folder. Please check everything is ok.' )
def _GetRecoverTuple( self ):
all_locations = { location for location in self._prefixes_to_locations.values() }
all_prefixes = self._prefixes_to_locations.keys()
for possible_location in all_locations:
for prefix in all_prefixes:
correct_location = self._prefixes_to_locations[ prefix ]
if possible_location != correct_location and os.path.exists( os.path.join( possible_location, prefix ) ):
recoverable_location = possible_location
return ( prefix, recoverable_location, correct_location )
return None
def _GetRebalanceTuple( self ):
( locations_to_ideal_weights, resized_thumbnail_override, full_size_thumbnail_override ) = self._controller.new_options.GetClientFilesLocationsToIdealWeights()
total_weight = sum( locations_to_ideal_weights.values() )
ideal_locations_to_normalised_weights = { location : weight / total_weight for ( location, weight ) in locations_to_ideal_weights.items() }
current_locations_to_normalised_weights = collections.defaultdict( lambda: 0 )
file_prefixes = [ prefix for prefix in self._prefixes_to_locations if prefix.startswith( 'f' ) ]
for file_prefix in file_prefixes:
location = self._prefixes_to_locations[ file_prefix ]
current_locations_to_normalised_weights[ location ] += 1.0 / 256
for location in current_locations_to_normalised_weights.keys():
if location not in ideal_locations_to_normalised_weights:
ideal_locations_to_normalised_weights[ location ] = 0.0
#
overweight_locations = []
underweight_locations = []
for ( location, ideal_weight ) in ideal_locations_to_normalised_weights.items():
if location in current_locations_to_normalised_weights:
current_weight = current_locations_to_normalised_weights[ location ]
if current_weight < ideal_weight:
underweight_locations.append( location )
elif current_weight >= ideal_weight + 1.0 / 256:
overweight_locations.append( location )
else:
underweight_locations.append( location )
#
if len( underweight_locations ) > 0 and len( overweight_locations ) > 0:
overweight_location = overweight_locations.pop( 0 )
underweight_location = underweight_locations.pop( 0 )
random.shuffle( file_prefixes )
for file_prefix in file_prefixes:
location = self._prefixes_to_locations[ file_prefix ]
if location == overweight_location:
return ( file_prefix, overweight_location, underweight_location )
else:
if full_size_thumbnail_override is None:
for hex_prefix in HydrusData.IterateHexPrefixes():
full_size_prefix = 't' + hex_prefix
file_prefix = 'f' + hex_prefix
full_size_location = self._prefixes_to_locations[ full_size_prefix ]
file_location = self._prefixes_to_locations[ file_prefix ]
if full_size_location != file_location:
return ( full_size_prefix, full_size_location, file_location )
else:
for hex_prefix in HydrusData.IterateHexPrefixes():
full_size_prefix = 't' + hex_prefix
full_size_location = self._prefixes_to_locations[ full_size_prefix ]
if full_size_location != full_size_thumbnail_override:
return ( full_size_prefix, full_size_location, full_size_thumbnail_override )
if resized_thumbnail_override is None:
for hex_prefix in HydrusData.IterateHexPrefixes():
resized_prefix = 'r' + hex_prefix
file_prefix = 'f' + hex_prefix
resized_location = self._prefixes_to_locations[ resized_prefix ]
file_location = self._prefixes_to_locations[ file_prefix ]
if resized_location != file_location:
return ( resized_prefix, resized_location, file_location )
else:
for hex_prefix in HydrusData.IterateHexPrefixes():
resized_prefix = 'r' + hex_prefix
resized_location = self._prefixes_to_locations[ resized_prefix ]
if resized_location != resized_thumbnail_override:
return ( resized_prefix, resized_location, resized_thumbnail_override )
return None
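    # The returned tuple is ( prefix, current_location, better_location ); e.g.
    # (hypothetical) ( 'f07', 'D:\old_files', 'E:\new_files' ) means subfolder
    # 'f07' should move from the overweight location to the underweight one.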
def _IterateAllFilePaths( self ):
for ( prefix, location ) in self._prefixes_to_locations.items():
if prefix.startswith( 'f' ):
dir = os.path.join( location, prefix )
filenames = os.listdir( dir )
for filename in filenames:
yield os.path.join( dir, filename )
def _IterateAllThumbnailPaths( self ):
for ( prefix, location ) in self._prefixes_to_locations.items():
if prefix.startswith( 't' ) or prefix.startswith( 'r' ):
dir = os.path.join( location, prefix )
filenames = os.listdir( dir )
for filename in filenames:
yield os.path.join( dir, filename )
def _LookForFilePath( self, hash ):
for potential_mime in HC.ALLOWED_MIMES:
potential_path = self._GenerateExpectedFilePath( hash, potential_mime )
if os.path.exists( potential_path ):
return potential_path
raise HydrusExceptions.FileMissingException( 'File for ' + hash.encode( 'hex' ) + ' not found!' )
def _Reinit( self ):
self._prefixes_to_locations = self._controller.Read( 'client_files_locations' )
if HG.client_controller.IsFirstStart():
try:
for ( prefix, location ) in self._prefixes_to_locations.items():
HydrusPaths.MakeSureDirectoryExists( location )
subdir = os.path.join( location, prefix )
HydrusPaths.MakeSureDirectoryExists( subdir )
except:
text = 'Attempting to create the database\'s client_files folder structure failed!'
wx.MessageBox( text )
raise
else:
self._missing_locations = set()
for ( prefix, location ) in self._prefixes_to_locations.items():
if os.path.exists( location ):
subdir = os.path.join( location, prefix )
if not os.path.exists( subdir ):
self._missing_locations.add( ( location, prefix ) )
else:
self._missing_locations.add( ( location, prefix ) )
if len( self._missing_locations ) > 0:
self._bad_error_occured = True
#
missing_dict = HydrusData.BuildKeyToListDict( self._missing_locations )
missing_locations = list( missing_dict.keys() )
missing_locations.sort()
missing_string = ''
for l in missing_locations:
missing_prefixes = list( missing_dict[ l ] )
missing_prefixes.sort()
missing_prefixes_string = ' ' + os.linesep.join( ( ', '.join( block ) for block in HydrusData.SplitListIntoChunks( missing_prefixes, 32 ) ) )
missing_string += os.linesep
missing_string += l
missing_string += os.linesep
missing_string += missing_prefixes_string
#
if len( self._missing_locations ) > 4:
text = 'When initialising the client files manager, some file locations did not exist! They have all been written to the log!'
text += os.linesep * 2
text += 'If this is happening on client boot, you should now be presented with a dialog to correct this manually!'
wx.MessageBox( text )
HydrusData.DebugPrint( text )
HydrusData.DebugPrint( 'Missing locations follow:' )
HydrusData.DebugPrint( missing_string )
else:
text = 'When initialising the client files manager, these file locations did not exist:'
text += os.linesep * 2
text += missing_string
text += os.linesep * 2
text += 'If this is happening on client boot, you should now be presented with a dialog to correct this manually!'
wx.MessageBox( text )
HydrusData.DebugPrint( text )
def GetMissing( self ):
return self._missing_locations
def LocklessAddFileFromString( self, hash, mime, data ):
dest_path = self._GenerateExpectedFilePath( hash, mime )
HydrusPaths.MakeFileWritable( dest_path )
with open( dest_path, 'wb' ) as f:
f.write( data )
def LocklessAddFile( self, hash, mime, source_path ):
dest_path = self._GenerateExpectedFilePath( hash, mime )
if not os.path.exists( dest_path ):
successful = HydrusPaths.MirrorFile( source_path, dest_path )
if not successful:
raise Exception( 'There was a problem copying the file from ' + source_path + ' to ' + dest_path + '!' )
def AddFullSizeThumbnail( self, hash, thumbnail ):
with self._lock:
self.LocklessAddFullSizeThumbnail( hash, thumbnail )
def LocklessAddFullSizeThumbnail( self, hash, thumbnail ):
path = self._GenerateExpectedFullSizeThumbnailPath( hash )
HydrusPaths.MakeFileWritable( path )
with open( path, 'wb' ) as f:
f.write( thumbnail )
resized_path = self._GenerateExpectedResizedThumbnailPath( hash )
if os.path.exists( resized_path ):
ClientPaths.DeletePath( resized_path, always_delete_fully = True )
self._controller.pub( 'clear_thumbnails', { hash } )
self._controller.pub( 'new_thumbnails', { hash } )
def CheckFileIntegrity( self, *args, **kwargs ):
with self._lock:
self._controller.WriteSynchronous( 'file_integrity', *args, **kwargs )
def ClearOrphans( self, move_location = None ):
with self._lock:
job_key = ClientThreading.JobKey( cancellable = True )
job_key.SetVariable( 'popup_title', 'clearing orphans' )
job_key.SetVariable( 'popup_text_1', 'preparing' )
self._controller.pub( 'message', job_key )
orphan_paths = []
orphan_thumbnails = []
for ( i, path ) in enumerate( self._IterateAllFilePaths() ):
( i_paused, should_quit ) = job_key.WaitIfNeeded()
if should_quit:
return
if i % 100 == 0:
status = 'reviewed ' + HydrusData.ToHumanInt( i ) + ' files, found ' + HydrusData.ToHumanInt( len( orphan_paths ) ) + ' orphans'
job_key.SetVariable( 'popup_text_1', status )
try:
is_an_orphan = False
( directory, filename ) = os.path.split( path )
should_be_a_hex_hash = filename[:64]
hash = should_be_a_hex_hash.decode( 'hex' )
is_an_orphan = HG.client_controller.Read( 'is_an_orphan', 'file', hash )
except:
is_an_orphan = True
if is_an_orphan:
if move_location is not None:
( source_dir, filename ) = os.path.split( path )
dest = os.path.join( move_location, filename )
dest = HydrusPaths.AppendPathUntilNoConflicts( dest )
HydrusData.Print( 'Moving the orphan ' + path + ' to ' + dest )
HydrusPaths.MergeFile( path, dest )
orphan_paths.append( path )
time.sleep( 2 )
for ( i, path ) in enumerate( self._IterateAllThumbnailPaths() ):
( i_paused, should_quit ) = job_key.WaitIfNeeded()
if should_quit:
return
if i % 100 == 0:
status = 'reviewed ' + HydrusData.ToHumanInt( i ) + ' thumbnails, found ' + HydrusData.ToHumanInt( len( orphan_thumbnails ) ) + ' orphans'
job_key.SetVariable( 'popup_text_1', status )
try:
is_an_orphan = False
( directory, filename ) = os.path.split( path )
should_be_a_hex_hash = filename[:64]
hash = should_be_a_hex_hash.decode( 'hex' )
is_an_orphan = HG.client_controller.Read( 'is_an_orphan', 'thumbnail', hash )
except:
is_an_orphan = True
if is_an_orphan:
orphan_thumbnails.append( path )
time.sleep( 2 )
if move_location is None and len( orphan_paths ) > 0:
status = 'found ' + HydrusData.ToHumanInt( len( orphan_paths ) ) + ' orphans, now deleting'
job_key.SetVariable( 'popup_text_1', status )
time.sleep( 5 )
for path in orphan_paths:
( i_paused, should_quit ) = job_key.WaitIfNeeded()
if should_quit:
return
HydrusData.Print( 'Deleting the orphan ' + path )
status = 'deleting orphan files: ' + HydrusData.ConvertValueRangeToPrettyString( i + 1, len( orphan_paths ) )
job_key.SetVariable( 'popup_text_1', status )
ClientPaths.DeletePath( path )
if len( orphan_thumbnails ) > 0:
status = 'found ' + HydrusData.ToHumanInt( len( orphan_thumbnails ) ) + ' orphan thumbnails, now deleting'
job_key.SetVariable( 'popup_text_1', status )
time.sleep( 5 )
for ( i, path ) in enumerate( orphan_thumbnails ):
( i_paused, should_quit ) = job_key.WaitIfNeeded()
if should_quit:
return
status = 'deleting orphan thumbnails: ' + HydrusData.ConvertValueRangeToPrettyString( i + 1, len( orphan_thumbnails ) )
job_key.SetVariable( 'popup_text_1', status )
HydrusData.Print( 'Deleting the orphan ' + path )
ClientPaths.DeletePath( path, always_delete_fully = True )
if len( orphan_paths ) == 0 and len( orphan_thumbnails ) == 0:
final_text = 'no orphans found!'
else:
final_text = HydrusData.ToHumanInt( len( orphan_paths ) ) + ' orphan files and ' + HydrusData.ToHumanInt( len( orphan_thumbnails ) ) + ' orphan thumbnails cleared!'
job_key.SetVariable( 'popup_text_1', final_text )
HydrusData.Print( job_key.ToString() )
job_key.Finish()
def DelayedDeleteFiles( self, hashes, time_to_delete ):
while not HydrusData.TimeHasPassed( time_to_delete ):
time.sleep( 0.5 )
big_pauser = HydrusData.BigJobPauser( period = 1 )
with self._lock:
for hash in hashes:
try:
path = self._LookForFilePath( hash )
except HydrusExceptions.FileMissingException:
continue
ClientPaths.DeletePath( path )
big_pauser.Pause()
def DelayedDeleteThumbnails( self, hashes, time_to_delete ):
while not HydrusData.TimeHasPassed( time_to_delete ):
time.sleep( 0.5 )
with self._lock:
big_pauser = HydrusData.BigJobPauser( period = 1 )
for hash in hashes:
path = self._GenerateExpectedFullSizeThumbnailPath( hash )
resized_path = self._GenerateExpectedResizedThumbnailPath( hash )
ClientPaths.DeletePath( path, always_delete_fully = True )
ClientPaths.DeletePath( resized_path, always_delete_fully = True )
big_pauser.Pause()
def GetFilePath( self, hash, mime = None ):
with self._lock:
return self.LocklessGetFilePath( hash, mime )
def ImportFile( self, file_import_job ):
( pre_import_status, hash, note ) = file_import_job.GenerateHashAndStatus()
if file_import_job.IsNewToDB():
file_import_job.GenerateInfo()
file_import_job.CheckIsGoodToImport()
( temp_path, thumbnail ) = file_import_job.GetTempPathAndThumbnail()
mime = file_import_job.GetMime()
with self._lock:
self.LocklessAddFile( hash, mime, temp_path )
if thumbnail is not None:
self.LocklessAddFullSizeThumbnail( hash, thumbnail )
( import_status, note ) = self._controller.WriteSynchronous( 'import_file', file_import_job )
else:
import_status = pre_import_status
file_import_job.PubsubContentUpdates()
return ( import_status, hash, note )
def LocklessGetFilePath( self, hash, mime = None ):
if mime is None:
path = self._LookForFilePath( hash )
else:
path = self._GenerateExpectedFilePath( hash, mime )
if not os.path.exists( path ):
raise HydrusExceptions.FileMissingException( 'No file found at path ' + path + '!' )
return path
def GetFullSizeThumbnailPath( self, hash, mime = None ):
with self._lock:
path = self._GenerateExpectedFullSizeThumbnailPath( hash )
if not os.path.exists( path ):
self._GenerateFullSizeThumbnail( hash, mime )
if not self._bad_error_occured:
self._bad_error_occured = True
HydrusData.ShowText( 'A thumbnail for a file, ' + hash.encode( 'hex' ) + ', was missing. It has been regenerated from the original file, but this event could indicate hard drive corruption. Please check everything is ok. This error may be occurring for many files, but this message will only display once per boot. If you are recovering from a fractured database, you may wish to run \'database->regenerate->all thumbnails\'.' )
return path
def GetResizedThumbnailPath( self, hash, mime ):
with self._lock:
path = self._GenerateExpectedResizedThumbnailPath( hash )
if not os.path.exists( path ):
self._GenerateResizedThumbnail( hash, mime )
return path
def LocklessHasFullSizeThumbnail( self, hash ):
path = self._GenerateExpectedFullSizeThumbnailPath( hash )
return os.path.exists( path )
def Rebalance( self, job_key ):
try:
if self._bad_error_occured:
wx.MessageBox( 'A serious file error has previously occurred during this session, so further file moving will not be reattempted. Please restart the client before trying again.' )
return
with self._lock:
rebalance_tuple = self._GetRebalanceTuple()
while rebalance_tuple is not None:
if job_key.IsCancelled():
break
( prefix, overweight_location, underweight_location ) = rebalance_tuple
text = 'Moving \'' + prefix + '\' from ' + overweight_location + ' to ' + underweight_location
HydrusData.Print( text )
job_key.SetVariable( 'popup_text_1', text )
# these two lines can cause a deadlock because the db sometimes calls stuff in here.
self._controller.Write( 'relocate_client_files', prefix, overweight_location, underweight_location )
self._Reinit()
rebalance_tuple = self._GetRebalanceTuple()
recover_tuple = self._GetRecoverTuple()
while recover_tuple is not None:
if job_key.IsCancelled():
break
( prefix, recoverable_location, correct_location ) = recover_tuple
text = 'Recovering \'' + prefix + '\' from ' + recoverable_location + ' to ' + correct_location
HydrusData.Print( text )
job_key.SetVariable( 'popup_text_1', text )
recoverable_path = os.path.join( recoverable_location, prefix )
correct_path = os.path.join( correct_location, prefix )
HydrusPaths.MergeTree( recoverable_path, correct_path )
recover_tuple = self._GetRecoverTuple()
finally:
job_key.SetVariable( 'popup_text_1', 'done!' )
job_key.Finish()
job_key.Delete()
def RebalanceWorkToDo( self ):
with self._lock:
return self._GetRebalanceTuple() is not None
def RegenerateResizedThumbnail( self, hash, mime ):
with self._lock:
self.LocklessRegenerateResizedThumbnail( hash, mime )
def LocklessRegenerateResizedThumbnail( self, hash, mime ):
self._GenerateResizedThumbnail( hash, mime )
def RegenerateThumbnails( self, only_do_missing = False ):
with self._lock:
job_key = ClientThreading.JobKey( cancellable = True )
job_key.SetVariable( 'popup_title', 'regenerating thumbnails' )
job_key.SetVariable( 'popup_text_1', 'creating directories' )
self._controller.pub( 'modal_message', job_key )
num_broken = 0
for ( i, path ) in enumerate( self._IterateAllFilePaths() ):
try:
while job_key.IsPaused() or job_key.IsCancelled():
time.sleep( 0.1 )
if job_key.IsCancelled():
job_key.SetVariable( 'popup_text_1', 'cancelled' )
HydrusData.Print( job_key.ToString() )
return
job_key.SetVariable( 'popup_text_1', HydrusData.ToHumanInt( i ) + ' done' )
( base, filename ) = os.path.split( path )
if '.' in filename:
( hash_encoded, ext ) = filename.split( '.', 1 )
else:
continue # it is an update file, so let's save us some ffmpeg lag and logspam
hash = hash_encoded.decode( 'hex' )
full_size_path = self._GenerateExpectedFullSizeThumbnailPath( hash )
if only_do_missing and os.path.exists( full_size_path ):
continue
mime = HydrusFileHandling.GetMime( path )
if mime in HC.MIMES_WITH_THUMBNAILS:
self._GenerateFullSizeThumbnail( hash, mime )
thumbnail_resized_path = self._GenerateExpectedResizedThumbnailPath( hash )
if os.path.exists( thumbnail_resized_path ):
ClientPaths.DeletePath( thumbnail_resized_path, always_delete_fully = True )
except:
HydrusData.Print( path )
HydrusData.Print( traceback.format_exc() )
num_broken += 1
if num_broken > 0:
job_key.SetVariable( 'popup_text_1', 'done! ' + HydrusData.ToHumanInt( num_broken ) + ' files caused errors, which have been written to the log.' )
else:
job_key.SetVariable( 'popup_text_1', 'done!' )
HydrusData.Print( job_key.ToString() )
job_key.Finish()
class DataCache( object ):
def __init__( self, controller, cache_size, timeout = 1200 ):
self._controller = controller
self._cache_size = cache_size
self._timeout = timeout
self._keys_to_data = {}
self._keys_fifo = collections.OrderedDict()
self._total_estimated_memory_footprint = 0
self._lock = threading.Lock()
self._controller.sub( self, 'MaintainCache', 'memory_maintenance_pulse' )
def _Delete( self, key ):
if key not in self._keys_to_data:
return
deletee_data = self._keys_to_data[ key ]
del self._keys_to_data[ key ]
self._RecalcMemoryUsage()
def _DeleteItem( self ):
( deletee_key, last_access_time ) = self._keys_fifo.popitem( last = False )
self._Delete( deletee_key )
def _RecalcMemoryUsage( self ):
self._total_estimated_memory_footprint = sum( ( data.GetEstimatedMemoryFootprint() for data in self._keys_to_data.values() ) )
def _TouchKey( self, key ):
# have to delete first, rather than overwriting, so the ordereddict updates its internal order
if key in self._keys_fifo:
del self._keys_fifo[ key ]
self._keys_fifo[ key ] = HydrusData.GetNow()
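# a minimal sketch of the touch semantics above (values illustrative):
# fifo = collections.OrderedDict( [ ( 'a', 1 ), ( 'b', 2 ) ] )
# del fifo[ 'a' ]; fifo[ 'a' ] = 3 # 'a' now sits at the end: [ ( 'b', 2 ), ( 'a', 3 ) ]
# (python 3's fifo.move_to_end( 'a' ) would do the same in one call)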
def Clear( self ):
with self._lock:
self._keys_to_data = {}
self._keys_fifo = collections.OrderedDict()
self._total_estimated_memory_footprint = 0
def AddData( self, key, data ):
with self._lock:
if key not in self._keys_to_data:
while self._total_estimated_memory_footprint > self._cache_size:
self._DeleteItem()
self._keys_to_data[ key ] = data
self._TouchKey( key )
self._RecalcMemoryUsage()
def DeleteData( self, key ):
with self._lock:
self._Delete( key )
def GetData( self, key ):
with self._lock:
if key not in self._keys_to_data:
raise Exception( 'Cache error! Looking for ' + HydrusData.ToUnicode( key ) + ', but it was missing.' )
self._TouchKey( key )
return self._keys_to_data[ key ]
def GetIfHasData( self, key ):
with self._lock:
if key in self._keys_to_data:
self._TouchKey( key )
return self._keys_to_data[ key ]
else:
return None
def HasData( self, key ):
with self._lock:
return key in self._keys_to_data
def MaintainCache( self ):
with self._lock:
while True:
if len( self._keys_fifo ) == 0:
break
else:
( key, last_access_time ) = next( self._keys_fifo.iteritems() )
if HydrusData.TimeHasPassed( last_access_time + self._timeout ):
self._DeleteItem()
else:
break
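# hedged usage sketch for DataCache (controller elided, names hypothetical):
# cache = DataCache( controller, cache_size = 1024 * 1024, timeout = 600 )
# cache.AddData( key, data ) # data must implement GetEstimatedMemoryFootprint()
# cache.GetIfHasData( key ) # returns the object, or None once evicted
# cache.MaintainCache() # drops entries untouched for longer than the timeout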
class LocalBooruCache( object ):
def __init__( self, controller ):
self._controller = controller
self._lock = threading.Lock()
self._RefreshShares()
self._controller.sub( self, 'RefreshShares', 'refresh_local_booru_shares' )
self._controller.sub( self, 'RefreshShares', 'restart_booru' )
def _CheckDataUsage( self ):
if not self._local_booru_service.BandwidthOK():
raise HydrusExceptions.ForbiddenException( 'This booru has used all its monthly data. Please try again next month.' )
def _CheckFileAuthorised( self, share_key, hash ):
self._CheckShareAuthorised( share_key )
info = self._GetInfo( share_key )
if hash not in info[ 'hashes_set' ]:
raise HydrusExceptions.NotFoundException( 'That file was not found in that share.' )
def _CheckShareAuthorised( self, share_key ):
self._CheckDataUsage()
info = self._GetInfo( share_key )
timeout = info[ 'timeout' ]
if timeout is not None and HydrusData.TimeHasPassed( timeout ):
raise HydrusExceptions.ForbiddenException( 'This share has expired.' )
def _GetInfo( self, share_key ):
try: info = self._keys_to_infos[ share_key ]
except: raise HydrusExceptions.NotFoundException( 'Did not find that share on this booru.' )
if info is None:
info = self._controller.Read( 'local_booru_share', share_key )
hashes = info[ 'hashes' ]
info[ 'hashes_set' ] = set( hashes )
media_results = self._controller.Read( 'media_results', hashes )
info[ 'media_results' ] = media_results
hashes_to_media_results = { media_result.GetHash() : media_result for media_result in media_results }
info[ 'hashes_to_media_results' ] = hashes_to_media_results
self._keys_to_infos[ share_key ] = info
return info
def _RefreshShares( self ):
self._local_booru_service = self._controller.services_manager.GetService( CC.LOCAL_BOORU_SERVICE_KEY )
self._keys_to_infos = {}
share_keys = self._controller.Read( 'local_booru_share_keys' )
for share_key in share_keys: self._keys_to_infos[ share_key ] = None
def CheckShareAuthorised( self, share_key ):
with self._lock: self._CheckShareAuthorised( share_key )
def CheckFileAuthorised( self, share_key, hash ):
with self._lock: self._CheckFileAuthorised( share_key, hash )
def GetGalleryInfo( self, share_key ):
with self._lock:
self._CheckShareAuthorised( share_key )
info = self._GetInfo( share_key )
name = info[ 'name' ]
text = info[ 'text' ]
timeout = info[ 'timeout' ]
media_results = info[ 'media_results' ]
return ( name, text, timeout, media_results )
def GetMediaResult( self, share_key, hash ):
with self._lock:
info = self._GetInfo( share_key )
media_result = info[ 'hashes_to_media_results' ][ hash ]
return media_result
def GetPageInfo( self, share_key, hash ):
with self._lock:
self._CheckFileAuthorised( share_key, hash )
info = self._GetInfo( share_key )
name = info[ 'name' ]
text = info[ 'text' ]
timeout = info[ 'timeout' ]
media_result = info[ 'hashes_to_media_results' ][ hash ]
return ( name, text, timeout, media_result )
def RefreshShares( self ):
with self._lock:
self._RefreshShares()
class MenuEventIdToActionCache( object ):
def __init__( self ):
self._ids_to_actions = {}
self._actions_to_ids = {}
self._temporary_ids = set()
self._free_temporary_ids = set()
def _ClearTemporaries( self ):
for temporary_id in self._temporary_ids.difference( self._free_temporary_ids ):
temporary_action = self._ids_to_actions[ temporary_id ]
del self._ids_to_actions[ temporary_id ]
del self._actions_to_ids[ temporary_action ]
self._free_temporary_ids = set( self._temporary_ids )
def _GetNewId( self, temporary ):
if temporary:
if len( self._free_temporary_ids ) == 0:
new_id = wx.NewId()
self._temporary_ids.add( new_id )
self._free_temporary_ids.add( new_id )
return self._free_temporary_ids.pop()
else:
return wx.NewId()
def GetAction( self, event_id ):
action = None
if event_id in self._ids_to_actions:
action = self._ids_to_actions[ event_id ]
if event_id in self._temporary_ids:
self._ClearTemporaries()
return action
def GetId( self, command, data = None, temporary = False ):
action = ( command, data )
if action not in self._actions_to_ids:
event_id = self._GetNewId( temporary )
self._ids_to_actions[ event_id ] = action
self._actions_to_ids[ action ] = event_id
return self._actions_to_ids[ action ]
def GetPermanentId( self, command, data = None ):
return self.GetId( command, data, False )
def GetTemporaryId( self, command, data = None ):
temporary = True
if data is None:
temporary = False
return self.GetId( command, data, temporary )
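# hedged round-trip sketch (ids are process-local wx values, data hypothetical):
# event_id = MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'copy_path', '/tmp/x' )
# MENU_EVENT_ID_TO_ACTION_CACHE.GetAction( event_id ) # -> ( 'copy_path', '/tmp/x' )
# fetching a temporary id's action also recycles the consumed temporary ids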
MENU_EVENT_ID_TO_ACTION_CACHE = MenuEventIdToActionCache()
class ParsingCache( object ):
def __init__( self ):
self._html_to_soups = {}
self._json_to_jsons = {}
self._lock = threading.Lock()
def _CleanCache( self ):
for cache in ( self._html_to_soups, self._json_to_jsons ):
dead_datas = set()
for ( data, ( last_accessed, parsed_object ) ) in cache.items():
if HydrusData.TimeHasPassed( last_accessed + 10 ):
dead_datas.add( data )
for dead_data in dead_datas:
del cache[ dead_data ]
def CleanCache( self ):
with self._lock:
self._CleanCache()
def GetJSON( self, json_text ):
with self._lock:
now = HydrusData.GetNow()
if json_text not in self._json_to_jsons:
json_object = json.loads( json_text )
self._json_to_jsons[ json_text ] = ( now, json_object )
( last_accessed, json_object ) = self._json_to_jsons[ json_text ]
if last_accessed != now:
self._json_to_jsons[ json_text ] = ( now, json_object )
if len( self._json_to_jsons ) > 10:
self._CleanCache()
return json_object
def GetSoup( self, html ):
with self._lock:
now = HydrusData.GetNow()
if html not in self._html_to_soups:
soup = ClientParsing.GetSoup( html )
self._html_to_soups[ html ] = ( now, soup )
( last_accessed, soup ) = self._html_to_soups[ html ]
if last_accessed != now:
self._html_to_soups[ html ] = ( now, soup )
if len( self._html_to_soups ) > 10:
self._CleanCache()
return soup
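# hedged sketch: parsing identical text twice within the ~10s window reuses the object
# pc = ParsingCache()
# a = pc.GetJSON( '{"k": 1}' )
# b = pc.GetJSON( '{"k": 1}' ) # a is b -> True while the entry is fresh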
class RenderedImageCache( object ):
def __init__( self, controller ):
self._controller = controller
cache_size = self._controller.options[ 'fullscreen_cache_size' ]
cache_timeout = self._controller.new_options.GetInteger( 'image_cache_timeout' )
self._data_cache = DataCache( self._controller, cache_size, timeout = cache_timeout )
def Clear( self ):
self._data_cache.Clear()
def GetImageRenderer( self, media ):
hash = media.GetHash()
key = hash
result = self._data_cache.GetIfHasData( key )
if result is None:
image_renderer = ClientRendering.ImageRenderer( media )
self._data_cache.AddData( key, image_renderer )
else:
image_renderer = result
return image_renderer
def HasImageRenderer( self, hash ):
key = hash
return self._data_cache.HasData( key )
class ThumbnailCache( object ):
def __init__( self, controller ):
self._controller = controller
cache_size = self._controller.options[ 'thumbnail_cache_size' ]
cache_timeout = self._controller.new_options.GetInteger( 'thumbnail_cache_timeout' )
self._data_cache = DataCache( self._controller, cache_size, timeout = cache_timeout )
self._lock = threading.Lock()
self._waterfall_queue_quick = set()
self._waterfall_queue_random = []
self._waterfall_event = threading.Event()
self._special_thumbs = {}
self.Clear()
self._controller.CallToThreadLongRunning( self.DAEMONWaterfall )
self._controller.sub( self, 'Clear', 'thumbnail_resize' )
self._controller.sub( self, 'ClearThumbnails', 'clear_thumbnails' )
def _GetResizedHydrusBitmapFromHardDrive( self, display_media ):
thumbnail_dimensions = self._controller.options[ 'thumbnail_dimensions' ]
if tuple( thumbnail_dimensions ) == HC.UNSCALED_THUMBNAIL_DIMENSIONS:
full_size = True
else:
full_size = False
hash = display_media.GetHash()
mime = display_media.GetMime()
locations_manager = display_media.GetLocationsManager()
try:
if full_size:
path = self._controller.client_files_manager.GetFullSizeThumbnailPath( hash, mime )
else:
path = self._controller.client_files_manager.GetResizedThumbnailPath( hash, mime )
except HydrusExceptions.FileMissingException as e:
if locations_manager.IsLocal():
HydrusData.ShowException( e )
return self._special_thumbs[ 'hydrus' ]
mime = display_media.GetMime()
try:
hydrus_bitmap = ClientRendering.GenerateHydrusBitmap( path, mime )
except Exception as e:
try:
self._controller.client_files_manager.RegenerateResizedThumbnail( hash, mime )
try:
hydrus_bitmap = ClientRendering.GenerateHydrusBitmap( path, mime )
except Exception as e:
HydrusData.ShowException( e )
raise HydrusExceptions.FileMissingException( 'The thumbnail for file ' + hash.encode( 'hex' ) + ' was broken. It was regenerated, but the new file would not render for the above reason. Please inform the hydrus developer what has happened.' )
except Exception as e:
HydrusData.ShowException( e )
return self._special_thumbs[ 'hydrus' ]
( media_x, media_y ) = display_media.GetResolution()
( actual_x, actual_y ) = hydrus_bitmap.GetSize()
( desired_x, desired_y ) = self._controller.options[ 'thumbnail_dimensions' ]
too_large = actual_x > desired_x or actual_y > desired_y
small_original_image = actual_x == media_x and actual_y == media_y
too_small = actual_x < desired_x and actual_y < desired_y
if too_large or ( too_small and not small_original_image ):
self._controller.client_files_manager.RegenerateResizedThumbnail( hash, mime )
hydrus_bitmap = ClientRendering.GenerateHydrusBitmap( path, mime )
return hydrus_bitmap
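# illustrative numbers for the regen checks above (dimensions hypothetical):
# desired 200x200, actual 400x400 -> too_large, so regenerate
# desired 200x200, actual 150x150, media 150x150 -> small original, keep as-is
# desired 200x200, actual 150x150, media 800x600 -> too_small, so regenerate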
def _RecalcWaterfallQueueRandom( self ):
# here we sort by the hash since this is both pretty random and likely to be faster to access on a well-defragged hard drive!
def sort_by_hash_key( ( page_key, media ) ):
return media.GetDisplayMedia().GetHash()
self._waterfall_queue_random = list( self._waterfall_queue_quick )
self._waterfall_queue_random.sort( key = sort_by_hash_key )
def CancelWaterfall( self, page_key, medias ):
with self._lock:
self._waterfall_queue_quick.difference_update( ( ( page_key, media ) for media in medias ) )
self._RecalcWaterfallQueueRandom()
def Clear( self ):
with self._lock:
self._data_cache.Clear()
self._special_thumbs = {}
names = [ 'hydrus', 'pdf', 'audio', 'video', 'zip' ]
( os_file_handle, temp_path ) = ClientPaths.GetTempPath()
try:
for name in names:
path = os.path.join( HC.STATIC_DIR, name + '.png' )
thumbnail_dimensions = self._controller.options[ 'thumbnail_dimensions' ]
thumbnail = HydrusFileHandling.GenerateThumbnailFromStaticImage( path, thumbnail_dimensions, HC.IMAGE_PNG )
with open( temp_path, 'wb' ) as f:
f.write( thumbnail )
hydrus_bitmap = ClientRendering.GenerateHydrusBitmap( temp_path, HC.IMAGE_PNG )
self._special_thumbs[ name ] = hydrus_bitmap
finally:
HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
def ClearThumbnails( self, hashes ):
with self._lock:
for hash in hashes:
self._data_cache.DeleteData( hash )
def DoingWork( self ):
with self._lock:
return len( self._waterfall_queue_random ) > 0
def GetThumbnail( self, media ):
try:
display_media = media.GetDisplayMedia()
except:
# sometimes media can get switched around during a collect event, and if this happens during waterfall, we have a problem here
# just return for now, we'll see how it goes
return self._special_thumbs[ 'hydrus' ]
locations_manager = display_media.GetLocationsManager()
if locations_manager.ShouldIdeallyHaveThumbnail():
mime = display_media.GetMime()
if mime in HC.MIMES_WITH_THUMBNAILS:
hash = display_media.GetHash()
result = self._data_cache.GetIfHasData( hash )
if result is None:
if locations_manager.ShouldDefinitelyHaveThumbnail():
# local file, should be able to regen if needed
hydrus_bitmap = self._GetResizedHydrusBitmapFromHardDrive( display_media )
else:
# repository file, maybe not actually available yet
try:
hydrus_bitmap = self._GetResizedHydrusBitmapFromHardDrive( display_media )
except:
hydrus_bitmap = self._special_thumbs[ 'hydrus' ]
self._data_cache.AddData( hash, hydrus_bitmap )
else:
hydrus_bitmap = result
return hydrus_bitmap
elif mime in HC.AUDIO: return self._special_thumbs[ 'audio' ]
elif mime in HC.VIDEO: return self._special_thumbs[ 'video' ]
elif mime == HC.APPLICATION_PDF: return self._special_thumbs[ 'pdf' ]
elif mime in HC.ARCHIVES: return self._special_thumbs[ 'zip' ]
else: return self._special_thumbs[ 'hydrus' ]
else:
return self._special_thumbs[ 'hydrus' ]
def HasThumbnailCached( self, media ):
display_media = media.GetDisplayMedia()
mime = display_media.GetMime()
if mime in HC.MIMES_WITH_THUMBNAILS:
hash = display_media.GetHash()
return self._data_cache.HasData( hash )
else:
return True
def Waterfall( self, page_key, medias ):
with self._lock:
self._waterfall_queue_quick.update( ( ( page_key, media ) for media in medias ) )
self._RecalcWaterfallQueueRandom()
self._waterfall_event.set()
def DAEMONWaterfall( self ):
last_paused = HydrusData.GetNowPrecise()
while not HydrusThreading.IsThreadShuttingDown():
with self._lock:
do_wait = len( self._waterfall_queue_random ) == 0
if do_wait:
self._waterfall_event.wait( 1 )
self._waterfall_event.clear()
last_paused = HydrusData.GetNowPrecise()
start_time = HydrusData.GetNowPrecise()
stop_time = start_time + 0.005 # a bit of a typical frame
page_keys_to_rendered_medias = collections.defaultdict( list )
while not HydrusData.TimeHasPassedPrecise( stop_time ):
with self._lock:
if len( self._waterfall_queue_random ) == 0:
break
result = self._waterfall_queue_random.pop()
self._waterfall_queue_quick.discard( result )
( page_key, media ) = result
try:
self.GetThumbnail( media ) # to load it
page_keys_to_rendered_medias[ page_key ].append( media )
except Exception as e:
HydrusData.ShowException( e )
for ( page_key, rendered_medias ) in page_keys_to_rendered_medias.items():
self._controller.pub( 'waterfall_thumbnails', page_key, rendered_medias )
time.sleep( 0.00001 )
class ServicesManager( object ):
def __init__( self, controller ):
self._controller = controller
self._lock = threading.Lock()
self._keys_to_services = {}
self._services_sorted = []
self.RefreshServices()
self._controller.sub( self, 'RefreshServices', 'notify_new_services_data' )
def _GetService( self, service_key ):
try:
return self._keys_to_services[ service_key ]
except KeyError:
raise HydrusExceptions.DataMissing( 'That service was not found!' )
def _SetServices( self, services ):
self._keys_to_services = { service.GetServiceKey() : service for service in services }
self._keys_to_services[ CC.TEST_SERVICE_KEY ] = ClientServices.GenerateService( CC.TEST_SERVICE_KEY, HC.TEST_SERVICE, CC.TEST_SERVICE_KEY )
def compare_function( a, b ):
return cmp( a.GetName(), b.GetName() )
self._services_sorted = list( services )
self._services_sorted.sort( cmp = compare_function )
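# note: cmp-based sorting is python 2 only; the python 3 equivalent would be
# self._services_sorted.sort( key = lambda service: service.GetName() )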
def Filter( self, service_keys, desired_types ):
with self._lock:
def func( service_key ):
return self._keys_to_services[ service_key ].GetServiceType() in desired_types
filtered_service_keys = filter( func, service_keys )
return filtered_service_keys
def FilterValidServiceKeys( self, service_keys ):
with self._lock:
def func( service_key ):
return service_key in self._keys_to_services
filtered_service_keys = filter( func, service_keys )
return filtered_service_keys
def GetName( self, service_key ):
with self._lock:
service = self._GetService( service_key )
return service.GetName()
def GetService( self, service_key ):
with self._lock:
return self._GetService( service_key )
def GetServiceType( self, service_key ):
with self._lock:
return self._GetService( service_key ).GetServiceType()
def GetServiceKeys( self, desired_types = HC.ALL_SERVICES ):
with self._lock:
filtered_service_keys = [ service_key for ( service_key, service ) in self._keys_to_services.items() if service.GetServiceType() in desired_types ]
return filtered_service_keys
def GetServices( self, desired_types = HC.ALL_SERVICES, randomised = True ):
with self._lock:
def func( service ):
return service.GetServiceType() in desired_types
services = filter( func, self._services_sorted )
if randomised:
random.shuffle( services )
return services
def RefreshServices( self ):
with self._lock:
services = self._controller.Read( 'services' )
self._SetServices( services )
def ServiceExists( self, service_key ):
with self._lock:
return service_key in self._keys_to_services
class ShortcutsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._shortcuts = {}
self.RefreshShortcuts()
self._controller.sub( self, 'RefreshShortcuts', 'new_shortcuts' )
def GetCommand( self, shortcuts_names, shortcut ):
for name in shortcuts_names:
if name in self._shortcuts:
command = self._shortcuts[ name ].GetCommand( shortcut )
if command is not None:
if HG.gui_report_mode:
HydrusData.ShowText( 'command matched: ' + repr( command ) )
return command
return None
def RefreshShortcuts( self ):
self._shortcuts = {}
all_shortcuts = HG.client_controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_SHORTCUTS )
for shortcuts in all_shortcuts:
self._shortcuts[ shortcuts.GetName() ] = shortcuts
class TagCensorshipManager( object ):
def __init__( self, controller ):
self._controller = controller
self.RefreshData()
self._controller.sub( self, 'RefreshData', 'notify_new_tag_censorship' )
def _CensorshipMatches( self, tag, blacklist, censorships ):
if blacklist:
return not HydrusTags.CensorshipMatch( tag, censorships )
else:
return HydrusTags.CensorshipMatch( tag, censorships )
def GetInfo( self, service_key ):
if service_key in self._service_keys_to_info: return self._service_keys_to_info[ service_key ]
else: return ( True, set() )
def RefreshData( self ):
rows = self._controller.Read( 'tag_censorship' )
self._service_keys_to_info = { service_key : ( blacklist, censorships ) for ( service_key, blacklist, censorships ) in rows }
def FilterPredicates( self, service_key, predicates ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
predicates = [ predicate for predicate in predicates if predicate.GetType() != HC.PREDICATE_TYPE_TAG or self._CensorshipMatches( predicate.GetValue(), blacklist, censorships ) ]
return predicates
def FilterStatusesToPairs( self, service_key, statuses_to_pairs ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
new_statuses_to_pairs = HydrusData.default_dict_set()
for ( status, pairs ) in statuses_to_pairs.items():
new_statuses_to_pairs[ status ] = { ( one, two ) for ( one, two ) in pairs if self._CensorshipMatches( one, blacklist, censorships ) and self._CensorshipMatches( two, blacklist, censorships ) }
statuses_to_pairs = new_statuses_to_pairs
return statuses_to_pairs
def FilterServiceKeysToStatusesToTags( self, service_keys_to_statuses_to_tags ):
if CC.COMBINED_TAG_SERVICE_KEY in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ CC.COMBINED_TAG_SERVICE_KEY ]
service_keys = service_keys_to_statuses_to_tags.keys()
for service_key in service_keys:
statuses_to_tags = service_keys_to_statuses_to_tags[ service_key ]
statuses = statuses_to_tags.keys()
for status in statuses:
tags = statuses_to_tags[ status ]
statuses_to_tags[ status ] = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
for ( service_key, ( blacklist, censorships ) ) in self._service_keys_to_info.items():
if service_key == CC.COMBINED_TAG_SERVICE_KEY:
continue
if service_key in service_keys_to_statuses_to_tags:
statuses_to_tags = service_keys_to_statuses_to_tags[ service_key ]
statuses = statuses_to_tags.keys()
for status in statuses:
tags = statuses_to_tags[ status ]
statuses_to_tags[ status ] = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
return service_keys_to_statuses_to_tags
def FilterTags( self, service_key, tags ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
tags = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
return tags
class TagParentsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._dirty = False
self._service_keys_to_children_to_parents = collections.defaultdict( HydrusData.default_dict_list )
self._RefreshParents()
self._lock = threading.Lock()
self._controller.sub( self, 'NotifyNewParents', 'notify_new_parents' )
def _RefreshParents( self ):
service_keys_to_statuses_to_pairs = self._controller.Read( 'tag_parents' )
# first collapse siblings
sibling_manager = self._controller.GetManager( 'tag_siblings' )
collapsed_service_keys_to_statuses_to_pairs = collections.defaultdict( HydrusData.default_dict_set )
for ( service_key, statuses_to_pairs ) in service_keys_to_statuses_to_pairs.items():
if service_key == CC.COMBINED_TAG_SERVICE_KEY: continue
for ( status, pairs ) in statuses_to_pairs.items():
pairs = sibling_manager.CollapsePairs( service_key, pairs )
collapsed_service_keys_to_statuses_to_pairs[ service_key ][ status ] = pairs
# now collapse current and pending
service_keys_to_pairs_flat = HydrusData.default_dict_set()
for ( service_key, statuses_to_pairs ) in collapsed_service_keys_to_statuses_to_pairs.items():
pairs_flat = statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
service_keys_to_pairs_flat[ service_key ] = pairs_flat
# now create the combined tag service
combined_pairs_flat = set()
for pairs_flat in service_keys_to_pairs_flat.values():
combined_pairs_flat.update( pairs_flat )
service_keys_to_pairs_flat[ CC.COMBINED_TAG_SERVICE_KEY ] = combined_pairs_flat
#
service_keys_to_simple_children_to_parents = BuildServiceKeysToSimpleChildrenToParents( service_keys_to_pairs_flat )
self._service_keys_to_children_to_parents = BuildServiceKeysToChildrenToParents( service_keys_to_simple_children_to_parents )
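# worked example (tags hypothetical, assuming the Build* helpers map child -> parents
# as their names suggest): with sibling 'kitten' -> 'cat' and the current pair
# ( 'kitten', 'animal' ), CollapsePairs yields ( 'cat', 'animal' ), so
# children_to_parents[ service_key ][ 'cat' ] ends up containing 'animal'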
def ExpandPredicates( self, service_key, predicates ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
results = []
with self._lock:
for predicate in predicates:
results.append( predicate )
if predicate.GetType() == HC.PREDICATE_TYPE_TAG:
tag = predicate.GetValue()
parents = self._service_keys_to_children_to_parents[ service_key ][ tag ]
for parent in parents:
parent_predicate = ClientSearch.Predicate( HC.PREDICATE_TYPE_PARENT, parent )
results.append( parent_predicate )
return results
def ExpandTags( self, service_key, tags ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
tags_results = set( tags )
for tag in tags:
tags_results.update( self._service_keys_to_children_to_parents[ service_key ][ tag ] )
return tags_results
def GetParents( self, service_key, tag ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
return self._service_keys_to_children_to_parents[ service_key ][ tag ]
def NotifyNewParents( self ):
with self._lock:
self._dirty = True
self._controller.CallLater( 1.0, self.RefreshParentsIfDirty )
def RefreshParentsIfDirty( self ):
with self._lock:
if self._dirty:
self._RefreshParents()
self._dirty = False
class TagSiblingsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._dirty = False
self._service_keys_to_siblings = collections.defaultdict( dict )
self._service_keys_to_reverse_lookup = collections.defaultdict( dict )
self._RefreshSiblings()
self._lock = threading.Lock()
self._controller.sub( self, 'NotifyNewSiblings', 'notify_new_siblings_data' )
def _CollapseTags( self, service_key, tags ):
siblings = self._service_keys_to_siblings[ service_key ]
return { siblings[ tag ] if tag in siblings else tag for tag in tags }
def _RefreshSiblings( self ):
self._service_keys_to_siblings = collections.defaultdict( dict )
self._service_keys_to_reverse_lookup = collections.defaultdict( dict )
local_tags_pairs = set()
tag_repo_pairs = set()
service_keys_to_statuses_to_pairs = self._controller.Read( 'tag_siblings' )
for ( service_key, statuses_to_pairs ) in service_keys_to_statuses_to_pairs.items():
all_pairs = statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
if service_key == CC.LOCAL_TAG_SERVICE_KEY:
local_tags_pairs = set( all_pairs )
else:
tag_repo_pairs.update( all_pairs )
siblings = CollapseTagSiblingPairs( [ all_pairs ] )
self._service_keys_to_siblings[ service_key ] = siblings
reverse_lookup = collections.defaultdict( list )
for ( bad, good ) in siblings.items():
reverse_lookup[ good ].append( bad )
self._service_keys_to_reverse_lookup[ service_key ] = reverse_lookup
combined_siblings = CollapseTagSiblingPairs( [ local_tags_pairs, tag_repo_pairs ] )
self._service_keys_to_siblings[ CC.COMBINED_TAG_SERVICE_KEY ] = combined_siblings
combined_reverse_lookup = collections.defaultdict( list )
for ( bad, good ) in combined_siblings.items():
combined_reverse_lookup[ good ].append( bad )
self._service_keys_to_reverse_lookup[ CC.COMBINED_TAG_SERVICE_KEY ] = combined_reverse_lookup
self._controller.pub( 'new_siblings_gui' )
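# worked example (tags hypothetical): pairs { ( 'lotr', 'lord of the rings' ) } give
# siblings[ 'lotr' ] == 'lord of the rings' and
# reverse_lookup[ 'lord of the rings' ] == [ 'lotr' ]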
def CollapsePredicates( self, service_key, predicates ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
results = [ predicate for predicate in predicates if predicate.GetType() != HC.PREDICATE_TYPE_TAG ]
tag_predicates = [ predicate for predicate in predicates if predicate.GetType() == HC.PREDICATE_TYPE_TAG ]
tags_to_predicates = { predicate.GetValue() : predicate for predicate in predicates if predicate.GetType() == HC.PREDICATE_TYPE_TAG }
tags = tags_to_predicates.keys()
tags_to_include_in_results = set()
for tag in tags:
if tag in siblings:
old_tag = tag
old_predicate = tags_to_predicates[ old_tag ]
new_tag = siblings[ old_tag ]
if new_tag not in tags_to_predicates:
( old_pred_type, old_value, old_inclusive ) = old_predicate.GetInfo()
new_predicate = ClientSearch.Predicate( old_pred_type, new_tag, old_inclusive )
tags_to_predicates[ new_tag ] = new_predicate
tags_to_include_in_results.add( new_tag )
new_predicate = tags_to_predicates[ new_tag ]
new_predicate.AddCounts( old_predicate )
else:
tags_to_include_in_results.add( tag )
results.extend( [ tags_to_predicates[ tag ] for tag in tags_to_include_in_results ] )
return results
def CollapsePairs( self, service_key, pairs ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
result = set()
for ( a, b ) in pairs:
if a in siblings:
a = siblings[ a ]
if b in siblings:
b = siblings[ b ]
result.add( ( a, b ) )
return result
def CollapseStatusesToTags( self, service_key, statuses_to_tags ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
statuses = statuses_to_tags.keys()
new_statuses_to_tags = HydrusData.default_dict_set()
for status in statuses:
new_statuses_to_tags[ status ] = self._CollapseTags( service_key, statuses_to_tags[ status ] )
return new_statuses_to_tags
def CollapseTag( self, service_key, tag ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
if tag in siblings:
return siblings[ tag ]
else:
return tag
def CollapseTags( self, service_key, tags ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
return self._CollapseTags( service_key, tags )
def CollapseTagsToCount( self, service_key, tags_to_count ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
results = collections.Counter()
for ( tag, count ) in tags_to_count.items():
if tag in siblings:
tag = siblings[ tag ]
results[ tag ] += count
return results
def GetAutocompleteSiblings( self, service_key, search_text, exact_match = False ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
reverse_lookup = self._service_keys_to_reverse_lookup[ service_key ]
if exact_match:
key_based_matching_values = set()
if search_text in siblings:
key_based_matching_values = { siblings[ search_text ] }
else:
key_based_matching_values = set()
value_based_matching_values = { value for value in siblings.values() if value == search_text }
else:
matching_keys = ClientSearch.FilterTagsBySearchText( service_key, search_text, siblings.keys(), search_siblings = False )
key_based_matching_values = { siblings[ key ] for key in matching_keys }
value_based_matching_values = ClientSearch.FilterTagsBySearchText( service_key, search_text, siblings.values(), search_siblings = False )
matching_values = key_based_matching_values.union( value_based_matching_values )
# all the matching values have a matching sibling somewhere in their network
# so now fetch the networks
lists_of_matching_keys = [ reverse_lookup[ value ] for value in matching_values ]
matching_keys = itertools.chain.from_iterable( lists_of_matching_keys )
matches = matching_values.union( matching_keys )
return matches
def GetSibling( self, service_key, tag ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
if tag in siblings:
return siblings[ tag ]
else:
return None
def GetAllSiblings( self, service_key, tag ):
if self._controller.new_options.GetBoolean( 'apply_all_siblings_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
siblings = self._service_keys_to_siblings[ service_key ]
reverse_lookup = self._service_keys_to_reverse_lookup[ service_key ]
if tag in siblings:
best_tag = siblings[ tag ]
elif tag in reverse_lookup:
best_tag = tag
else:
return [ tag ]
all_siblings = list( reverse_lookup[ best_tag ] )
all_siblings.append( best_tag )
return all_siblings
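# e.g. with the sibling mappings sketched in _RefreshSiblings above,
# GetAllSiblings( service_key, 'lotr' ) -> [ 'lotr', 'lord of the rings' ]
# (every bad tag in the network, then the best tag)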
def NotifyNewSiblings( self ):
with self._lock:
self._dirty = True
self._controller.CallLater( 1.0, self.RefreshSiblingsIfDirty )
def RefreshSiblingsIfDirty( self ):
with self._lock:
if self._dirty:
self._RefreshSiblings()
self._dirty = False
class UndoManager( object ):
def __init__( self, controller ):
self._controller = controller
self._commands = []
self._inverted_commands = []
self._current_index = 0
self._lock = threading.Lock()
self._controller.sub( self, 'Undo', 'undo' )
self._controller.sub( self, 'Redo', 'redo' )
def _FilterServiceKeysToContentUpdates( self, service_keys_to_content_updates ):
filtered_service_keys_to_content_updates = {}
for ( service_key, content_updates ) in service_keys_to_content_updates.items():
filtered_content_updates = []
for content_update in content_updates:
( data_type, action, row ) = content_update.ToTuple()
if data_type == HC.CONTENT_TYPE_FILES:
if action in ( HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_UNDELETE, HC.CONTENT_UPDATE_RESCIND_PETITION, HC.CONTENT_UPDATE_ADVANCED ):
continue
elif data_type == HC.CONTENT_TYPE_MAPPINGS:
if action in ( HC.CONTENT_UPDATE_RESCIND_PETITION, HC.CONTENT_UPDATE_ADVANCED ):
continue
else:
continue
filtered_content_update = HydrusData.ContentUpdate( data_type, action, row )
filtered_content_updates.append( filtered_content_update )
if len( filtered_content_updates ) > 0:
filtered_service_keys_to_content_updates[ service_key ] = filtered_content_updates
return filtered_service_keys_to_content_updates
def _InvertServiceKeysToContentUpdates( self, service_keys_to_content_updates ):
inverted_service_keys_to_content_updates = {}
for ( service_key, content_updates ) in service_keys_to_content_updates.items():
inverted_content_updates = []
for content_update in content_updates:
( data_type, action, row ) = content_update.ToTuple()
inverted_row = row
if data_type == HC.CONTENT_TYPE_FILES:
if action == HC.CONTENT_UPDATE_ARCHIVE: inverted_action = HC.CONTENT_UPDATE_INBOX
elif action == HC.CONTENT_UPDATE_INBOX: inverted_action = HC.CONTENT_UPDATE_ARCHIVE
elif action == HC.CONTENT_UPDATE_PEND: inverted_action = HC.CONTENT_UPDATE_RESCIND_PEND
elif action == HC.CONTENT_UPDATE_RESCIND_PEND: inverted_action = HC.CONTENT_UPDATE_PEND
elif action == HC.CONTENT_UPDATE_PETITION:
inverted_action = HC.CONTENT_UPDATE_RESCIND_PETITION
( hashes, reason ) = row
inverted_row = hashes
elif data_type == HC.CONTENT_TYPE_MAPPINGS:
if action == HC.CONTENT_UPDATE_ADD: inverted_action = HC.CONTENT_UPDATE_DELETE
elif action == HC.CONTENT_UPDATE_DELETE: inverted_action = HC.CONTENT_UPDATE_ADD
elif action == HC.CONTENT_UPDATE_PEND: inverted_action = HC.CONTENT_UPDATE_RESCIND_PEND
elif action == HC.CONTENT_UPDATE_RESCIND_PEND: inverted_action = HC.CONTENT_UPDATE_PEND
elif action == HC.CONTENT_UPDATE_PETITION:
inverted_action = HC.CONTENT_UPDATE_RESCIND_PETITION
( tag, hashes, reason ) = row
inverted_row = ( tag, hashes )
inverted_content_update = HydrusData.ContentUpdate( data_type, inverted_action, inverted_row )
inverted_content_updates.append( inverted_content_update )
inverted_service_keys_to_content_updates[ service_key ] = inverted_content_updates
return inverted_service_keys_to_content_updates
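# worked inversion example: an ARCHIVE update for a set of hashes inverts to an
# INBOX update for the same row, so undoing an archive returns the files to the inbox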
def AddCommand( self, action, *args, **kwargs ):
with self._lock:
inverted_action = action
inverted_args = args
inverted_kwargs = kwargs
if action == 'content_updates':
( service_keys_to_content_updates, ) = args
service_keys_to_content_updates = self._FilterServiceKeysToContentUpdates( service_keys_to_content_updates )
if len( service_keys_to_content_updates ) == 0: return
inverted_service_keys_to_content_updates = self._InvertServiceKeysToContentUpdates( service_keys_to_content_updates )
if len( inverted_service_keys_to_content_updates ) == 0: return
inverted_args = ( inverted_service_keys_to_content_updates, )
else: return
self._commands = self._commands[ : self._current_index ]
self._inverted_commands = self._inverted_commands[ : self._current_index ]
self._commands.append( ( action, args, kwargs ) )
self._inverted_commands.append( ( inverted_action, inverted_args, inverted_kwargs ) )
self._current_index += 1
self._controller.pub( 'notify_new_undo' )
def GetUndoRedoStrings( self ):
with self._lock:
( undo_string, redo_string ) = ( None, None )
if self._current_index > 0:
undo_index = self._current_index - 1
( action, args, kwargs ) = self._commands[ undo_index ]
if action == 'content_updates':
( service_keys_to_content_updates, ) = args
undo_string = 'undo ' + ClientData.ConvertServiceKeysToContentUpdatesToPrettyString( service_keys_to_content_updates )
if len( self._commands ) > 0 and self._current_index < len( self._commands ):
redo_index = self._current_index
( action, args, kwargs ) = self._commands[ redo_index ]
if action == 'content_updates':
( service_keys_to_content_updates, ) = args
redo_string = 'redo ' + ClientData.ConvertServiceKeysToContentUpdatesToPrettyString( service_keys_to_content_updates )
return ( undo_string, redo_string )
def Undo( self ):
action = None
with self._lock:
if self._current_index > 0:
self._current_index -= 1
( action, args, kwargs ) = self._inverted_commands[ self._current_index ]
if action is not None:
self._controller.WriteSynchronous( action, *args, **kwargs )
self._controller.pub( 'notify_new_undo' )
def Redo( self ):
action = None
with self._lock:
if len( self._commands ) > 0 and self._current_index < len( self._commands ):
( action, args, kwargs ) = self._commands[ self._current_index ]
self._current_index += 1
if action is not None:
self._controller.WriteSynchronous( action, *args, **kwargs )
self._controller.pub( 'notify_new_undo' )
| [
"[email protected]"
]
| |
60860eacc8024b7eec8832f1bace9276b752943b | 9af43f9f52ab8726caacdd594980d5e0bf462c40 | /flask_transmute/decorators.py | 29e4c8463f456ffad3e1540e4880e4cebb3c4467 | []
| no_license | elindell/flask-transmute | 3b28509fee071e606be0021bfdc63bff85b51a38 | bd3c103c5eca9a5e4071f71be4a12460acddfd26 | refs/heads/master | 2021-01-22T09:16:45.945064 | 2016-04-04T08:49:08 | 2016-04-04T08:49:08 | 67,669,319 | 0 | 0 | null | 2016-09-08T04:48:59 | 2016-09-08T04:48:59 | null | UTF-8 | Python | false | false | 769 | py | def updates(f):
"""
this labels a function as one that updates data.
"""
f.updates = True
return f
def creates(f):
"""
this labels a function as one that creates data.
"""
f.creates = True
return f
def deletes(f):
"""
this labels a function as one that deletes data.
"""
f.deletes = True
return f
def annotate(annotations):
"""
in python2, native annotions on parameters do not exist:
def foo(a : str, b: int) -> bool:
...
this provides a way to provide attribute annotations:
@annotate({"a": str, "b": int, "return": bool})
def foo(a, b):
...
"""
def decorate(func):
func.__annotations__ = annotations
return func
return decorate
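# hedged usage sketch:
#
# @annotate( { "a": str, "b": int, "return": bool } )
# def is_long_enough( a, b ):
#     return len( a ) >= b
#
# is_long_enough.__annotations__[ "return" ] is bool # -> True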
| [
"[email protected]"
]
| |
94cb36fc55af1eb504fcbf88f2c20c31038bd4dc | 917b85156ddfb653592b3b0994e7e7e9802a9eed | /ejerXML.py | c8789ca346bf35fd1f02bff24c1534fdec3609d4 | []
| no_license | antoniogomezvarela/XML | 3d2f2e8e1949b4a7f335a0b7c6ea229544d816a4 | c6dfeed3d782c4a28e56c7992414accf9fdcc660 | refs/heads/master | 2021-01-22T03:25:47.441160 | 2015-03-06T07:28:34 | 2015-03-06T07:28:34 | 31,011,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,135 | py | # -*- coding: utf-8 -*-
from lxml import etree
from datetime import date
tree = etree.parse('becas_premios.xml')
documento = tree.getroot()
# MENU
print "1- Search for a scholarship or prize by keyboard input"
print "2- Show scholarships and links"
print "3- Find the scholarships and prizes whose publication date falls between February and April"
print "4- Count how many scholarships and prizes have been awarded."
print "5- Show the scholarship ids and add how many days each was open"
opcion= raw_input("Choose an option: ")
# Exercise 1
if opcion == '1':
encontrado = False
identificacion = raw_input("Enter an id: ")
for i in documento:
if i[0].text==identificacion:
encontrado = True
print "ID: ",i[0].text
print "Titulo: ",i[1].text
print "Fecha: ",i[2].text
print "Descripción: ",i[3].text
print "Estado: ",i[5].text
if encontrado == False:
print "Esa ID no existe"
elif opcion == '2':
for i in documento:
print "ID: ",i[0].text,", Enlace: ",i[4].text
elif opcion == '3':
for i in documento:
fecha1=i[2].text
fecha2=fecha1.split("-")
if fecha2[1] >= "02" and fecha2[1] <= "04":
print "ID: ",i[0].text,", Fecha: ",i[2].text
elif opcion == '4':
becas = 0
premios = 0
for i in documento:
titulo = i[1].text
titulo = titulo.split(" ")
if titulo[0] == "Becas":
becas += 1
elif titulo[0] == "Premios":
premios += 1
print "Número de becas concedidas: ",becas
print "Número de premios concedidos: ",premios
elif opcion == '5':
date_format = "%Y/%m/%d"
for i in documento:
incial = i.findall("plazopresentacion/plazopresentacion_item/incial")
final = i.findall("plazopresentacion/plazopresentacion_item/final")
inicial= str(incial[0].text)
final= str(final[0].text)
if inicial != "None" or final != "None":
inicial = inicial.split("T")
final = final.split("T")
inicial = inicial[0].split("-")
final = final[0].split("-")
d0 = date(int(inicial[0]),int(inicial[1]),int(inicial[2]))
d1 = date(int(final[0]),int(final[1]),int(final[2]))
dias = d1-d0
print "la beca ",i[0].text," estuvo abierta ",dias.days," dias"
else:
print "Elige una opción correcta" | [
"root@debian"
]
| root@debian |
a29090ef119e51b024e2fc4af969d65ecaef476a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_155/1805.py | f216188bcb5e778686fc1da1297901988727a426 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 566 | py |
def get_min_members(smax, audience):
standing = 0
friends = 0
i = 1
standing += audience[0]
while i <= smax:
if standing < i:
new_friends = i - standing
standing += new_friends
friends += new_friends
standing += audience[i]
i += 1
return friends
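# sanity check against the commented sample cases below (this looks like the
# "Standing Ovation" qualification problem):
# get_min_members( 4, [1, 1, 1, 1, 1] ) -> 0
# get_min_members( 1, [0, 9] ) -> 1
# get_min_members( 5, [1, 1, 0, 0, 1, 1] ) -> 2
# get_min_members( 0, [1] ) -> 0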
# cases = [(4, "11111"), (1, "09"), (5, "110011"), (0, "1")]
t = input()
for i in range(t):
smax, audience = raw_input().split()
result = get_min_members(int(smax), map(int, audience))
print "Case #%d: %d" % (i+1, result)
| [
"[email protected]"
]
| |
e0e32be403a6963887949ef4f1269a652f11e196 | 89e6c3548fbdd06178aae712de1ff19004bc2faa | /my_django/contrib/localflavor/sk/forms.py | f5428d879572000d4ed3f57df9882da6f007f378 | []
| no_license | bhgv/ublog_git.hg.repo-django.python-engine | a3f3cdcbacc95ec98f022f9719d3b300dd6541d4 | 74cdae100bff5e8ab8fb9c3e8ba95623333c2d43 | refs/heads/master | 2020-03-23T01:04:07.431749 | 2018-07-25T12:59:21 | 2018-07-25T12:59:21 | 140,899,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,560 | py | """
Slovak-specific form helpers
"""
from __future__ import absolute_import
from my_django.contrib.localflavor.sk.sk_districts import DISTRICT_CHOICES
from my_django.contrib.localflavor.sk.sk_regions import REGION_CHOICES
from my_django.forms.fields import Select, RegexField
from my_django.utils.translation import ugettext_lazy as _
class SKRegionSelect(Select):
"""
A select widget widget with list of Slovak regions as choices.
"""
def __init__(self, attrs=None):
super(SKRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class SKDistrictSelect(Select):
"""
A select widget with list of Slovak districts as choices.
"""
def __init__(self, attrs=None):
super(SKDistrictSelect, self).__init__(attrs, choices=DISTRICT_CHOICES)
class SKPostalCodeField(RegexField):
"""
A form field that validates its input as Slovak postal code.
Valid form is XXXXX or XXX XX, where X represents integer.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XXXXX or XXX XX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(SKPostalCodeField, self).__init__(r'^\d{5}$|^\d{3} \d{2}$',
max_length, min_length, *args, **kwargs)
def clean(self, value):
"""
Validates the input and returns a string that contains only numbers.
Returns an empty string for empty values.
"""
v = super(SKPostalCodeField, self).clean(value)
return v.replace(' ', '')
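# e.g. clean( u'831 02' ) -> u'83102' and clean( u'83102' ) -> u'83102'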
| [
"[email protected]"
]
| |
e784cfeb07b1b4b44de67e5f78c4e17cfbf1338b | 1d717c797e93b451f7da7c810a0fb4075b1050d5 | /src/data/dataset/basic_dataset.py | bc875ea6516703ea40caa5028c2b7984ad5dd2fa | []
| no_license | jessie0624/nlp-task | 32338b08051a3ea192db2bf74c9c969bdff1f6ad | aaeeed86341356d9fd061664f6f7bccf2ac353d0 | refs/heads/master | 2023-01-24T12:06:13.323646 | 2020-12-10T08:38:23 | 2020-12-10T08:38:23 | 292,151,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py |
import numpy as np
from src.base import BaseDataset
class BasicDataset(BaseDataset):
def __init__(self, x: list, y: list, callbacks=None):
super().__init__(callbacks=callbacks)
self.x = x
self.y = y
self.sample() # build the candidate index pool first
def get_index_pool(self):
'''
index_pool stores the list of index groups to return on each pass
:return:
'''
# 默认为x的长度,这里要保证是二维的,便于统一,即[[0], [1], [2],...]
index_pool = np.expand_dims(range(len(self.x)), axis=1).tolist()
return index_pool
def sort(self):
'''
        Sort the index pool by the length of each item in x
'''
old_index_pool = self._index_pool
lengths = [len(item) for item in self.x]
sort_index = np.argsort(lengths)
self._index_pool = [old_index_pool[index] for index in sort_index]
def __getitem__(self, item: int):
x, y = self.x[item], self.y[item]
self._handle_callback_on_batch(x, y)
return x, y | [
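# Minimal usage sketch (toy data; BaseDataset/callback semantics assumed from src.base):
#   ds = BasicDataset(x=[[1, 2], [3], [4, 5, 6]], y=[0, 1, 0])
#   ds.sort()                    # index pool reordered by item length: [3], [1, 2], [4, 5, 6]
#   sample_x, sample_y = ds[0]   # fires the on-batch callbacks as a side effect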
"[email protected]"
]
| |
971dd6b3cb304f9c7d87eacd5e07e92e1786bc2e | f8d181f293ce950f1a70bef1d023139d9e70a2c7 | /tests/contrib/operators/test_gcp_vision_operator_system.py | 2b75642d6f3a3c93aab282d82e823a4a09d01087 | [
"Apache-2.0",
"BSD-3-Clause",
"Python-2.0",
"MIT",
"BSD-2-Clause"
]
| permissive | Piboonsak/airflow | d242f79561d893111ad73b9e3481b9180adecfd4 | dce92a54190155898c75c0f3392d42fb28f1884a | refs/heads/master | 2020-04-29T15:16:06.779329 | 2019-03-18T05:16:14 | 2019-03-18T05:16:14 | 176,222,528 | 1 | 0 | Apache-2.0 | 2019-03-18T06:57:38 | 2019-03-18T06:57:38 | null | UTF-8 | Python | false | false | 1,397 | py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from tests.contrib.utils.base_gcp_system_test_case import DagGcpSystemTestCase, SKIP_TEST_WARNING
from tests.contrib.utils.gcp_authenticator import GCP_AI_KEY
@unittest.skipIf(DagGcpSystemTestCase.skip_check(GCP_AI_KEY), SKIP_TEST_WARNING)
class CloudVisionExampleDagsSystemTest(DagGcpSystemTestCase):
def __init__(self, method_name='runTest'):
super(CloudVisionExampleDagsSystemTest, self).__init__(
method_name, dag_id='example_gcp_vision', gcp_key=GCP_AI_KEY
)
def test_run_example_dag_function(self):
self._run_dag()
| [
"[email protected]"
]
| |
0b5713449027037d0ab2ad412af79684d0153c48 | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_06_01/aio/operations/_vpn_site_links_operations.py | 5637893f1b1e6e52468412705bc4b471675b3407 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 8,712 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnSiteLinksOperations:
"""VpnSiteLinksOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
vpn_site_name: str,
vpn_site_link_name: str,
**kwargs
) -> "_models.VpnSiteLink":
"""Retrieves the details of a VPN site link.
:param resource_group_name: The resource group name of the VpnSite.
:type resource_group_name: str
:param vpn_site_name: The name of the VpnSite.
:type vpn_site_name: str
:param vpn_site_link_name: The name of the VpnSiteLink being retrieved.
:type vpn_site_link_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnSiteLink, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.VpnSiteLink
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnSiteLink"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'),
'vpnSiteLinkName': self._serialize.url("vpn_site_link_name", vpn_site_link_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnSiteLink', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks/{vpnSiteLinkName}'} # type: ignore
def list_by_vpn_site(
self,
resource_group_name: str,
vpn_site_name: str,
**kwargs
) -> AsyncIterable["_models.ListVpnSiteLinksResult"]:
"""Lists all the vpnSiteLinks in a resource group for a vpn site.
:param resource_group_name: The resource group name of the VpnSite.
:type resource_group_name: str
:param vpn_site_name: The name of the VpnSite.
:type vpn_site_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnSiteLinksResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ListVpnSiteLinksResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnSiteLinksResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_vpn_site.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnSiteLinksResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_vpn_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks'} # type: ignore
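    # Hedged usage sketch (client construction assumed; names per the
    # azure-mgmt-network aio docs, not verified here):
    #   client = NetworkManagementClient(credential, subscription_id)
    #   link = await client.vpn_site_links.get("my-rg", "my-site", "my-link")
    #   async for link in client.vpn_site_links.list_by_vpn_site("my-rg", "my-site"): ...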
| [
"[email protected]"
]
| |
2e21fbc4566ec48ec6d3e36c44da1af16c81e5ea | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc101/A/4927211.py | 4e4246f50058560e91aa73a6173b4e550e2b0b90 | []
| no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | s = list(input())
print(s.count('+')-s.count('-')) | [
"[email protected]"
]
| |
cfb58a7a49bde127229470f43e7c101d5f9d7168 | ba1ddbc6b364dc2fd55f83ea807b50bf45ce3d1a | /PageObject/VivaVideo/home.py | 23b61b58c20490654f07d632cf8e5bfc9c4414a4 | []
| no_license | zlmone/ATX-UI | 81c58fa722586fe6fb20cd39e3a85afa6057db93 | 44bfa67ed2274c2eeb36f905d5bd482fd96a6707 | refs/heads/master | 2022-05-28T09:03:40.380824 | 2020-05-06T11:39:39 | 2020-05-06T11:39:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,770 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Public.Decorator import *
from Public.Test_data import *
log = Log()
class home_Page(BasePage):
    '''Creation tab home page'''
# @teststep
# def wait_page(self):
# try:
# if self.d(resourceId="com.quvideo.xiaoying:id/iv_vip_home8_cut").wait(timeout=10):
# pass
# else:
# raise Exception('Not in Creation_Page')
# except Exception:
# raise Exception('Not in Creation_Page')
# @teststep
# def close_float_imag(self):
# if self.d(resourceId="com.quvideo.xiaoying:id/float_imageview").wait(timeout=5):
    #         log.i('Close the floating image on the creation page')
# self.d(resourceId="com.quvideo.xiaoying:id/float_imageview").child(className="android.widget.ImageView",
# instance=1).click_exists(timeout=3)
# else:
    #         log.i('No floating image on the creation page, skipping')
# pass
@teststep
def close_popup(self):
        log.i('Close the home page family policy popup')
try:
self.d(resourceId="com.quvideo.xiaoying:id/iv_close").click(3)
except:
            log.i('Popup not shown or already dismissed')
pass
@teststep
    def close_ad_popup(self, timeout=3):
        log.i('Close the ad popup')
self.d(resourceId="com.quvideo.xiaoying:id/tt_insert_dislike_icon_img").click_exists(timeout=timeout)
@teststep
def click_template_btn(self):
        log.i('Tap the bottom "拍同款" (shoot-the-same) tab')
self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="拍同款").click()
@teststep
def click_home_btn(self):
        log.i('Tap the bottom "剪辑" (edit) tab')
self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="剪辑").click()
@teststep
def click_me_btn(self):
        log.i('Tap the bottom "我" (me) tab')
self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="我").click()
@teststep
def click_vip_btn(self):
        log.i('Tap the VIP button')
self.d(resourceId="com.quvideo.xiaoying:id/iv_vip_home8_cut").click()
@teststep
def click_edit_btn(self):
        log.i('Tap video edit')
self.d(resourceId="com.quvideo.xiaoying:id/iv_edit_home8_cut").click()
try:
self.d(resourceId="com.quvideo.xiaoying:id/imgbtn_help_exit").implicitly_wait(3).click()
except:
            log.i("Upgrade-now page already dismissed")
pass
@teststep
def click_mv_btn(self):
        log.i('Tap album MV')
self.d(resourceId="com.quvideo.xiaoying:id/iv_mv_home8_cut").click()
@teststep
def click_draft_btn(self):
        log.i('Tap drafts')
        self.d(resourceId="com.quvideo.xiaoying:id/tv_draft_icon_home8_cut", text='草稿').click()
@teststep
def click_home_more(self):
        log.i('Tap the "view more" (查看更多) button in the material center')
self.d(text="查看更多").click()
@teststep
def click_camera_btn(self):
        log.i('Tap the capture button')
        self.watch_device('取消|允许|始终允许')  # permission dialog buttons: Cancel | Allow | Always allow
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight4_home8_cut").click()
        time.sleep(5)  # wait for the camera to finish loading
        self.d.click(0.5, 0.5)  # tap to focus and dismiss the filter popup
@teststep
def click_sec_addText(self):
        log.i('Tap secondary feature slot: add subtitles')
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight0_home8_cut").click()
@teststep
def click_sec_Mixer(self):
        log.i('Tap secondary feature slot: picture-in-picture')
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight1_home8_cut").click()
@teststep
def click_sec_Mosaic(self):
        log.i('Tap secondary feature slot: mosaic')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight2_home8_cut").click()
@teststep
def click_sec_FAQ(self):
        log.i('Tap secondary feature slot: beginner tutorial')
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight3_home8_cut").click()
@teststep
def click_sec_Capture(self):
        log.i('Tap secondary feature slot: capture')
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight4_home8_cut").click()
@teststep
def click_sec_musicExtraction(self):
        log.i('Tap secondary feature slot: audio extraction')
self.d(resourceId="com.quvideo.xiaoying:id/ll_eight5_home8_cut").click()
# @teststep
# def click_view_pager_btn(self, text):
# '''
    #     Tap operation for each button in the secondary feature area
    #     :param text: display text of the target secondary feature slot
# :return:
# '''
    #     log.i('Find secondary feature slot %s and tap it' % text)
# if self.d(text=text).wait(timeout=1):
# self.d(text=text).click()
# return True
# else:
# try:
# self.d(resourceId="com.quvideo.xiaoying:id/view_pager", scrollable=True).scroll.horiz.to(text=text)
# self.d(text=text).click()
# return True
# except UiObjectNotFoundError:
    #             log.i("Control not found --> %s" % text)
# return False
# @teststep
# def select_studio_view(self, inst=1):
# '''
    #     Tap a view in "my studio"; defaults to the first one
    #     :param inst: 1-based position, i.e. 1, 2, 3 --> first, second, third (the code uses inst-1)
# '''
    #     log.i('Tap draft #%s in my studio' % inst)
# self.d(resourceId="com.quvideo.xiaoying:id/layout_draft_item").child(className='android.widget.ImageView')[inst-1].click()
if __name__ == '__main__':
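    # Minimal smoke test (harness wiring assumed): attach a logger and a null
    # driver, then try dismissing the ad popup.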
from Public.Log import Log
Log().set_logger('udid', './log.log')
BasePage().set_driver(None)
home_Page().close_ad_popup()
| [
"[email protected]"
]
| |
fe1cc4e8b6b8201c08c79ccc09f50d705606c468 | 69e7dca194ab7b190e1a72928e28aa3821b47cfb | /Concepts/Strings/49.py | 579955f18e9b68d977d8b50ba8f8ff8b211b3947 | []
| no_license | Dinesh94Singh/PythonArchivedSolutions | a392891b431d47de0d5f606f7342a11b3127df4d | 80cca595dc688ca67c1ebb45b339e724ec09c374 | refs/heads/master | 2023-06-14T14:56:44.470466 | 2021-07-11T06:07:38 | 2021-07-11T06:07:38 | 384,871,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | """
49. Group Anagrams
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
All inputs will be in lowercase.
The order of your output does not matter.
"""
import collections
def group_anagrams(strs):
dic = collections.defaultdict(list)
ans = []
for each_word in strs:
dic[tuple(sorted(each_word))].append(each_word)
for key, values in dic.items():
ans.append(values)
return ans
group_anagrams(["eat", "tea", "tan", "ate", "nat", "bat"])
| [
"[email protected]"
]
| |
31475d7e6cd976e2ad2ea6c3ecd3f56b4ae48fbc | 326a026bcc6bad962159677110d78d3d836532ed | /markote/api/notebook.py | e05023873ff40b79701ec2540061e8c2d53ca0e2 | [
"MIT"
]
| permissive | Frederick-S/markote | f63a5007fd0a70ce4b3ae9d03425ae9f9c8b54f3 | 095dabe3da83b5d8809593758661eb78fa527f49 | refs/heads/master | 2023-03-04T16:50:30.541147 | 2022-08-12T01:24:43 | 2022-08-12T01:24:43 | 110,396,888 | 9 | 2 | MIT | 2023-03-04T13:11:38 | 2017-11-12T02:04:32 | Vue | UTF-8 | Python | false | false | 870 | py | from flask import jsonify, request
from markote.api.api_blueprint import api_blueprint
from markote.oauth import oauth
@api_blueprint.route('/notebooks', methods=['GET'])
def get_notebooks():
oauth_client = oauth.microsoft_graph
response = oauth_client.get(
'me/onenote/notebooks?$select=id,displayName')
return jsonify(response.json()), response.status_code
@api_blueprint.route('/notebooks/<notebook_id>/sections', methods=['GET'])
def get_sections(notebook_id):
name = request.args.get('name')
query_filter = '$filter=displayName eq \'{0}\''.format(name) \
if name else ''
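    # e.g. name="Notes" yields "$filter=displayName eq 'Notes'", an OData exact-match filter.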
oauth_client = oauth.microsoft_graph
response = oauth_client.get(
'me/onenote/notebooks/{0}/sections?$select=id,displayName&{1}'.format(
notebook_id, query_filter))
return jsonify(response.json()), response.status_code
| [
"[email protected]"
]
| |
4b9a62611c764cd8d705fcf54fd46f2a5624deae | d9e26e516ab3863b6e7d00c4e3cdecf1af7028eb | /src/oaklib/io/rollup_report_writer.py | e4644c058309aeb0aeae82b0c4cc2fa52f2b5e04 | [
"Apache-2.0"
]
| permissive | INCATools/ontology-access-kit | 2f08a64b7308e8307d1aaac2a81764e7d98b5928 | 8d2a124f7af66fe2e796f9e0ece55585438796a5 | refs/heads/main | 2023-08-30T14:28:57.201198 | 2023-08-29T17:40:19 | 2023-08-29T17:40:19 | 475,072,415 | 67 | 15 | Apache-2.0 | 2023-09-07T01:06:04 | 2022-03-28T15:50:45 | Jupyter Notebook | UTF-8 | Python | false | false | 3,444 | py | from typing import Dict, List, TextIO
from airium import Airium
from linkml_runtime.dumpers import json_dumper, yaml_dumper
def format_object(curie, label):
if label:
return f"{label} [{curie}]"
else:
return curie
def add_association_group(doc: Airium, associations: List[Dict], subject: str, header_label: str):
associations_for_subject = [a for a in associations if a.get("subject") == subject]
if associations_for_subject:
with doc.div(klass="association-group"):
doc.div(_t=header_label, klass="association-group-header")
with doc.ul(klass="association-group-list"):
for association in associations_for_subject:
label = format_object(
association.get("object"), association.get("object_label")
)
doc.li(_t=label)
def generate_html(subjects: List[str], groups: List[Dict]) -> str:
doc = Airium()
doc("<!DOCTYPE html>")
with doc.html(lang="en"):
with doc.head():
doc.meta(charset="utf-8")
doc.title(_t="Rollup Table")
doc.style(
_t="""
.rollup-table {
border-collapse: collapse;
width: 100%;
}
.rollup-table tr {
vertical-align: top;
}
.rollup-table td {
padding: 0.25rem;
border-top: 1px solid black;
}
.primary-group-label {
font-weight: bold;
}
.association-group {
margin-bottom: 1rem;
}
.association-group-header {
font-style: italic;
}
.association-group-list {
margin: 0;
}
"""
)
with doc.body():
with doc.table(klass="rollup-table"):
with doc.tr():
doc.td(_t="Subject", klass="primary-group-label")
for subject in subjects:
doc.td(_t=subject)
for group in groups:
with doc.tr():
label = format_object(
group.get("group_object"), group.get("group_object_label")
)
doc.td(_t=label, klass="primary-group-label")
for subject in subjects:
with doc.td():
for sub_group in group.get("sub_groups", []):
add_association_group(
doc,
sub_group.get("associations", []),
subject,
format_object(
sub_group.get("group_object"),
sub_group.get("group_object_label"),
),
)
add_association_group(
doc, group.get("associations", []), subject, "Other"
)
return str(doc)
def write_report(subjects: List[str], groups: List[Dict], output: TextIO, format: str):
if format == "json":
output.write(json_dumper.dumps(groups, inject_type=False))
elif format == "yaml":
output.write(yaml_dumper.dumps(groups))
elif format == "html":
output.write(generate_html(subjects, groups))
else:
raise ValueError(f"Unsupported format: {format}")
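# Hedged sketch of the `groups` shape, inferred from generate_html (IDs illustrative):
#   groups = [{"group_object": "GO:0008150", "group_object_label": "biological_process",
#              "sub_groups": [],
#              "associations": [{"subject": "gene1", "object": "GO:0006915",
#                                "object_label": "apoptotic process"}]}]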
| [
"[email protected]"
]
| |
b46a3f8bb2a7aa7189a03e9bb03385aa2adc1203 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_135/3207.py | 49316f0601d1e454902936007d3f7d43574994a8 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | with open("a.in", 'r') as f:
T = int(f.readline())
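    # Code Jam "Magic Trick"-style check: intersect the volunteer's chosen row from
    # each of the two 4x4 grids; one common card -> the answer, several -> the
    # magician can't know, none -> the volunteer lied.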
for t in range(1, T+1):
S = set(range(1, 17))
for i in range(2):
n = int(f.readline())
for j in range(1, 5):
line = f.readline()
if n == j:
S = S & set(map(int, line.split()))
if len(S) == 0:
print("Case #%d: Volunteer cheated!" % t)
elif len(S) > 1:
print("Case #%d: Bad magician!" % t)
else:
print("Case #%d: %d" % (t, list(S)[0]))
| [
"[email protected]"
]
| |
b79a9b710f88b92e919b4b75f4e4d0094a5287ed | c7b31209cc7b5a015ca34d1174e7978730ce6733 | /rpplugins/env_probes/environment_capture_stage.py | 9bbe136a0b8d893af6e96f81148c9d987fbae7be | [
"MIT"
]
| permissive | gitter-badger/RenderPipeline | c244343def6dd33e55e78cd828f0c703b338ce1a | 4d4bf4164c8dcb188f93e46749ba52de8f61b37f | refs/heads/master | 2021-01-22T00:52:25.396315 | 2016-04-16T13:13:57 | 2016-04-16T13:15:27 | 56,395,593 | 0 | 0 | null | 2016-04-16T17:04:37 | 2016-04-16T17:04:37 | null | UTF-8 | Python | false | false | 7,877 | py | """
RenderPipeline
Copyright (c) 2014-2016 tobspr <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import division
from rplibs.six.moves import range
from rplibs.six import itervalues
from panda3d.core import Camera, PerspectiveLens, Vec4, Vec3, PTAInt, GraphicsOutput
from rpcore.globals import Globals
from rpcore.image import Image
from rpcore.render_stage import RenderStage
class EnvironmentCaptureStage(RenderStage):
""" This stage renders the scene to a cubemap """
required_inputs = ["DefaultEnvmap", "AllLightsData", "maxLightIndex"]
required_pipes = []
def __init__(self, pipeline):
RenderStage.__init__(self, pipeline)
self.resolution = 128
self.diffuse_resolution = 4
self.regions = []
self.cameras = []
self.rig_node = Globals.render.attach_new_node("EnvmapCamRig")
self.pta_index = PTAInt.empty_array(1)
self.storage_tex = None
self.storage_tex_diffuse = None
def create(self):
self.target = self.create_target("CaptureScene")
self.target.size = self.resolution * 6, self.resolution
self.target.add_depth_attachment(bits=16)
self.target.add_color_attachment(bits=16, alpha=True)
self.target.prepare_render(None)
# Remove all unused display regions
internal_buffer = self.target.internal_buffer
internal_buffer.remove_all_display_regions()
internal_buffer.disable_clears()
internal_buffer.get_overlay_display_region().disable_clears()
self._setup_camera_rig()
self._create_store_targets()
self._create_filter_targets()
def _setup_camera_rig(self):
""" Setups the cameras to render a cubemap """
directions = (Vec3(1, 0, 0), Vec3(-1, 0, 0), Vec3(0, 1, 0),
Vec3(0, -1, 0), Vec3(0, 0, 1), Vec3(0, 0, -1))
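        # One capture camera per cube face, each looking down one axis; the set_r()
        # rolls below align selected faces with the cubemap storage layout
        # (orientation convention assumed from the engine).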
# Prepare the display regions
for i in range(6):
region = self.target.internal_buffer.make_display_region(
i / 6, i / 6 + 1 / 6, 0, 1)
region.set_sort(25 + i)
region.set_active(True)
region.disable_clears()
# Set the correct clears
region.set_clear_depth_active(True)
region.set_clear_depth(1.0)
region.set_clear_color_active(True)
region.set_clear_color(Vec4(0))
lens = PerspectiveLens()
lens.set_fov(90)
lens.set_near_far(0.001, 1.0)
camera = Camera("EnvmapCam-" + str(i), lens)
camera_np = self.rig_node.attach_new_node(camera)
camera_np.look_at(camera_np, directions[i])
region.set_camera(camera_np)
self.regions.append(region)
self.cameras.append(camera_np)
self.cameras[0].set_r(90)
self.cameras[1].set_r(-90)
self.cameras[3].set_r(180)
self.cameras[5].set_r(180)
# Register cameras
for camera_np in self.cameras:
self._pipeline.tag_mgr.register_envmap_camera(camera_np.node())
def _create_store_targets(self):
""" Creates the targets which copy the result texture into the actual storage """
self.target_store = self.create_target("StoreCubemap")
self.target_store.size = self.resolution * 6, self.resolution
self.target_store.prepare_buffer()
self.target_store.set_shader_input("SourceTex", self.target.color_tex)
self.target_store.set_shader_input("DestTex", self.storage_tex)
self.target_store.set_shader_input("currentIndex", self.pta_index)
self.temporary_diffuse_map = Image.create_cube("DiffuseTemp", self.resolution, "RGBA16")
self.target_store_diff = self.create_target("StoreCubemapDiffuse")
self.target_store_diff.size = self.resolution * 6, self.resolution
self.target_store_diff.prepare_buffer()
self.target_store_diff.set_shader_input("SourceTex", self.target.color_tex)
self.target_store_diff.set_shader_input("DestTex", self.temporary_diffuse_map)
self.target_store_diff.set_shader_input("currentIndex", self.pta_index)
def _create_filter_targets(self):
""" Generates the targets which filter the specular cubemap """
self.filter_targets = []
mip = 0
size = self.resolution
while size > 1:
size = size // 2
mip += 1
target = self.create_target("FilterCubemap:{0}-{1}x{1}".format(mip, size))
target.size = size * 6, size
target.prepare_buffer()
target.set_shader_input("currentIndex", self.pta_index)
target.set_shader_input("currentMip", mip)
target.set_shader_input("SourceTex", self.storage_tex)
target.set_shader_input("DestTex", self.storage_tex, False, True, -1, mip, 0)
self.filter_targets.append(target)
# Target to filter the diffuse cubemap
self.filter_diffuse_target = self.create_target("FilterCubemapDiffuse")
self.filter_diffuse_target.size = self.diffuse_resolution * 6, self.diffuse_resolution
self.filter_diffuse_target.prepare_buffer()
self.filter_diffuse_target.set_shader_input("SourceTex", self.temporary_diffuse_map)
self.filter_diffuse_target.set_shader_input("DestTex", self.storage_tex_diffuse)
self.filter_diffuse_target.set_shader_input("currentIndex", self.pta_index)
def set_probe(self, probe):
self.rig_node.set_mat(probe.matrix)
self.pta_index[0] = probe.index
def update(self):
# First, disable all targets
for target in itervalues(self._targets):
target.active = False
# Check for updated faces
for i in range(6):
if self._pipeline.task_scheduler.is_scheduled("envprobes_capture_envmap_face" + str(i)):
self.regions[i].set_active(True)
# Check for filtering
if self._pipeline.task_scheduler.is_scheduled("envprobes_filter_and_store_envmap"):
self.target_store.active = True
self.target_store_diff.active = True
self.filter_diffuse_target.active = True
for target in self.filter_targets:
target.active = True
def set_shader_input(self, *args):
Globals.render.set_shader_input(*args)
def reload_shaders(self):
self.target_store.shader = self.load_plugin_shader(
"store_cubemap.frag.glsl")
self.target_store_diff.shader = self.load_plugin_shader(
"store_cubemap_diffuse.frag.glsl")
self.filter_diffuse_target.shader = self.load_plugin_shader(
"filter_cubemap_diffuse.frag.glsl")
for i, target in enumerate(self.filter_targets):
target.shader = self.load_plugin_shader("mips/{}.autogen.glsl".format(i))
| [
"[email protected]"
]
| |
9c69e890954b39c53456d3274149e26adb8cba6e | 2cf4c28f533065153b23c3b4084bf905467f4e23 | /utils/tensor_viewer/plugins/sandwich.py | 4395a597082b8f236ed00847d43cfbb277d7c9a2 | []
| no_license | WilliamRo/tframe | 94e75b4d7fd482ab5edeff2db966f4316390e32b | 2ac00b2a05fd65529adb7edf7123b3eea6e5e6f2 | refs/heads/master | 2023-09-01T22:02:02.372416 | 2023-08-24T08:10:26 | 2023-08-24T08:10:26 | 92,593,033 | 17 | 7 | null | 2022-07-23T01:35:10 | 2017-05-27T10:55:48 | Python | UTF-8 | Python | false | false | 1,082 | py | import re
import numpy as np
import matplotlib
from matplotlib.ticker import FuncFormatter
from tframe.utils.tensor_viewer.plugin import Plugin, VariableWithView
def _recursive_modify(v_dict, level=0):
if len(v_dict) == 0: return
assert isinstance(v_dict, dict)
if isinstance(list(v_dict.values())[0], dict):
for e_key, e_dict in v_dict.items():
print('>> Modifying dict {} ...'.format(e_key))
_recursive_modify(e_dict, level=level + 1)
return
# Here the values in v_dict must be lists
for key in list(v_dict.keys()):
if not re.fullmatch(r'dL/dS\[\d+\]', key): continue
triangle_list = v_dict[key]
new_list = []
for triangle in triangle_list:
assert isinstance(triangle, np.ndarray) and len(triangle.shape) == 2
bottom = np.sum(triangle, axis=0, keepdims=True)
new_list.append(np.concatenate(
[triangle, np.zeros_like(bottom), bottom], axis=0))
v_dict[key] = new_list
def modifier(v_dict):
print('>> Modifying by sandwich ...')
_recursive_modify(v_dict)
plugin = Plugin(dict_modifier=modifier)
| [
"[email protected]"
]
| |
395b9d6b3eeb3dda9279993faf701f3d4c1cf382 | 5aa80aab7a75d76b0aa838bf8f74a276a12c876e | /src/ifmap/SConscript | 91667c2cbadf3ccc91e2f3828066f422d4455f95 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | tungstenfabric/tf-controller | 83b6d58afadb5697b540b5345711a5b2af90d201 | f825fde287f4eb2089aba2225ca73eeab3888040 | refs/heads/master | 2023-08-28T02:56:27.329584 | 2023-08-20T12:15:38 | 2023-08-20T12:31:34 | 231,070,970 | 55 | 29 | Apache-2.0 | 2023-07-23T01:38:17 | 2019-12-31T10:24:38 | C++ | UTF-8 | Python | false | false | 4,531 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
# -*- mode: python; -*-
Import('BuildEnv')
env = BuildEnv.Clone()
env.CppDisableExceptions()
env.Append(CPPPATH = env['TOP'])
env.Append(CPPPATH = [env['TOP'] + '/ifmap'])
env.Append(CPPPATH = [env['TOP'] + '/config-client-mgr'])
env.Append(CPPPATH = [env['TOP'] + '/base/sandesh'])
env.Append(CPPPATH = [env['TOP'] + '/database'])
env.Prepend(LIBS=['sandesh', 'http', 'http_parser', 'curl', 'io', 'base'])
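# Two build environments: 'env' compiles with C++ exceptions disabled, while
# 'except_env' below re-enables them for sandesh-generated and other
# exception-throwing translation units.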
except_env = BuildEnv.Clone()
except_env.Append(CPPPATH = [env['TOP'] + '/ifmap'])
except_env.Append(CPPPATH = [env['TOP'] + '/config-client-mgr'])
except_env.Append(CPPPATH = [env['TOP'] + '/base/sandesh'])
except_env.CppEnableExceptions()
except_env.Append(CPPPATH = env['TOP'])
SandeshGenFiles = env.SandeshGenCpp('ifmap_server_show.sandesh')
SandeshGenFiles += env.SandeshGenOnlyCpp('ifmap_server_show_internal.sandesh')
SandeshGenFiles += env.SandeshGenCpp('ifmap_log.sandesh')
SandeshGenSrcs = env.ExtractCpp(SandeshGenFiles)
sandesh_objs = []
for src in SandeshGenSrcs:
objname = src.replace('.cpp', '.o')
obj = except_env.Object(objname, src)
sandesh_objs.append(obj)
ifmap_server = except_env.Object('ifmap_server.o', 'ifmap_server.cc')
ifmap_server_show = except_env.Object('ifmap_server_show.o', 'ifmap_server_show.cc')
ifmap_xmpp = except_env.Object('ifmap_xmpp.o', 'ifmap_xmpp.cc')
ifmap_xmpp_client_show = except_env.Object('ifmap_xmpp_client_show.o', 'ifmap_xmpp_client_show.cc')
AgentSandeshGenFiles = env.SandeshGenCpp('ifmap_agent.sandesh')
AgentSandeshGenSrcs = env.ExtractCpp(AgentSandeshGenFiles)
libifmap_common = env.Library('ifmap_common',
['ifmap_dependency_tracker.cc',
'ifmap_table.cc',
'ifmap_link.cc',
'ifmap_link_table.cc',
'ifmap_node.cc',
'ifmap_object.cc',
'ifmap_log.cc'] + sandesh_objs)
# control-node
libifmap = env.Library('ifmap_server',
['ifmap_client.cc',
'ifmap_config_listener.cc',
'ifmap_encoder.cc',
'ifmap_exporter.cc',
'ifmap_factory.cc',
'ifmap_graph_walker.cc',
'ifmap_node_proxy.cc',
ifmap_server_show,
ifmap_server,
'ifmap_server_parser.cc',
'ifmap_server_table.cc',
'ifmap_update.cc',
'ifmap_update_queue.cc',
'ifmap_update_sender.cc',
'ifmap_util.cc',
'ifmap_uuid_mapper.cc',
ifmap_xmpp,
ifmap_xmpp_client_show,
] + sandesh_objs)
# agent-module
libifmap_agent = env.Library('ifmap_agent',
[ 'ifmap_agent_parser.cc',
'ifmap_agent_table.cc',
'ifmap_agent_sandesh.cc',
] + AgentSandeshGenSrcs)
test_suite = env.SConscript('client/SConscript', exports='BuildEnv',
duplicate = 0)
test_suite += env.SConscript('test/SConscript', exports='BuildEnv',
duplicate = 0)
def code_coverage(target, source, env):
import shutil
shutil.rmtree(target[0].path, ignore_errors = True)
# lcov --base-directory $ROOT -- directory . --zerocounters -q
import os
os.system('lcov --base-directory . --directory ' + Dir('.').path +
' --zerocounters -q')
# execute tests
import subprocess
ShEnv = {env['ENV_SHLIB_PATH']: 'build/lib'}
for test in test_suite:
cmd = test[0].path
logfile = open(cmd + '.log', 'w')
subprocess.call([cmd], stdout=logfile, env=ShEnv)
# lcov --base-directory $ROOT -- directory . -c -o ifmap_test.info
os.system('lcov --base-directory . --directory ' + Dir('.').path +
' -c -o ifmap_test.info')
# genhtml -o ifmap/test_coverage ifmap_test.info
os.system('genhtml -o ' + target[0].path +
' -t "test coverage" --num-spaces 4 ifmap_test.info')
if env['OPT'] == 'coverage':
test_coverage = env.Command(Dir('test_coverage'), '', code_coverage)
env.AlwaysBuild(test_coverage)
env.Alias('src/ifmap:test_coverage', test_coverage)
| [
"[email protected]"
]
| ||
13ab29e5ceffbff9de60963df7ec385ba55cad77 | ee6fc02e8392ff780a4f0d1a5789776e4d0b6a29 | /code/abc/150/abc150_b.py | c748c5aa7445dd342d4b813baf31ee0e24d77bef | []
| no_license | mollinaca/ac | e99bb5d5c07159b3ef98cd7067424fa2751c0256 | 2f40dd4333c2b39573b75b45b06ad52cf36d75c3 | refs/heads/master | 2020-12-22T11:02:13.269855 | 2020-09-18T01:02:29 | 2020-09-18T01:02:29 | 236,757,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
n = int(input())
s = str(input())
count = 0
for i in range(0,n):
if s[i] == "A":
if i+2 <= n-1:
if s[i+1] == "B" and s[i+2] == "C":
count += 1
print (count) | [
"[email protected]"
]
| |
d0b9760362c2c896a11394121621237ba007551c | 64afcac06e3a64215d7d7152c4fa5662164a41e6 | /src/jk_sql/DBColDef.py | e36c558919fedb53a78002e8dc3f70873f63d78a | [
"Apache-2.0"
]
| permissive | jkpubsrc/python-module-jk-sql | 7b4f12783b8384540404fa60c469c911955202a6 | cc716f4042af4cbc503056bd3f71cde9acd12ce2 | refs/heads/master | 2022-09-15T17:20:24.458796 | 2017-10-20T10:01:34 | 2017-10-20T10:01:34 | 107,655,550 | 0 | 1 | null | 2022-09-03T19:29:01 | 2017-10-20T08:58:04 | Python | UTF-8 | Python | false | false | 2,376 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import sqlite3
from .EnumDBColType import EnumDBColType
from .EnumDBIndexType import EnumDBIndexType
#
# This class represents a definition of a column. Objects of this type are used to either define a column or get information about a table column.
#
class DBColDef(object):
def __init__(self, fieldName, fieldType, bIsNullable, indexType):
assert isinstance(fieldName, str)
assert isinstance(fieldType, EnumDBColType)
assert isinstance(bIsNullable, bool)
assert isinstance(indexType, EnumDBIndexType)
if fieldType == EnumDBColType.PK:
bIsNullable = False
indexType = EnumDBIndexType.NONE
self.__name = fieldName
self.__type = fieldType
self.__bIsNullable = bIsNullable
self.__indexType = indexType
#
@property
def index(self):
return self.__indexType
#
@property
def nullable(self):
return self.__bIsNullable
#
@property
def unique(self):
return self.__indexType == EnumDBIndexType.UNIQUE_INDEX
#
@property
def type(self):
return self.__type
#
@property
def name(self):
return self.__name
#
def isEqualWithoutIndex(self, other):
return (self.__name == other.name) and (self.__type == other.type) and (self.__bIsNullable == other.nullable)
#
def __ne__(self, other):
return (self.__name != other.name) or (self.__type != other.type) or (self.__bIsNullable != other.nullable) or (self.__indexType != other.index)
#
def __eq__(self, other):
return (self.__name == other.name) and (self.__type == other.type) and (self.__bIsNullable == other.nullable) and (self.__indexType == other.index)
#
def __str__(self):
		return str(self.__type) + ": " + self.__name
#
def __repr__(self):
		return str(self.__type) + ": " + self.__name
#
def __copy__(self):
return DBColDef(self.__name, self.__type, self.__bIsNullable, self.__indexType)
#
def __deepcopy__(self, memo):
return DBColDef(self.__name, self.__type, self.__bIsNullable, self.__indexType)
#
@staticmethod
def loadFromJSON(jsonDef):
t = jsonDef["type"]
i = jsonDef["index"]
return DBColDef(jsonDef["name"], EnumDBColType.parse(t), jsonDef["nullable"], EnumDBIndexType.parse(i))
#
def toJSON(self):
return {
"name" : self.__name,
"type" : str(self.__type),
"nullable" : self.__bIsNullable,
"index" : str(self.__indexType)
}
#
#
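# Hedged usage sketch (round-trip via JSON; the "str"/"index" spellings are
# assumptions, not verified against EnumDBColType/EnumDBIndexType):
#   col = DBColDef("email", EnumDBColType.parse("str"), True, EnumDBIndexType.parse("index"))
#   data = col.toJSON()   # {"name": "email", "type": ..., "nullable": True, "index": ...}
#   assert DBColDef.loadFromJSON(data) == col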
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.