Dataset schema:

| Column | Type | Length / range / values |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | sequence | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (nullable) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (nullable) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (nullable) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | sequence | length 1–1 |
| author_id | string | length 1–132 |
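A minimal sketch of reading rows with this schema through the `datasets` library; the repository id `org/dataset-name` is a placeholder, not something stated on this page:

```python
# Sketch only: "org/dataset-name" stands in for whichever hub repository
# hosts this data; the column names below come from the schema table above.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train", streaming=True)
for row in ds:
    # keep permissively licensed, human-written files only
    if row["license_type"] == "permissive" and not row["is_generated"]:
        print(row["repo_name"], row["path"], row["length_bytes"])
        break
```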
ae7809f3604c6604f22ba1fa70a1e9c3ed5f2c69 | 57d978b0ba29c745cf409e819f2f7e42e98516dc | /ibs/_input.py | ce3078f28411719e86def04fdd019b03afda1c2e | [
"MIT"
] | permissive | lnls-fac/lnls | 26564b9bdaf0602e2f50d67e294d0d8ba5cc0f82 | 91299d84ce7628d33c46861687d17a574428bc85 | refs/heads/master | 2023-02-09T00:26:47.298350 | 2022-11-25T19:25:50 | 2022-11-25T19:25:50 | 33,893,299 | 5 | 2 | MIT | 2022-11-25T19:25:51 | 2015-04-13T20:51:15 | Python | UTF-8 | Python | false | false | 2,116 | py |
import numpy as _np
import copy as _copy
def read_energy_acceptance_file(fname, eRF):
# reads raw data from file
lines = [line.strip() for line in open(fname)]
# processes raw data
accp, accn = [], []
for line in lines:
if not line or line[0] == '#':
continue
values = [float(word) for word in line.split()]
pos, e_ac = values[4], values[7]
if e_ac > 0.0:
accp.append([pos,min(abs(e_ac),eRF)])
else:
accn.append([pos,min(abs(e_ac),eRF)])
accp = _np.array(accp)
accn = _np.array(accn)
return (accp,accn)
def read_twiss_file(fname, orig_parameters):
# reads raw data from file
lines = [line.strip() for line in open(fname)]
parameters = _copy.deepcopy(orig_parameters)
# processes raw data into twiss and element structures
twiss, elements = [], []
for line in lines:
words = line.split()
if not words or words[0][0] == '*':
continue
if words[0][0] == '#':
if words[0] == '#MCF':
parameters.mcf = float(words[1])
elif words[0] == '#I1':
parameters.latt_i1 = float(words[1])
elif words[0] == '#I2':
parameters.latt_i2 = float(words[1])
elif words[0] == '#I3':
parameters.latt_i3 = float(words[1])
elif words[0] == '#I4':
parameters.latt_i4 = float(words[1])
elif words[0] == '#I5':
parameters.latt_i5 = float(words[1])
elif words[0] == '#I6':
parameters.latt_i6 = float(words[1])
else:
pass
continue
else:
if float(words[3]) > 0:
values = [float(word) for word in words[2:]]
values = values + [0, 0] # for acceptances insertion later on
#print(values)
twiss.append(values)
elements.append(words[0])
twiss = _np.array(twiss)
elements = _np.array(elements)
return (elements, twiss, parameters)
| [
"[email protected]"
] | |
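A hypothetical usage sketch for the two readers in `ibs/_input.py` above; the input file names and the parameter container are illustrative assumptions, not part of the module:

```python
# Illustrative only: the file names are made up, and read_twiss_file() just
# needs an object whose attributes (mcf, latt_i1..latt_i6) it can set.
from types import SimpleNamespace
from ibs._input import read_energy_acceptance_file, read_twiss_file

params = SimpleNamespace(mcf=None, latt_i1=None, latt_i2=None, latt_i3=None,
                         latt_i4=None, latt_i5=None, latt_i6=None)
accp, accn = read_energy_acceptance_file('energy_acceptance.txt', eRF=0.03)
elements, twiss, params = read_twiss_file('twiss.txt', params)
print(accp.shape, accn.shape, params.mcf)
```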
bf10c5772f31298525eb55957cf0421f17ad7983 | 87cac4166f07729f1c94066259996c8b752c1202 | /examples/calc/calc_distance.py | dc0d473aceccd019cbfe3f28a154c888c08db7e6 | [] | no_license | danielsocials/bbox | 068238a15880468d214109a23017a19e70fc13ec | 292e350b1cefbbab987baf8c946d4021abd211ea | refs/heads/master | 2020-03-16T06:25:47.907369 | 2018-05-08T04:42:45 | 2018-05-08T04:42:45 | 132,554,332 | 0 | 0 | null | 2018-05-08T04:36:35 | 2018-05-08T04:36:35 | null | UTF-8 | Python | false | false | 589 | py |
import asyncio
from aiobbox.cluster import get_cluster
from aiobbox.client import pool
from aiobbox.handler import BaseHandler
class Handler(BaseHandler):
def add_arguments(self, parser):
parser.add_argument('--a',
type=float)
parser.add_argument('--b',
type=float)
async def run(self, args):
#for _ in range(2):
#r = await pool.calc.echostr('888', retry=100)
# print(r)
# await asyncio.sleep(3)
r = await pool.calc.add2num(args.a, args.b)
print(r)
| [
"[email protected]"
] | |
c26c22a572fc6788014f0e253f267b21cc87a9dd | 1d928c3f90d4a0a9a3919a804597aa0a4aab19a3 | /python/statsmodels/2016/12/test_tsaplots.py | 91616d7673599e86c8dfeba8b0c28021f08f1d27 | [] | no_license | rosoareslv/SED99 | d8b2ff5811e7f0ffc59be066a5a0349a92cbb845 | a062c118f12b93172e31e8ca115ce3f871b64461 | refs/heads/main | 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null | UTF-8 | Python | false | false | 4,803 | py |
from statsmodels.compat.python import lmap, map
import numpy as np
import pandas as pd
from numpy.testing import dec, assert_equal
import statsmodels.api as sm
from statsmodels.graphics.tsaplots import (plot_acf, plot_pacf, month_plot,
quarter_plot, seasonal_plot)
import statsmodels.tsa.arima_process as tsp
try:
import matplotlib.pyplot as plt
have_matplotlib = True
except ImportError:
have_matplotlib = False
@dec.skipif(not have_matplotlib)
def test_plot_acf():
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
acf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_acf(acf, ax=ax, lags=10)
plot_acf(acf, ax=ax)
plot_acf(acf, ax=ax, alpha=None)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def test_plot_acf_irregular():
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
acf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_acf(acf, ax=ax, lags=np.arange(1, 11))
plot_acf(acf, ax=ax, lags=10, zero=False)
plot_acf(acf, ax=ax, alpha=None, zero=False)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def test_plot_pacf():
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
pacf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_pacf(pacf, ax=ax)
plot_pacf(pacf, ax=ax, alpha=None)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def test_plot_pacf_irregular():
# Just test that it runs.
fig = plt.figure()
ax = fig.add_subplot(111)
ar = np.r_[1., -0.9]
ma = np.r_[1., 0.9]
armaprocess = tsp.ArmaProcess(ar, ma)
rs = np.random.RandomState(1234)
pacf = armaprocess.generate_sample(100, distrvs=rs.standard_normal)
plot_pacf(pacf, ax=ax, lags=np.arange(1, 11))
plot_pacf(pacf, ax=ax, lags=10, zero=False)
plot_pacf(pacf, ax=ax, alpha=None, zero=False)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def test_plot_month():
dta = sm.datasets.elnino.load_pandas().data
dta['YEAR'] = dta.YEAR.astype(int).apply(str)
dta = dta.set_index('YEAR').T.unstack()
dates = lmap(lambda x: pd.tseries.tools.parse_time_string('1 '+' '.join(x))[0],
dta.index.values)
# test dates argument
fig = month_plot(dta.values, dates=dates, ylabel='el nino')
plt.close(fig)
# test with a TimeSeries DatetimeIndex with no freq
dta.index = pd.DatetimeIndex(dates)
fig = month_plot(dta)
plt.close(fig)
# w freq
dta.index = pd.DatetimeIndex(dates, freq='MS')
fig = month_plot(dta)
plt.close(fig)
# test with a TimeSeries PeriodIndex
dta.index = pd.PeriodIndex(dates, freq='M')
fig = month_plot(dta)
plt.close(fig)
@dec.skipif(not have_matplotlib)
def test_plot_quarter():
dta = sm.datasets.macrodata.load_pandas().data
dates = lmap('Q'.join, zip(dta.year.astype(int).apply(str),
dta.quarter.astype(int).apply(str)))
# test dates argument
quarter_plot(dta.unemp.values, dates)
plt.close('all')
# test with a DatetimeIndex with no freq
parser = pd.tseries.tools.parse_time_string
dta.set_index(pd.DatetimeIndex((x[0] for x in map(parser, dates))),
inplace=True)
quarter_plot(dta.unemp)
plt.close('all')
# w freq
# see pandas #6631
dta.index = pd.DatetimeIndex((x[0] for x in map(parser, dates)),
freq='QS-Oct')
quarter_plot(dta.unemp)
plt.close('all')
# w PeriodIndex
dta.index = pd.PeriodIndex((x[0] for x in map(parser, dates)),
freq='Q')
quarter_plot(dta.unemp)
plt.close('all')
@dec.skipif(not have_matplotlib)
def test_seasonal_plot():
rs = np.random.RandomState(1234)
data = rs.randn(20,12)
data += 6*np.sin(np.arange(12.0)/11*np.pi)[None,:]
data = data.ravel()
months = np.tile(np.arange(1,13),(20,1))
months = months.ravel()
df = pd.DataFrame([data,months],index=['data','months']).T
grouped = df.groupby('months')['data']
labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
fig = seasonal_plot(grouped, labels)
ax = fig.get_axes()[0]
output = [tl.get_text() for tl in ax.get_xticklabels()]
assert_equal(labels, output)
plt.close('all')
| [
"[email protected]"
] | |
46db86e79771ab26fcdc605bdf0ce05b8ed2ab07 | 2735c5f1a9b1f1a3d2468f0838fc0f20725cbe31 | /usr/lib/pymodules/python2.7/numpy/core/tests/test_ufunc.py | 43f4f539e4e53be8feb7e24a89b82b135eb556ca | [] | no_license | sanyaade-iot/rpi-sysroot | f202b9188fd99c372d28b59ebe1b8fcabbfb7a67 | 6e13f05b5b2b44b29ead66c96230a17f077d5198 | refs/heads/master | 2020-04-08T16:14:25.745147 | 2014-03-21T06:27:54 | 2014-03-21T09:47:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | py | /usr/share/pyshared/numpy/core/tests/test_ufunc.py | [
"[email protected]"
] | |
71ae2a5380a5dac7a539c945d44e63ada72a53ba | bf4f4731f099dcdc964509b51ffc6ce8875f6041 | /ll.py | c48d2277a0c507a6b81eae89b47f552294de014e | [
"MIT"
] | permissive | YanglanWang/squad | 8c9b98d3bd4ff0fe824bc08fc2a314fb45936b38 | 1019e4c7bf1a90c049d16ed1b48553964468c790 | refs/heads/master | 2020-06-19T19:02:30.891520 | 2019-08-02T15:53:15 | 2019-08-02T15:53:15 | 196,834,600 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py |
# import torch
# import torch.nn.functional as F
# import torch.nn as nn
# # input is of size N x C = 3 x 5
# m = nn.LogSoftmax(dim=1)
# input = torch.randn(3, 5, requires_grad=True)
# print(input)
# print(F.softmax(input))
# print(F.log_softmax(input))
# print(m(input))
# # each element in target has to have 0 <= value < C
# target = torch.tensor([1, 0, 4])
# output = F.nll_loss(m(input), target)
# print(output)
# print(output.backward())
# import string
#
# def remove_punc(text):
# exclude = set(string.punctuation)
# return ''.join(ch for ch in text if ch not in exclude)
# def white_space_fix(text):
# return ' '.join(text.split())
# b=remove_punc('I LOVE the Beijing!')
# a=white_space_fix(b)
# print(a)
# from collections import Counter
#
# a=Counter(['a','abc','bca','js','a','b','b'])
# b=Counter(['c','ccc','aa','a'])
# c=a&b
# print(c)
# prediction='abc'
# ground_truths=['abc','aa']
# b=(float(bool(prediction) == bool(ground_truths)))
# print(b)
import torch
a = torch.randn(3, 5)
print(a)
max_in_row, _ = torch.max(a, dim=1)
max_in_col, _ = torch.max(a, dim=0)
print(max_in_row, max_in_col)
start_idxs = torch.argmax(max_in_row, -1)
end_idxs = torch.argmax(max_in_col, -1)
print(start_idxs, end_idxs) | [
"[email protected]"
] | |
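The end of `ll.py` above picks the row and column maxima independently; for SQuAD-style span prediction one usually wants the jointly best (start, end) pair with start <= end. A sketch of that variant, assuming `a[i, j]` scores a span starting at i and ending at j (a convention the original file does not state):

```python
# Sketch: joint argmax over the upper triangle, so start <= end is enforced.
import torch

a = torch.randn(5, 5)
keep = torch.triu(torch.ones_like(a)).bool()      # valid (start, end) pairs
masked = a.masked_fill(~keep, float('-inf'))      # rule out start > end
flat = torch.argmax(masked)                       # index into flattened matrix
start_idx, end_idx = flat // a.size(1), flat % a.size(1)
print(start_idx.item(), end_idx.item())
```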
818a6404248346ef4bcc70ea941cc25a5cc2006c | 1b7b13984a90b5d11331966fe1b1bb88f2b85dd7 | /modeling/test.py | b583abada1374a6055aa896f3d6fcaa16e67abe6 | [] | no_license | yekeren/VCR | 23cf6b17ce7adcc057a531182898900dcd75727b | 6a573edf3183e9a8ef2056449c53865d8e27125d | refs/heads/master | 2023-04-06T05:41:09.851825 | 2020-04-14T18:26:42 | 2020-04-14T18:26:42 | 232,395,848 | 0 | 0 | null | 2023-03-24T23:35:46 | 2020-01-07T19:01:27 | Python | UTF-8 | Python | false | false | 2,259 | py |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import os
import numpy as np
import tensorflow as tf
from google.protobuf import text_format
from protos import pipeline_pb2
from modeling import trainer
from readers import reader
from readers.vcr_text_only_reader import InputFields
from readers.vcr_text_only_reader import NUM_CHOICES
from vcr import builder
from protos import pipeline_pb2
import json
flags.DEFINE_string('model_dir', None,
'Path to the directory which holds model checkpoints.')
flags.DEFINE_string('pipeline_proto', None, 'Path to the pipeline proto file.')
FLAGS = flags.FLAGS
FIELD_ANSWER_PREDICTION = 'answer_prediction'
def _load_pipeline_proto(filename):
"""Loads pipeline proto from file.
Args:
filename: Path to the pipeline config file.
Returns:
An instance of pipeline_pb2.Pipeline.
"""
with tf.io.gfile.GFile(filename, 'r') as fp:
return text_format.Merge(fp.read(), pipeline_pb2.Pipeline())
def main(_):
logging.set_verbosity(logging.DEBUG)
for gpu in tf.config.experimental.list_physical_devices('GPU'):
tf.config.experimental.set_memory_growth(gpu, True)
pipeline_proto = _load_pipeline_proto(FLAGS.pipeline_proto)
for example in trainer.predict(pipeline_proto, FLAGS.model_dir):
batch_size = len(example['question'])
for i in range(batch_size):
print('#' * 128)
print(example['question'][i])
print(example['answer_label'][i])
import pdb
pdb.set_trace()
for j in range(4):
sentence = []
for token, indicator in zip(example['answer_choices'][i, j],
example['shortcut_mask'][i, j]):
if not indicator:
sentence.append(token.decode('utf8') + '[REMOVE]')
else:
sentence.append(token.decode('utf8'))
print(' '.join(sentence))
print(example['answer_logits'][i][j].tolist())
print(example['a_soft_sample'][i][j].tolist())
print()
if __name__ == '__main__':
flags.mark_flag_as_required('model_dir')
flags.mark_flag_as_required('pipeline_proto')
app.run(main)
| [
"[email protected]"
] | |
96df20298ac9dc1fdaca45b61783c50aaaad575f | 219d7694180482e0b9944deb6dee11dcf7bf0e23 | /morecvutils/connectedComponents.py | 58ed9279dc1b6fef5516c8a7b21f18623978e5ce | [
"MIT"
] | permissive | Aresthu/morecvutils | 5e3bfcba2b5c48ec022e641f19b40e2836d2d6bd | 4856d98c45dbd6bcfb86c87f7ec9987c378e244c | refs/heads/master | 2021-04-09T16:03:01.956218 | 2018-02-18T07:14:41 | 2018-02-18T07:14:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,580 | py |
import cv2
try: #OpenCV 2.4
from cv2 import SimpleBlobDetector as SimpleBlobDetector
except ImportError: #OpenCV 3
from cv2 import SimpleBlobDetector_create as SimpleBlobDetector
from numpy import asarray
def doblob(morphed,blobdet,img,anno=True):
"""
img: can be RGB (MxNx3) or gray (MxN)
http://docs.opencv.org/master/modules/features2d/doc/drawing_function_of_keypoints_and_matches.html
http://docs.opencv.org/trunk/modules/features2d/doc/drawing_function_of_keypoints_and_matches.html
"""
keypoints = blobdet.detect(morphed)
nkey = len(keypoints)
kpsize = asarray([k.size for k in keypoints])
final = img.copy() # is the .copy necessary?
final = cv2.drawKeypoints(img, keypoints, outImage=final,
flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
#%% plot count of blobs
if anno:
cv2.putText(final, text=str(nkey), org=(int(img.shape[1]*.9),25),
fontFace=cv2.FONT_HERSHEY_PLAIN, fontScale=2,
color=(0,255,0), thickness=2)
return final,nkey,kpsize
def setupblob(minarea, maxarea, mindist):
blobparam = cv2.SimpleBlobDetector_Params()
blobparam.filterByArea = True
blobparam.filterByColor = False
blobparam.filterByCircularity = False
blobparam.filterByInertia = False
blobparam.filterByConvexity = False
blobparam.minDistBetweenBlobs = mindist
blobparam.minArea = minarea
blobparam.maxArea = maxarea
#blobparam.minThreshold = 40 #we have already made a binary image
return SimpleBlobDetector(blobparam)
| [
"[email protected]"
] | |
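A small sketch of wiring `setupblob` and `doblob` from the module above together; the synthetic image, import path, and parameter values are illustrative assumptions:

```python
# Illustrative only: a dark synthetic disc on a white background, since
# SimpleBlobDetector's thresholding defaults favor dark blobs.
import cv2
import numpy as np
from morecvutils.connectedComponents import setupblob, doblob

img = np.full((240, 320), 255, dtype=np.uint8)
cv2.circle(img, (160, 120), 20, 0, -1)          # one dark disc, ~1257 px area
blobdet = setupblob(minarea=50, maxarea=5000, mindist=10)
annotated, nblobs, sizes = doblob(img, blobdet, img)
print(nblobs, sizes)
```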
f004405fef2fdf69649331b17e36bc392d5c02ba | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/wikipedia/testcase/pwpreferencecases/PW_pre_8.py | 59c846093a4300d149fa46929cd4e09dfe78d39a | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,018 | py |
#coding=utf-8
import os
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'org.wikipedia',
'appActivity' : 'org.wikipedia.main.MainActivity',
'resetKeyboard' : True,
'noReset' : True
}
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
def scrollToFindElement(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
swipe(driver, 0.5, 0.55, 0.5, 0.2)
else:
return element
return
def clickoncheckable(driver, str, value = "true") :
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if (len(lists) == 1) :
innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
nowvalue = innere.get_attribute("checked")
if (nowvalue != value) :
parent.click()
break
except NoSuchElementException:
continue
# preference setting and exit
try :
os.popen("adb shell am start -a android.intent.action.VIEW -d file:///mnt/sdcard/music/MoonFlow.mp3 -t audio/wav -f 1")
os.popen("adb shell svc data enable")
os.popen("adb shell service call bluetooth_manager 8")
os.popen("adb shell svc wifi diable")
os.popen("adb shell settings put secure location_providers_allowed 'false'")
os.popen("adb shell settings put secure location_providers_allowed network,gps")
time.sleep(5)
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.DeveloperSettingsActivity")
scrollToFindElement(driver, "new UiSelector().text(\"useRestbase_setManually\")").click()
clickoncheckable(driver, "new UiSelector().text(\"useRestbase_setManually\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"useRestbase\")").click()
clickoncheckable(driver, "new UiSelector().text(\"useRestbase\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"mediaWikiBaseUriSupportsLangCode\")").click()
clickoncheckable(driver, "new UiSelector().text(\"mediaWikiBaseUriSupportsLangCode\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"Retrofit Log Level (restart required)\")").click()
scrollToFindElement(driver, "new UiSelector().text(\"NONE\")").click()
scrollToFindElement(driver, "new UiSelector().text(\"suppressNotificationPolling\")").click()
clickoncheckable(driver, "new UiSelector().text(\"suppressNotificationPolling\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"showDeveloperSettings\")").click()
clickoncheckable(driver, "new UiSelector().text(\"showDeveloperSettings\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"memoryLeakTest\")").click()
clickoncheckable(driver, "new UiSelector().text(\"memoryLeakTest\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"crashedBeforeActivityCreated\")").click()
clickoncheckable(driver, "new UiSelector().text(\"crashedBeforeActivityCreated\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"initialOnboardingEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"initialOnboardingEnabled\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"descriptionEditTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"descriptionEditTutorialEnabled\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"tocTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"tocTutorialEnabled\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"selectTextTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"selectTextTutorialEnabled\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"shareTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"shareTutorialEnabled\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"multilingualSearchTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"multilingualSearchTutorialEnabled\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"readingListTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListTutorialEnabled\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"readingListSyncReminder\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListSyncReminder\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"readingListLoginReminder\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListLoginReminder\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"zeroTutorialEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"zeroTutorialEnabled\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"feedCustomizeOnboardingCardEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"feedCustomizeOnboardingCardEnabled\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"feedReadingListsSyncOnboardingCardEnabled\")").click()
clickoncheckable(driver, "new UiSelector().text(\"feedReadingListsSyncOnboardingCardEnabled\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"showReadingListsSyncPrompt\")").click()
clickoncheckable(driver, "new UiSelector().text(\"showReadingListsSyncPrompt\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"showRemoveChineseVariantPrompt\")").click()
clickoncheckable(driver, "new UiSelector().text(\"showRemoveChineseVariantPrompt\")", "false")
driver.press_keycode(4)
time.sleep(2)
os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.SettingsActivity")
scrollToFindElement(driver, "new UiSelector().text(\"Show link previews\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Show link previews\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Reading list syncing\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Reading list syncing\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Download only over Wi-Fi\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Download only over Wi-Fi\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Show images\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Show images\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"Prefer offline content\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Prefer offline content\")", "true")
scrollToFindElement(driver, "new UiSelector().text(\"Send usage reports\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Send usage reports\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Send crash reports\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Send crash reports\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Warn if leaving Wikipedia Zero\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Warn if leaving Wikipedia Zero\")", "true")
driver.press_keycode(4)
time.sleep(2)
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
finally :
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
os.popen("adb shell input keyevent 127")
os.popen("adb shell svc data enable")
os.popen("adb shell service call bluetooth_manager 6")
os.popen("adb shell svc wifi enable")
os.popen("adb shell settings put secure location_providers_allowed gps, network")
driver.quit()
| [
"[email protected]"
] | |
df595c77d73ecc81233488dc652b38172a935850 | d86c52f4098fd9c1a102c2d3f5630556e0610fa2 | /fitle/myenv/Lib/site-packages/django/urls/base.py | 0caa424c33ca350337f9994d131b8e12e6d6d6ca | [] | no_license | makadama/bitbucket | 24f05c4946168ed15d4f56bfdc45fd6c0774e0f2 | cabfd551b92fe1af6d9d14ab9eb3d9974b64aa79 | refs/heads/master | 2023-06-19T19:04:03.894599 | 2021-07-15T12:10:39 | 2021-07-15T12:10:39 | 385,203,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py |
version https://git-lfs.github.com/spec/v1
oid sha256:b1fa9d73dc9c6873504ab270b4646eb91e5cd202f9f0a9fffff2342e2183d8ab
size 5587
| [
"[email protected]"
] | |
5b6d5e796aa35788250c17aa14808559a36e5c1d | a3b306df800059a5b74975793251a28b8a5f49c7 | /Graphs/LX-2/molecule_otsu = False/BioImageXD-1.0/ITK/lib/InsightToolkit/WrapITK/lib/itkCannyEdgeDetectionImageFilterPython.py | 5b48b0c6d95f111348c8b5f16df1963f18580000 | [] | no_license | giacomo21/Image-analysis | dc17ba2b6eb53f48963fad931568576fda4e1349 | ea8bafa073de5090bd8f83fb4f5ca16669d0211f | refs/heads/master | 2016-09-06T21:42:13.530256 | 2013-07-22T09:35:56 | 2013-07-22T09:35:56 | 11,384,784 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 44,738 | py |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.40
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (3,0,0):
new_instancemethod = lambda func, inst, cls: _itkCannyEdgeDetectionImageFilterPython.SWIG_PyInstanceMethod_New(func)
else:
from new import instancemethod as new_instancemethod
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_itkCannyEdgeDetectionImageFilterPython', [dirname(__file__)])
except ImportError:
import _itkCannyEdgeDetectionImageFilterPython
return _itkCannyEdgeDetectionImageFilterPython
if fp is not None:
try:
_mod = imp.load_module('_itkCannyEdgeDetectionImageFilterPython', fp, pathname, description)
finally:
fp.close()
return _mod
_itkCannyEdgeDetectionImageFilterPython = swig_import_helper()
del swig_import_helper
else:
import _itkCannyEdgeDetectionImageFilterPython
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import itkFixedArrayPython
import pyBasePython
import ITKRegionsPython
import ITKCommonBasePython
import itkEventObjectsPython
import itkSizePython
import itkIndexPython
import itkOffsetPython
import itkImageToImageFilterAPython
import itkImagePython
import itkCovariantVectorPython
import vnl_vectorPython
import vcl_complexPython
import vnl_matrixPython
import itkVectorPython
import vnl_vector_refPython
import itkPointPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkRGBAPixelPython
import itkSymmetricSecondRankTensorPython
import itkRGBPixelPython
import itkImageSourcePython
import itkVectorImagePython
import itkVariableLengthVectorPython
def itkCannyEdgeDetectionImageFilterID3ID3_New():
return itkCannyEdgeDetectionImageFilterID3ID3.New()
def itkCannyEdgeDetectionImageFilterID2ID2_New():
return itkCannyEdgeDetectionImageFilterID2ID2.New()
def itkCannyEdgeDetectionImageFilterIF3IF3_New():
return itkCannyEdgeDetectionImageFilterIF3IF3.New()
def itkCannyEdgeDetectionImageFilterIF2IF2_New():
return itkCannyEdgeDetectionImageFilterIF2IF2.New()
class itkCannyEdgeDetectionImageFilterID2ID2(itkImageToImageFilterAPython.itkImageToImageFilterID2ID2):
"""Proxy of C++ itkCannyEdgeDetectionImageFilterID2ID2 class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
ImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_ImageDimension
OutputImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_OutputImageDimension
InputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_InputHasNumericTraitsCheck
OutputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_OutputHasNumericTraitsCheck
SameDimensionCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SameDimensionCheck
InputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_InputIsFloatingPointCheck
OutputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_OutputIsFloatingPointCheck
def __New_orig__():
"""__New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def GetVariance(self):
"""GetVariance(self) -> itkFixedArrayD2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetVariance(self)
def GetMaximumError(self):
"""GetMaximumError(self) -> itkFixedArrayD2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetMaximumError(self)
def SetVariance(self, *args):
"""
SetVariance(self, itkFixedArrayD2 _arg)
SetVariance(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetVariance(self, *args)
def SetMaximumError(self, *args):
"""
SetMaximumError(self, itkFixedArrayD2 _arg)
SetMaximumError(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetMaximumError(self, *args)
def SetThreshold(self, *args):
"""SetThreshold(self, double th)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetThreshold(self, *args)
def GetThreshold(self, *args):
"""GetThreshold(self, double th) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetThreshold(self, *args)
def SetUpperThreshold(self, *args):
"""SetUpperThreshold(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetUpperThreshold(self, *args)
def GetUpperThreshold(self):
"""GetUpperThreshold(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetUpperThreshold(self)
def SetLowerThreshold(self, *args):
"""SetLowerThreshold(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetLowerThreshold(self, *args)
def GetLowerThreshold(self):
"""GetLowerThreshold(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetLowerThreshold(self)
def SetOutsideValue(self, *args):
"""SetOutsideValue(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetOutsideValue(self, *args)
def GetOutsideValue(self):
"""GetOutsideValue(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetOutsideValue(self)
def GetNonMaximumSuppressionImage(self):
"""GetNonMaximumSuppressionImage(self) -> itkImageD2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetNonMaximumSuppressionImage(self)
def GenerateInputRequestedRegion(self):
"""GenerateInputRequestedRegion(self)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GenerateInputRequestedRegion(self)
__swig_destroy__ = _itkCannyEdgeDetectionImageFilterPython.delete_itkCannyEdgeDetectionImageFilterID2ID2
def cast(*args):
"""cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterID2ID2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_cast(*args)
cast = staticmethod(cast)
def GetPointer(self):
"""GetPointer(self) -> itkCannyEdgeDetectionImageFilterID2ID2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetPointer(self)
def New(*args, **kargs):
"""New() -> itkCannyEdgeDetectionImageFilterID2ID2
Create a new object of the class itkCannyEdgeDetectionImageFilterID2ID2 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkCannyEdgeDetectionImageFilterID2ID2.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkCannyEdgeDetectionImageFilterID2ID2.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkCannyEdgeDetectionImageFilterID2ID2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkCannyEdgeDetectionImageFilterID2ID2.GetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetVariance,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetMaximumError,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetVariance,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetMaximumError,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetUpperThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetUpperThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetLowerThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetLowerThreshold,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.SetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_SetOutsideValue,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetOutsideValue,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetNonMaximumSuppressionImage = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetNonMaximumSuppressionImage,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GenerateInputRequestedRegion = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GenerateInputRequestedRegion,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2.GetPointer = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_GetPointer,None,itkCannyEdgeDetectionImageFilterID2ID2)
itkCannyEdgeDetectionImageFilterID2ID2_swigregister = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_swigregister
itkCannyEdgeDetectionImageFilterID2ID2_swigregister(itkCannyEdgeDetectionImageFilterID2ID2)
def itkCannyEdgeDetectionImageFilterID2ID2___New_orig__():
"""itkCannyEdgeDetectionImageFilterID2ID2___New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2___New_orig__()
def itkCannyEdgeDetectionImageFilterID2ID2_cast(*args):
"""itkCannyEdgeDetectionImageFilterID2ID2_cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterID2ID2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID2ID2_cast(*args)
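# Illustrative usage sketch, following the New() docstring above; 'reader'
# is a hypothetical itk image source (e.g. a file reader producing a 2-D
# double image), and the variance/threshold values are arbitrary.
#
#   canny = itkCannyEdgeDetectionImageFilterID2ID2.New()
#   canny.SetInput(0, reader.GetOutput())
#   canny.SetVariance(2.0)
#   canny.SetUpperThreshold(10.0)
#   canny.SetLowerThreshold(5.0)
#   canny.Update()
#   edges = canny.GetOutput()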
class itkCannyEdgeDetectionImageFilterID3ID3(itkImageToImageFilterAPython.itkImageToImageFilterID3ID3):
"""Proxy of C++ itkCannyEdgeDetectionImageFilterID3ID3 class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
ImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_ImageDimension
OutputImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_OutputImageDimension
InputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_InputHasNumericTraitsCheck
OutputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_OutputHasNumericTraitsCheck
SameDimensionCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SameDimensionCheck
InputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_InputIsFloatingPointCheck
OutputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_OutputIsFloatingPointCheck
def __New_orig__():
"""__New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def GetVariance(self):
"""GetVariance(self) -> itkFixedArrayD3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetVariance(self)
def GetMaximumError(self):
"""GetMaximumError(self) -> itkFixedArrayD3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetMaximumError(self)
def SetVariance(self, *args):
"""
SetVariance(self, itkFixedArrayD3 _arg)
SetVariance(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetVariance(self, *args)
def SetMaximumError(self, *args):
"""
SetMaximumError(self, itkFixedArrayD3 _arg)
SetMaximumError(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetMaximumError(self, *args)
def SetThreshold(self, *args):
"""SetThreshold(self, double th)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetThreshold(self, *args)
def GetThreshold(self, *args):
"""GetThreshold(self, double th) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetThreshold(self, *args)
def SetUpperThreshold(self, *args):
"""SetUpperThreshold(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetUpperThreshold(self, *args)
def GetUpperThreshold(self):
"""GetUpperThreshold(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetUpperThreshold(self)
def SetLowerThreshold(self, *args):
"""SetLowerThreshold(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetLowerThreshold(self, *args)
def GetLowerThreshold(self):
"""GetLowerThreshold(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetLowerThreshold(self)
def SetOutsideValue(self, *args):
"""SetOutsideValue(self, double _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetOutsideValue(self, *args)
def GetOutsideValue(self):
"""GetOutsideValue(self) -> double"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetOutsideValue(self)
def GetNonMaximumSuppressionImage(self):
"""GetNonMaximumSuppressionImage(self) -> itkImageD3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetNonMaximumSuppressionImage(self)
def GenerateInputRequestedRegion(self):
"""GenerateInputRequestedRegion(self)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GenerateInputRequestedRegion(self)
__swig_destroy__ = _itkCannyEdgeDetectionImageFilterPython.delete_itkCannyEdgeDetectionImageFilterID3ID3
def cast(*args):
"""cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterID3ID3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_cast(*args)
cast = staticmethod(cast)
def GetPointer(self):
"""GetPointer(self) -> itkCannyEdgeDetectionImageFilterID3ID3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetPointer(self)
def New(*args, **kargs):
"""New() -> itkCannyEdgeDetectionImageFilterID3ID3
Create a new object of the class itkCannyEdgeDetectionImageFilterID3ID3 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkCannyEdgeDetectionImageFilterID3ID3.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkCannyEdgeDetectionImageFilterID3ID3.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkCannyEdgeDetectionImageFilterID3ID3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkCannyEdgeDetectionImageFilterID3ID3.GetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetVariance,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetMaximumError,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetVariance,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetMaximumError,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetUpperThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetUpperThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetLowerThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetLowerThreshold,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.SetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_SetOutsideValue,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetOutsideValue,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetNonMaximumSuppressionImage = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetNonMaximumSuppressionImage,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GenerateInputRequestedRegion = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GenerateInputRequestedRegion,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3.GetPointer = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_GetPointer,None,itkCannyEdgeDetectionImageFilterID3ID3)
itkCannyEdgeDetectionImageFilterID3ID3_swigregister = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_swigregister
itkCannyEdgeDetectionImageFilterID3ID3_swigregister(itkCannyEdgeDetectionImageFilterID3ID3)
def itkCannyEdgeDetectionImageFilterID3ID3___New_orig__():
"""itkCannyEdgeDetectionImageFilterID3ID3___New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3___New_orig__()
def itkCannyEdgeDetectionImageFilterID3ID3_cast(*args):
"""itkCannyEdgeDetectionImageFilterID3ID3_cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterID3ID3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterID3ID3_cast(*args)
class itkCannyEdgeDetectionImageFilterIF2IF2(itkImageToImageFilterAPython.itkImageToImageFilterIF2IF2):
"""Proxy of C++ itkCannyEdgeDetectionImageFilterIF2IF2 class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
ImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_ImageDimension
OutputImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_OutputImageDimension
InputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_InputHasNumericTraitsCheck
OutputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_OutputHasNumericTraitsCheck
SameDimensionCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SameDimensionCheck
InputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_InputIsFloatingPointCheck
OutputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_OutputIsFloatingPointCheck
def __New_orig__():
"""__New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def GetVariance(self):
"""GetVariance(self) -> itkFixedArrayD2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetVariance(self)
def GetMaximumError(self):
"""GetMaximumError(self) -> itkFixedArrayD2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetMaximumError(self)
def SetVariance(self, *args):
"""
SetVariance(self, itkFixedArrayD2 _arg)
SetVariance(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetVariance(self, *args)
def SetMaximumError(self, *args):
"""
SetMaximumError(self, itkFixedArrayD2 _arg)
SetMaximumError(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetMaximumError(self, *args)
def SetThreshold(self, *args):
"""SetThreshold(self, float th)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetThreshold(self, *args)
def GetThreshold(self, *args):
"""GetThreshold(self, float th) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetThreshold(self, *args)
def SetUpperThreshold(self, *args):
"""SetUpperThreshold(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetUpperThreshold(self, *args)
def GetUpperThreshold(self):
"""GetUpperThreshold(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetUpperThreshold(self)
def SetLowerThreshold(self, *args):
"""SetLowerThreshold(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetLowerThreshold(self, *args)
def GetLowerThreshold(self):
"""GetLowerThreshold(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetLowerThreshold(self)
def SetOutsideValue(self, *args):
"""SetOutsideValue(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetOutsideValue(self, *args)
def GetOutsideValue(self):
"""GetOutsideValue(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetOutsideValue(self)
def GetNonMaximumSuppressionImage(self):
"""GetNonMaximumSuppressionImage(self) -> itkImageF2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetNonMaximumSuppressionImage(self)
def GenerateInputRequestedRegion(self):
"""GenerateInputRequestedRegion(self)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GenerateInputRequestedRegion(self)
__swig_destroy__ = _itkCannyEdgeDetectionImageFilterPython.delete_itkCannyEdgeDetectionImageFilterIF2IF2
def cast(*args):
"""cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterIF2IF2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_cast(*args)
cast = staticmethod(cast)
def GetPointer(self):
"""GetPointer(self) -> itkCannyEdgeDetectionImageFilterIF2IF2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetPointer(self)
def New(*args, **kargs):
"""New() -> itkCannyEdgeDetectionImageFilterIF2IF2
Create a new object of the class itkCannyEdgeDetectionImageFilterIF2IF2 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkCannyEdgeDetectionImageFilterIF2IF2.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkCannyEdgeDetectionImageFilterIF2IF2.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkCannyEdgeDetectionImageFilterIF2IF2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkCannyEdgeDetectionImageFilterIF2IF2.GetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetVariance,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetMaximumError,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetVariance,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetMaximumError,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetUpperThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetUpperThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetLowerThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetLowerThreshold,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.SetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_SetOutsideValue,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetOutsideValue,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetNonMaximumSuppressionImage = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetNonMaximumSuppressionImage,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GenerateInputRequestedRegion = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GenerateInputRequestedRegion,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2.GetPointer = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_GetPointer,None,itkCannyEdgeDetectionImageFilterIF2IF2)
itkCannyEdgeDetectionImageFilterIF2IF2_swigregister = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_swigregister
itkCannyEdgeDetectionImageFilterIF2IF2_swigregister(itkCannyEdgeDetectionImageFilterIF2IF2)
def itkCannyEdgeDetectionImageFilterIF2IF2___New_orig__():
"""itkCannyEdgeDetectionImageFilterIF2IF2___New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2___New_orig__()
def itkCannyEdgeDetectionImageFilterIF2IF2_cast(*args):
"""itkCannyEdgeDetectionImageFilterIF2IF2_cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterIF2IF2"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF2IF2_cast(*args)
class itkCannyEdgeDetectionImageFilterIF3IF3(itkImageToImageFilterAPython.itkImageToImageFilterIF3IF3):
"""Proxy of C++ itkCannyEdgeDetectionImageFilterIF3IF3 class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
ImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_ImageDimension
OutputImageDimension = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_OutputImageDimension
InputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_InputHasNumericTraitsCheck
OutputHasNumericTraitsCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_OutputHasNumericTraitsCheck
SameDimensionCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SameDimensionCheck
InputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_InputIsFloatingPointCheck
OutputIsFloatingPointCheck = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_OutputIsFloatingPointCheck
def __New_orig__():
"""__New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def GetVariance(self):
"""GetVariance(self) -> itkFixedArrayD3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetVariance(self)
def GetMaximumError(self):
"""GetMaximumError(self) -> itkFixedArrayD3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetMaximumError(self)
def SetVariance(self, *args):
"""
SetVariance(self, itkFixedArrayD3 _arg)
SetVariance(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetVariance(self, *args)
def SetMaximumError(self, *args):
"""
SetMaximumError(self, itkFixedArrayD3 _arg)
SetMaximumError(self, double v)
"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetMaximumError(self, *args)
def SetThreshold(self, *args):
"""SetThreshold(self, float th)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetThreshold(self, *args)
def GetThreshold(self, *args):
"""GetThreshold(self, float th) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetThreshold(self, *args)
def SetUpperThreshold(self, *args):
"""SetUpperThreshold(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetUpperThreshold(self, *args)
def GetUpperThreshold(self):
"""GetUpperThreshold(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetUpperThreshold(self)
def SetLowerThreshold(self, *args):
"""SetLowerThreshold(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetLowerThreshold(self, *args)
def GetLowerThreshold(self):
"""GetLowerThreshold(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetLowerThreshold(self)
def SetOutsideValue(self, *args):
"""SetOutsideValue(self, float _arg)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetOutsideValue(self, *args)
def GetOutsideValue(self):
"""GetOutsideValue(self) -> float"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetOutsideValue(self)
def GetNonMaximumSuppressionImage(self):
"""GetNonMaximumSuppressionImage(self) -> itkImageF3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetNonMaximumSuppressionImage(self)
def GenerateInputRequestedRegion(self):
"""GenerateInputRequestedRegion(self)"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GenerateInputRequestedRegion(self)
__swig_destroy__ = _itkCannyEdgeDetectionImageFilterPython.delete_itkCannyEdgeDetectionImageFilterIF3IF3
def cast(*args):
"""cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterIF3IF3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_cast(*args)
cast = staticmethod(cast)
def GetPointer(self):
"""GetPointer(self) -> itkCannyEdgeDetectionImageFilterIF3IF3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetPointer(self)
def New(*args, **kargs):
"""New() -> itkCannyEdgeDetectionImageFilterIF3IF3
Create a new object of the class itkCannyEdgeDetectionImageFilterIF3IF3 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkCannyEdgeDetectionImageFilterIF3IF3.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkCannyEdgeDetectionImageFilterIF3IF3.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkCannyEdgeDetectionImageFilterIF3IF3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkCannyEdgeDetectionImageFilterIF3IF3.GetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetVariance,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetMaximumError,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetVariance = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetVariance,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetMaximumError = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetMaximumError,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetUpperThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetUpperThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetUpperThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetLowerThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetLowerThreshold = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetLowerThreshold,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.SetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_SetOutsideValue,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetOutsideValue = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetOutsideValue,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetNonMaximumSuppressionImage = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetNonMaximumSuppressionImage,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GenerateInputRequestedRegion = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GenerateInputRequestedRegion,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3.GetPointer = new_instancemethod(_itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_GetPointer,None,itkCannyEdgeDetectionImageFilterIF3IF3)
itkCannyEdgeDetectionImageFilterIF3IF3_swigregister = _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_swigregister
itkCannyEdgeDetectionImageFilterIF3IF3_swigregister(itkCannyEdgeDetectionImageFilterIF3IF3)
def itkCannyEdgeDetectionImageFilterIF3IF3___New_orig__():
"""itkCannyEdgeDetectionImageFilterIF3IF3___New_orig__()"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3___New_orig__()
def itkCannyEdgeDetectionImageFilterIF3IF3_cast(*args):
"""itkCannyEdgeDetectionImageFilterIF3IF3_cast(itkLightObject obj) -> itkCannyEdgeDetectionImageFilterIF3IF3"""
return _itkCannyEdgeDetectionImageFilterPython.itkCannyEdgeDetectionImageFilterIF3IF3_cast(*args)
| [
"[email protected]"
] | |
3a5912957350ed986573050c9f331ab000478692 | 5d0e8ac83fc0e39adb1b031cc01187bcdeb3a452 | /h2o-py/tests/testdir_javapredict/pyunit_javapredict_irisDRF.py | dde1d997ffd534e4322911996f0d558a268813e9 | [
"Apache-2.0"
] | permissive | xxushans/h2o-3 | f466a3faebb7342c7e41266b5d8ba0a40a2d1cff | 1567366c926b932acf8051a9ef579b966133f5f8 | refs/heads/master | 2021-01-14T08:56:24.477570 | 2015-10-20T20:25:23 | 2015-10-20T20:25:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py |
# Standard h2o pyunit bootstrap so 'h2o' and the shared test utilities import.
import sys
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
def javapredict_iris_drf():
# optional parameters
params = {'ntrees':100, 'max_depth':5, 'min_rows':10}
print "Parameter list:"
for k, v in params.items(): print "{0}, {1}".format(k, v)
train = h2o.import_file(pyunit_utils.locate("smalldata/iris/iris_train.csv"))
test = h2o.import_file(pyunit_utils.locate("smalldata/iris/iris_train.csv"))
x = ["sepal_len","sepal_wid","petal_len","petal_wid"]
y = "species"
pyunit_utils.javapredict("random_forest", "class", train, test, x, y, **params)
javapredict_iris_drf()
| [
"[email protected]"
] | |
4693d026ff3eb51517ea1524650b2119fb6af8db | 77c4ca9b33e007daecfc4318537d7babea5dde84 | /tensorflow/python/keras/mixed_precision/experimental/keras_test.py | 498077829da14e10358b297127d0a5d1b268ac03 | [
"Apache-2.0"
] | permissive | RJ722/tensorflow | 308eede8e911e2b6a6930fef3e24a493ab9a2a61 | 6c935289da11da738f2eaed18644082f3a6938d6 | refs/heads/master | 2020-12-20T16:51:12.767583 | 2020-01-25T06:46:50 | 2020-01-25T06:51:20 | 236,138,137 | 2 | 3 | Apache-2.0 | 2020-01-25T07:12:41 | 2020-01-25T07:12:40 | null | UTF-8 | Python | false | false | 46,076 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests mixed precision works correctly with Keras layers and models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.keras import backend
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import layers
from tensorflow.python.keras import models
from tensorflow.python.keras import optimizers
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.mixed_precision.experimental import get_layer_policy
from tensorflow.python.keras.mixed_precision.experimental import loss_scale_optimizer
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.keras.mixed_precision.experimental import test_util as mp_test_util
from tensorflow.python.keras.optimizer_v2 import gradient_descent
from tensorflow.python.keras.saving import save
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training.experimental import loss_scale as loss_scale_module
from tensorflow.python.training.tracking import util as trackable_utils
# Pylint's static analysis incorrectly believes many layers are non-callable, so
# we disable the lint error.
# pylint: disable=not-callable
class AddLayerWithoutAutoCast(mp_test_util.AddLayer):
"""Same as AddLayer, but does not use AutoCastVariables."""
def build(self, _):
dtype = self.dtype
if dtype in ('float16', 'bfloat16'):
dtype = 'float32'
self.v = self.add_weight(
'v', (),
initializer='ones',
dtype=dtype,
experimental_autocast=False,
regularizer=self._regularizer)
self.built = True
def call(self, inputs):
self.assert_input_types(inputs)
assert self.v.dtype in (dtypes.float32, dtypes.float64)
return self._add(inputs, math_ops.cast(self.v, inputs.dtype))
class AddLayerWithFunction(mp_test_util.AddLayer):
"""Same as AddLayer, but _add is decorated with a tf.function."""
@def_function.function
def _add(self, x, y):
return super(AddLayerWithFunction, self)._add(x, y)
# If called outside any strategy.scope() calls, this will return the default
# strategy.
default_strategy_fn = distribution_strategy_context.get_strategy
def create_mirrored_strategy():
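# Uses CPU and GPU replicas when a GPU is available, otherwise CPU only.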
if context.num_gpus() >= 1:
return mirrored_strategy.MirroredStrategy(['cpu:0', 'gpu:0'])
else:
return mirrored_strategy.MirroredStrategy(['cpu:0'])
TESTCASES = ({
'testcase_name': 'base',
'strategy_fn': default_strategy_fn
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy
})
class KerasLayerTest(keras_parameterized.TestCase):
"""Test mixed precision with Keras layers."""
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_mixed_policies_(self, strategy_fn):
for dtype in 'float16', 'bfloat16':
x = constant_op.constant([1.])
policy_name = 'mixed_' + dtype
with strategy_fn().scope(), policy.policy_scope(policy_name):
layer = mp_test_util.AddLayer(assert_type=dtype)
self.assertEqual(layer.dtype, dtypes.float32)
self.assertEqual(get_layer_policy.get_layer_policy(layer).name,
policy_name)
y = layer(x)
self.assertEqual(layer.v.dtype, dtypes.float32)
self.assertEqual(y.dtype, dtype)
self.assertEqual(layer.dtype, dtypes.float32)
self.assertEqual(get_layer_policy.get_layer_policy(layer).name,
policy_name)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(y), 2.)
@test_util.run_in_graph_and_eager_modes
def test_layer_with_int_variable(self):
class LayerWithIntVar(base_layer.Layer):
def build(self, _):
self.v = self.add_weight('v', dtype='int32', trainable=False)
def call(self, inputs):
# Only float variables should be autocasted. This will fail if self.v is
# autocasted to float32
return math_ops.cast(inputs, 'int32') + self.v
x = constant_op.constant([1.])
layer = LayerWithIntVar(dtype=policy.Policy('mixed_float16'))
self.assertEqual(layer(x).dtype, 'int32')
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_layer_with_non_autocast_variable(self, strategy_fn):
x = constant_op.constant([1.])
with strategy_fn().scope():
with policy.policy_scope('mixed_float16'):
layer = AddLayerWithoutAutoCast(assert_type=dtypes.float16)
y = layer(x)
self.assertEqual(layer.v.dtype, dtypes.float32)
self.assertEqual(y.dtype, dtypes.float16)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(y), 2.)
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_layer_calling_tf_function(self, strategy_fn):
x = constant_op.constant([1.])
with strategy_fn().scope():
with policy.policy_scope('mixed_float16'):
layer = AddLayerWithFunction(assert_type=dtypes.float16)
y = layer(x)
self.assertEqual(layer.v.dtype, dtypes.float32)
self.assertEqual(y.dtype, dtypes.float16)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(y), 2.)
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_layer_regularizer_runs_in_var_dtype(self, strategy_fn):
x = constant_op.constant([1.])
with strategy_fn().scope():
with policy.policy_scope('mixed_float16'):
# Test on AddLayer
layer = mp_test_util.AddLayer(
assert_type=dtypes.float16,
regularizer=mp_test_util.IdentityRegularizer())
layer(x)
(regularizer_loss,) = layer.losses
self.assertEqual(regularizer_loss.dtype, dtypes.float32)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(regularizer_loss), 1.)
# Test on AddLayerWithoutAutoCast
layer = AddLayerWithoutAutoCast(
assert_type=dtypes.float16,
regularizer=mp_test_util.IdentityRegularizer())
layer(x)
(regularizer_loss,) = layer.losses
self.assertEqual(regularizer_loss.dtype, dtypes.float32)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(regularizer_loss), 1.)
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_passing_policy_to_layer(self, strategy_fn):
x = constant_op.constant([1.], dtype=dtypes.float16)
with strategy_fn().scope():
# Passing a Policy to 'dtype' sets the policy for that layer.
layer = mp_test_util.AddLayer(
assert_type=dtypes.float16, dtype=policy.Policy('mixed_float16'))
# layer.dtype refers to the variable dtype
self.assertEqual(layer.dtype, dtypes.float32)
layer(x)
self.assertEqual(layer.v.dtype, dtypes.float32)
with policy.policy_scope('mixed_float16'):
# Passing a Policy to dtype overrides the global Policy
layer = mp_test_util.AddLayer(
assert_type=dtypes.float64, dtype=policy.Policy('float64'))
self.assertEqual(layer.dtype, 'float64')
self.assertEqual(layer(x).dtype, dtypes.float64)
self.assertEqual(layer.v.dtype, dtypes.float64)
@test_util.run_in_graph_and_eager_modes
def test_error_passing_policy_string_to_layer(self):
with self.assertRaisesRegexp(
TypeError, "Cannot convert value 'mixed_float16' to a "
"TensorFlow DType"):
# This is not allowed, as otherwise a "mixed_float16" policy could be
# created without an API call that has the name "experimental" in it.
mp_test_util.AddLayer(dtype='mixed_float16')
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_gradient(self, strategy_fn):
x = constant_op.constant([1.])
with strategy_fn().scope() as strategy:
with policy.policy_scope('mixed_float16'):
layer = mp_test_util.AddLayer(assert_type=dtypes.float16)
def run_fn():
with backprop.GradientTape() as tape:
y = layer(x)
# Divide by num_replicas_in_sync, as the effective total loss is the
# sum of each of the replica's losses.
y /= strategy.num_replicas_in_sync
# Learning rate is small enough that if applied to a float16 variable,
# the variable will not change. So this tests the learning rate is not
# applied to a float16 value, but instead to the float32 variable.
opt = gradient_descent.SGD(2**-14)
grad = tape.gradient(y, layer.v)
return opt.apply_gradients([(grad, layer.v)])
op = strategy.experimental_run(run_fn)
if not context.executing_eagerly():
self.evaluate(variables.global_variables_initializer())
self.evaluate(op)
# The gradient with respective to the variable is 1. Since the
# variable is initialized with 1 and the learning rate is 2**-14, the
# new variable value should be: init_val - gradient * learning_rate,
# which is 1 - 1 * 2**-14
self.assertEqual(self.evaluate(layer.v), 1 - 2**-14)
def _test_checkpointing_layer_weights(self, strategy_fn,
mixed_prec_when_saving,
mixed_prec_when_loading):
# In this test, we potentially save with mixed precision enabled and load
# with mixed precision disabled, or vice versa. This is possible because
# variables are float32 regardless of whether mixed precision is enabled.
save_policy = 'mixed_float16' if mixed_prec_when_saving else 'float32'
load_policy = 'mixed_float16' if mixed_prec_when_loading else 'float32'
save_input_dtype = 'float16' if mixed_prec_when_saving else 'float32'
load_input_dtype = 'float16' if mixed_prec_when_loading else 'float32'
# Create a layer and save a checkpoint.
x = constant_op.constant([1.])
with strategy_fn().scope():
with policy.policy_scope(save_policy):
layer = mp_test_util.AddLayer(assert_type=save_input_dtype)
layer(x) # Build layer
layer.set_weights([np.array(100.)])
self.assertEqual(self.evaluate(layer(x)), 101.)
checkpoint = trackable_utils.Checkpoint(layer=layer)
prefix = os.path.join(self.get_temp_dir(), 'ckpt')
save_path = checkpoint.save(prefix)
# Create a new layer and restore the checkpoint.
x = constant_op.constant([1.])
with strategy_fn().scope():
with policy.policy_scope(load_policy):
layer = mp_test_util.AddLayer(assert_type=load_input_dtype)
layer(x) # Build layer
layer.set_weights([np.array(200.)])
self.assertEqual(self.evaluate(layer(x)), 201.)
checkpoint = trackable_utils.Checkpoint(layer=layer)
checkpoint.restore(save_path).assert_consumed().run_restore_ops()
self.assertEqual(layer.get_weights(), [100.])
self.assertEqual(self.evaluate(layer(x)), 101.)
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_checkpointing_layer_weights(self, strategy_fn):
self._test_checkpointing_layer_weights(
strategy_fn, mixed_prec_when_saving=True, mixed_prec_when_loading=True)
self._test_checkpointing_layer_weights(
strategy_fn, mixed_prec_when_saving=True, mixed_prec_when_loading=False)
self._test_checkpointing_layer_weights(
strategy_fn, mixed_prec_when_saving=False, mixed_prec_when_loading=True)
@parameterized.named_parameters(*TESTCASES)
@test_util.run_in_graph_and_eager_modes
def test_config(self, strategy_fn):
x = constant_op.constant([1.], dtype=dtypes.float16)
with strategy_fn().scope():
for layer, dtype in (
(mp_test_util.AddLayer(), 'float32'),
(mp_test_util.AddLayer(dtype='float64'), 'float64'),
(mp_test_util.AddLayer(dtype=policy.Policy('float64')), 'float64')):
config = layer.get_config()
self.assertEqual(config['dtype'], dtype)
self.assertIsInstance(config['dtype'], str)
layer = mp_test_util.AddLayer.from_config(config)
self.assertEqual(layer.dtype, dtype)
self.assertEqual(layer(x).dtype, dtype)
self.assertEqual(layer.v.dtype, dtype)
layer = mp_test_util.AddLayer(dtype=policy.Policy('mixed_float16'))
config = layer.get_config()
self.assertEqual(config['dtype'],
{'class_name': 'Policy',
'config': {'name': 'mixed_float16'}})
layer = mp_test_util.AddLayer.from_config(config)
self.assertEqual(layer.dtype, 'float32')
self.assertEqual(layer(x).dtype, 'float16')
self.assertEqual(layer.v.dtype, 'float32')
layer = mp_test_util.AddLayer(dtype=policy.Policy('mixed_float16',
loss_scale=None))
config = layer.get_config()
self.assertEqual(config['dtype'],
{'class_name': 'Policy',
'config': {'name': 'mixed_float16',
'loss_scale': None}})
layer = mp_test_util.AddLayer.from_config(config)
self.assertEqual(layer.dtype, 'float32')
self.assertEqual(layer(x).dtype, 'float16')
self.assertEqual(layer.v.dtype, 'float32')
layer = mp_test_util.AddLayer(dtype=policy.Policy('float64',
loss_scale=2.))
config = layer.get_config()
self.assertEqual(config['dtype'],
{'class_name': 'Policy',
'config': {'name': 'float64',
'loss_scale': {
'class_name': 'FixedLossScale',
'config': {'loss_scale_value': 2.0}}}})
layer = mp_test_util.AddLayer.from_config(config)
self.assertEqual(layer.dtype, 'float64')
self.assertEqual(layer(x).dtype, 'float64')
self.assertEqual(layer.v.dtype, 'float64')
layer = mp_test_util.AddLayer(dtype=policy.Policy('infer'))
config = layer.get_config()
self.assertIsNone(config['dtype'])
layer = mp_test_util.AddLayer.from_config(config)
# If a layer is serialized with the "infer" policy, when deserialized into
# TF 2 it will have the global policy instead of "infer". This is because
# "infer" is serialized into None, and passing dtype=None in TensorFlow 2
# indicates to use the global policy.
self.assertEqual(layer.dtype, 'float32')
self.assertEqual(layer(x).dtype, 'float32')
self.assertEqual(layer.v.dtype, 'float32')
layer = mp_test_util.AddLayer(dtype=policy.Policy('infer', loss_scale=2.))
config = layer.get_config()
self.assertEqual(config['dtype'],
{'class_name': 'Policy',
'config': {'name': 'infer',
'loss_scale': {
'class_name': 'FixedLossScale',
'config': {'loss_scale_value': 2.0}}}})
layer = mp_test_util.AddLayer.from_config(config)
self.assertEqual(layer.dtype, None)
self.assertEqual(layer(x).dtype, 'float16')
self.assertEqual(layer.v.dtype, 'float16')
@test_util.run_in_graph_and_eager_modes
def test_delete_variable(self):
layer = base_layer.Layer(dtype=policy.Policy('mixed_float16'))
layer.x = layer.add_weight('x')
self.assertEqual(layer.trainable_weights, [layer.x])
del layer.x
self.assertEqual(layer.trainable_weights, [])
@test_util.run_in_graph_and_eager_modes
def test_build_and_call_layer_in_function(self):
layer = mp_test_util.AddLayer(dtype=policy.Policy('mixed_float16'))
@def_function.function
def f():
return layer(1.)
y = f()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(y.dtype, 'float16')
self.assertEqual(layer.v.dtype, 'float32')
self.assertEqual(self.evaluate(y), 2.)
class KerasModelTest(keras_parameterized.TestCase):
"""Test mixed precision with Keras models."""
def _skip_if_strategy_unsupported(self, strategy_fn, check_model_type=False):
if (strategy_fn != default_strategy_fn and
(testing_utils.should_run_eagerly() or
(check_model_type and testing_utils.get_model_type() == 'subclass'))):
self.skipTest('Non-default strategies are unsupported with subclassed '
'models or with passing run_eagerly=True to '
'Model.compile()')
def _skip_if_save_format_unsupported(self, save_format):
model_type = testing_utils.get_model_type()
if save_format == 'h5' and model_type == 'subclass':
self.skipTest('Saving subclassed models with the HDF5 format is '
'unsupported')
if (save_format == 'tf' and model_type == 'subclass' and
not testing_utils.should_run_tf_function()):
self.skipTest('b/142352416: This combination of features is currently '
'broken.')
if (save_format == 'tf' and model_type != 'subclass' and
not context.executing_eagerly()):
self.skipTest('b/134519980: This combination of features is currently '
'broken.')
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'operator',
'strategy_fn': create_mirrored_strategy,
'use_operator': True
}, {
'testcase_name': 'regularizer',
'strategy_fn': create_mirrored_strategy,
'use_regularizer': True
}, {
'testcase_name': 'get_config',
'strategy_fn': create_mirrored_strategy,
'get_config': True,
'use_regularizer': True,
}, {
'testcase_name': 'saved_model',
'strategy_fn': default_strategy_fn,
'save_format': 'tf',
'use_regularizer': True,
}, {
'testcase_name': 'saved_model_input_spec',
'strategy_fn': default_strategy_fn,
'save_format': 'tf',
'use_regularizer': True,
'use_input_spec': True,
}, {
'testcase_name': 'h5',
'strategy_fn': default_strategy_fn,
'save_format': 'h5',
'use_regularizer': True,
}, {
'testcase_name': 'saved_model_distribute',
'strategy_fn': create_mirrored_strategy,
'save_format': 'tf',
'use_regularizer': True,
}, {
'testcase_name': 'saved_model_input_spec_distribute',
'strategy_fn': create_mirrored_strategy,
'save_format': 'tf',
'use_regularizer': True,
'use_input_spec': True,
}, {
'testcase_name': 'h5_distribute',
'strategy_fn': create_mirrored_strategy,
'save_format': 'h5',
'use_regularizer': True,
}, {
'testcase_name': 'norun_distributed',
'strategy_fn': create_mirrored_strategy,
'experimental_run_tf_function': False
})
def test_model(self,
strategy_fn,
use_operator=False,
use_regularizer=False,
policy_name='mixed_float16',
get_config=False,
save_format=None,
use_input_spec=False,
experimental_run_tf_function=True):
self._skip_if_strategy_unsupported(strategy_fn, check_model_type=True)
self._skip_if_save_format_unsupported(save_format)
regularizer = (mp_test_util.IdentityRegularizer() if use_regularizer
else None)
with strategy_fn().scope():
# Pass loss_scale=None, as this test will fail if the DynamicLossScale
# skips applying gradients for a step
with policy.policy_scope(policy.Policy(policy_name, loss_scale=None)):
layer = mp_test_util.AddLayer(
assert_type=dtypes.float16,
use_operator=use_operator,
regularizer=regularizer,
input_shape=(1,))
if use_input_spec:
layer.input_spec = input_spec.InputSpec(shape=(2, 1))
cast_f32_layer = layers.Lambda(lambda x: math_ops.cast(x, 'float32'))
model = testing_utils.get_model_from_layers(
[layer, cast_f32_layer], input_shape=(1,),
input_dtype=dtypes.float16)
if get_config:
config = model.get_config()
model = model.__class__.from_config(
config, custom_objects={'AddLayer': mp_test_util.AddLayer})
(layer,) = (layer for layer in model.layers
if isinstance(layer, mp_test_util.AddLayer))
def loss_fn(y_true, y_pred):
del y_true
return math_ops.reduce_mean(y_pred)
# Learning rate is small enough that if applied to a float16 variable,
# the variable will not change. So this tests that the learning rate is not
# applied to a float16 value, but instead to the float32 variable.
opt = gradient_descent.SGD(2**-14)
model.compile(
opt,
loss=loss_fn,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
x = np.ones((2, 1))
y = np.ones((2, 1))
dataset = dataset_ops.Dataset.from_tensor_slices((x, y)).batch(2)
model.fit(dataset)
# Variable starts at 1, and should have gradient of 2 ** -14 subtracted
# from it.
expected = 1 - 2**-14
if use_regularizer:
# Regularizer adds another 2 ** -14 to the gradient.
expected -= 2**-14
self.assertEqual(backend.eval(layer.v), expected)
if save_format:
with generic_utils.CustomObjectScope(
{'AddLayer': mp_test_util.AddLayer, 'loss_fn': loss_fn}):
self._test_saving(model, dataset, save_format, use_regularizer)
def _test_saving(self, model, dataset, save_format, use_regularizer):
# Save and load model, asserting variable does not change
save_path = os.path.join(self.get_temp_dir(), 'model')
model.save(save_path, save_format=save_format)
model = save.load_model(save_path)
(layer,) = (layer for layer in model.layers
if 'AddLayer' in layer.__class__.__name__)
expected = 1 - 2**-14
if use_regularizer:
expected -= 2**-14
self.assertEqual(backend.eval(layer.v), expected)
# Continue training, and assert variable is correct value
model.fit(dataset)
new_expected = expected - 2 ** -14
if use_regularizer:
new_expected -= 2 ** -14
self.assertEqual(backend.eval(layer.v), new_expected)
# Load saved model again, and assert variable is previous value
model = save.load_model(save_path)
(layer,) = (layer for layer in model.layers
if 'AddLayer' in layer.__class__.__name__)
self.assertEqual(backend.eval(layer.v), expected)
# Ensure various dtype-related aspects of the layer are correct
self.assertEqual(layer.dtype, 'float32')
self.assertEqual(get_layer_policy.get_layer_policy(layer).name,
'mixed_float16')
self.assertEqual(layer.v.dtype, 'float32')
self.assertEqual(layer(np.ones((2, 1))).dtype, 'float16')
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'norun_distributed',
'strategy_fn': create_mirrored_strategy,
'experimental_run_tf_function': False,
})
def test_fixed_loss_scaling(self,
strategy_fn,
experimental_run_tf_function=True):
# Note: We do not test mixed precision in this method, only loss scaling.
self._skip_if_strategy_unsupported(strategy_fn)
loss_scale = 8.
batch_size = 4
with strategy_fn().scope():
x = layers.Input(shape=(1,), batch_size=batch_size)
layer = mp_test_util.AddLayer()
y = layer(x)
# The gradient of 'y' at this point is 1. With loss scaling, the gradient
# is 'loss_scale'. We divide by the batch size since the loss is averaged
# across batch elements.
expected_gradient = loss_scale / batch_size
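# With loss_scale=8 and batch_size=4 this is 8 / 4 = 2.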
identity_with_grad_check_fn = (
mp_test_util.create_identity_with_grad_check_fn([expected_gradient]))
y = core.Lambda(identity_with_grad_check_fn)(y)
model = models.Model(inputs=x, outputs=y)
def loss_fn(y_true, y_pred):
del y_true
return math_ops.reduce_mean(y_pred)
opt = gradient_descent.SGD(1.)
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
model.compile(
opt,
loss=loss_fn,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
self.assertEqual(backend.eval(layer.v), 1)
x = np.ones((batch_size, 1))
y = np.ones((batch_size, 1))
dataset = dataset_ops.Dataset.from_tensor_slices((x, y)).batch(batch_size)
model.fit(dataset)
# Variable starts at 1, and should have gradient of 1 subtracted from it.
expected = 0
self.assertEqual(backend.eval(layer.v), expected)
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'loss_scaling',
'strategy_fn': create_mirrored_strategy,
'use_loss_scaling': True
})
def test_advanced_model(self, strategy_fn, use_loss_scaling=False):
# The advanced model tests mixed-precision-related features that would occur
# in a resnet50 model. It tests a model that has:
# * Multiple layers, some which use auto-cast variables and some which do
# not
# * Regularization on some variables and not others.
# * A fixed loss scale (if use_loss_scaling is True)
self._skip_if_strategy_unsupported(strategy_fn)
strategy = strategy_fn()
if use_loss_scaling:
loss_scale = 8.
else:
loss_scale = None
learning_rate = 2**-14
with strategy.scope():
with policy.policy_scope(policy.Policy('mixed_float16',
loss_scale=loss_scale)):
x = layers.Input(shape=(1,), batch_size=2)
layer1 = mp_test_util.AddLayer(
assert_type=dtypes.float16,
regularizer=mp_test_util.IdentityRegularizer(),
use_operator=True)
layer2 = AddLayerWithoutAutoCast(
assert_type=dtypes.float16, use_operator=True)
layer3 = mp_test_util.AddLayer(assert_type=dtypes.float16,
use_operator=False)
layer4 = AddLayerWithoutAutoCast(
assert_type=dtypes.float16,
regularizer=mp_test_util.IdentityRegularizer(),
use_operator=False)
y = layer1(x)
y = layer2(y)
y = layer3(y)
y = layer4(y)
if use_loss_scaling:
# The gradient of 'y' at this point is 1. With loss scaling, the
# gradient is 'loss_scale'. We divide by the batch size of 2 since the
# loss is averaged across batch elements.
expected_gradient = loss_scale / 2
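# With loss_scale=8 here, the expected gradient is 8 / 2 = 4.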
identity_with_grad_check_fn = (
mp_test_util.create_identity_with_grad_check_fn(
expected_dtype=dtypes.float16,
expected_gradient=[expected_gradient]))
y = core.Lambda(identity_with_grad_check_fn)(y)
y = math_ops.cast(y, dtypes.float32)
model = models.Model(inputs=x, outputs=y)
def loss_fn(y_true, y_pred):
self.assertEqual(y_true.dtype, dtypes.float32)
self.assertEqual(y_pred.dtype, dtypes.float32)
return math_ops.reduce_mean(y_pred)
opt = gradient_descent.SGD(learning_rate)
model.compile(
opt,
loss=loss_fn,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
x = np.ones((2, 1))
y = np.ones((2, 1))
dataset = dataset_ops.Dataset.from_tensor_slices((x, y)).batch(2)
model.fit(dataset)
for layer in (layer1, layer2, layer3, layer4):
if layer.losses:
# Layer has weight regularizer
self.assertEqual(backend.eval(layer.v), 1 - 2 * learning_rate)
else:
# Layer does not have weight regularizer
self.assertEqual(backend.eval(layer.v), 1 - learning_rate)
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'pass_loss_scale_to_policy',
'strategy_fn': create_mirrored_strategy,
'pass_loss_scale_to_policy': True,
}, {
'testcase_name': 'get_config',
'strategy_fn': create_mirrored_strategy,
'get_config': True,
}, {
'testcase_name': 'get_config_and_pass_loss_scale_to_policy',
'strategy_fn': create_mirrored_strategy,
'get_config': True,
'pass_loss_scale_to_policy': True,
}, {
'testcase_name': 'norun_distributed',
'strategy_fn': create_mirrored_strategy,
'experimental_run_tf_function': False,
})
def test_dynamic_loss_scaling(self,
strategy_fn,
pass_loss_scale_to_policy=False,
get_config=False,
experimental_run_tf_function=True):
self._skip_if_strategy_unsupported(strategy_fn)
strategy = strategy_fn()
initial_loss_scale = 2.
batch_size = 4
loss_scale = loss_scale_module.DynamicLossScale(
initial_loss_scale=initial_loss_scale, increment_period=2)
expected_gradient = backend.variable([initial_loss_scale / batch_size],
dtype=dtypes.float16)
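# Initially 2 / 4 = 0.5; the test scales this value by hand as the loss scale changes.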
# If this variable is set to True, the model below will have NaN gradients
have_nan_gradients = backend.variable(False, dtype=dtypes.bool)
with strategy.scope():
opt = gradient_descent.SGD(1.)
if pass_loss_scale_to_policy:
p = policy.Policy('mixed_float16', loss_scale=loss_scale)
else:
p = policy.Policy('mixed_float16', loss_scale=None)
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
with policy.policy_scope(p):
x = layers.Input(
shape=(1,), batch_size=batch_size, dtype=dtypes.float16)
layer = mp_test_util.AddLayer(assert_type=dtypes.float16)
y = layer(x)
identity_with_nan_grads = (
mp_test_util.create_identity_with_nan_gradients_fn(
have_nan_gradients))
y = core.Lambda(identity_with_nan_grads)(y)
identity_with_grad_check_fn = (
mp_test_util.create_identity_with_grad_check_fn(
expected_dtype=dtypes.float16,
expected_gradient=expected_gradient))
y = core.Lambda(identity_with_grad_check_fn)(y)
y = math_ops.cast(y, dtypes.float32)
model = models.Model(inputs=x, outputs=y)
if get_config:
config = model.get_config()
model = model.__class__.from_config(
config, custom_objects={'AddLayer': mp_test_util.AddLayer})
(layer,) = (layer for layer in model.layers
if isinstance(layer, mp_test_util.AddLayer))
def loss_fn(y_true, y_pred):
del y_true
return math_ops.reduce_mean(y_pred)
model.compile(
opt,
loss=loss_fn,
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
self.assertEqual(backend.eval(layer.v), 1)
x = np.ones((batch_size, 1))
y = np.ones((batch_size, 1))
dataset = dataset_ops.Dataset.from_tensor_slices((x, y)).batch(batch_size)
model.fit(dataset)
# The variable starts with 1 and has a gradient of 1, so will go down by 1
# each step.
self.assertEqual(backend.eval(layer.v), 0)
model.fit(dataset)
self.assertEqual(backend.eval(layer.v), -1)
# There have been two steps without NaNs, so the loss scale will double
backend.set_value(expected_gradient,
backend.get_value(expected_gradient * 2))
model.fit(dataset)
self.assertEqual(backend.eval(layer.v), -2)
# Next test with NaN gradients.
backend.set_value(have_nan_gradients, True)
model.fit(dataset)
# Variable should not be updated
self.assertEqual(backend.eval(layer.v), -2)
# Test with finite gradients again
backend.set_value(have_nan_gradients, False)
# The loss scale will be halved due to the NaNs, so the gradient will also
# be halved
backend.set_value(expected_gradient,
backend.get_value(expected_gradient / 2))
model.fit(dataset)
self.assertEqual(backend.eval(layer.v), -3)
@test_util.run_in_graph_and_eager_modes
def test_loss_scale_optimizer_overrides_policy_loss_scale(self):
with policy.policy_scope(policy.Policy('float32', loss_scale=10.)):
opt = gradient_descent.SGD(1.)
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale=5.)
x = layers.Input(shape=(1,))
y = mp_test_util.AddLayer()(x)
model = models.Model(x, y)
model.compile(opt, loss='mse')
self.assertEqual(self.evaluate(model.optimizer.loss_scale()), 5.)
@test_util.run_in_graph_and_eager_modes
def test_pass_invalid_optimizer_with_loss_scaling(self):
with policy.policy_scope(policy.Policy('float32', loss_scale=10.)):
x = layers.Input(shape=(1,))
y = mp_test_util.AddLayer()(x)
model = models.Model(x, y)
if context.executing_eagerly():
error_msg = 'Use a `tf.keras` Optimizer instead'
else:
error_msg = 'optimizer" must be an instance of '
with self.assertRaisesRegexp(ValueError, error_msg):
model.compile(optimizers.SGD(1.), 'mse')
@test_util.run_in_graph_and_eager_modes
def test_functional_model_loss_dtype(self):
with policy.policy_scope('float16'):
x = layers.Input(shape=(1,))
y = mp_test_util.AddLayer()(x)
model = models.Model(x, y)
model.add_loss(math_ops.cast(y, 'float32'))
# The loss should not be casted to the policy's dtype.
self.assertEqual(model.losses[0].dtype, 'float32')
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn,
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'base_h5',
'strategy_fn': default_strategy_fn,
'h5': True,
}, {
'testcase_name': 'distribute_h5',
'strategy_fn': create_mirrored_strategy,
'h5': True,
})
@test_util.run_in_graph_and_eager_modes
def test_save_weights_with_autocast_vars(self, strategy_fn, h5=False):
with strategy_fn().scope():
with policy.policy_scope('mixed_float16'):
x = layers.Input(shape=(1,), batch_size=2)
layer = mp_test_util.AddLayer(assert_type=dtypes.float16)
y = layer(x)
y = math_ops.cast(y, dtypes.float32)
model = models.Model(inputs=x, outputs=y)
model.set_weights([np.array(100.)])
x = np.ones((2, 1))
self.assertAllClose(backend.get_value(model(x)), x + 100.)
suffix = '.h5' if h5 else ''
weights_file = os.path.join(self.get_temp_dir(), 'weights' + suffix)
model.save_weights(weights_file)
model.set_weights([np.array(200.)])
self.assertAllClose(backend.get_value(model(x)), x + 200.)
model.load_weights(weights_file)
self.assertAllClose(backend.get_value(model(x)), x + 100.)
self.assertEqual(model.get_weights(), [np.array(100.)])
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn,
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'different_var_name',
'strategy_fn': default_strategy_fn,
'var_name': 'w'
}, {
'testcase_name': 'different_var_name_distribute',
'strategy_fn': create_mirrored_strategy,
'var_name': 'w'
})
def test_save_slot_variables_with_autocast_vars(self,
strategy_fn,
var_name='v'):
self._skip_if_strategy_unsupported(strategy_fn)
p = policy.Policy('mixed_float16', loss_scale=None)
with strategy_fn().scope(), policy.policy_scope(p):
x = layers.Input(shape=(2,), batch_size=2)
# Having a var_name other than 'v' tests that a fixed bug (b/134713714)
# does not reoccur. The bug was that a crash would occur when saving a
# checkpoint where an AutoCastVariable with a slot variable would have a
# different name than the layer attribute's name (layer.v in this case).
layer = mp_test_util.AddLayer(assert_type=dtypes.float16,
var_name=var_name)
y = layer(x)
y = math_ops.cast(y, dtypes.float32)
model = models.Model(inputs=x, outputs=y)
opt = gradient_descent.SGD(1., 1.)
model.compile(
optimizer=opt,
loss='mse',
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
model.fit(np.zeros((2, 2)), np.zeros((2, 2)), batch_size=2)
weights_file = os.path.join(self.get_temp_dir(), 'weights')
model.save_weights(weights_file)
saved_slot = backend.get_value(opt.get_slot(layer.v, 'momentum'))
model.fit(np.zeros((2, 2)), np.zeros((2, 2)), batch_size=2)
new_slot = backend.get_value(opt.get_slot(layer.v, 'momentum'))
self.assertNotEqual(new_slot, saved_slot)
model.load_weights(weights_file)
restored_slot = backend.get_value(opt.get_slot(layer.v, 'momentum'))
self.assertEqual(restored_slot, saved_slot)
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(*TESTCASES)
def test_save_weights_with_dynamic_loss_scaling(self, strategy_fn):
self._skip_if_strategy_unsupported(strategy_fn)
strategy = strategy_fn()
if (isinstance(strategy, mirrored_strategy.MirroredStrategy) and
not context.executing_eagerly()):
# TODO(b/121381184): Enable running the test in this case.
return
# Create and run model.
with strategy.scope():
x = layers.Input(shape=(2,), batch_size=2, dtype=dtypes.float32)
y = mp_test_util.AddLayer(assert_type=dtypes.float32)(x)
model = models.Model(inputs=x, outputs=y)
loss_scale = loss_scale_module.DynamicLossScale(
initial_loss_scale=1., increment_period=2., multiplier=2.)
opt = gradient_descent.SGD(1.)
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
model.compile(
optimizer=opt,
loss='mse',
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
# Run for 3 steps (6 examples with a batch size of 2)
model.fit(np.zeros((6, 2)), np.zeros((6, 2)), batch_size=2)
self.assertEqual(backend.get_value(loss_scale()), 2)
self.assertEqual(backend.get_value(loss_scale._num_good_steps), 1)
# Save model weights.
save_prefix = os.path.join(self.get_temp_dir(), 'ckpt')
model.save_weights(save_prefix)
# Run model again for 1 step (2 examples with a batch size of 2)
model.fit(np.zeros((2, 2)), np.zeros((2, 2)), batch_size=2)
self.assertEqual(backend.get_value(loss_scale()), 4)
self.assertEqual(backend.get_value(loss_scale._num_good_steps), 0)
# Load model weights and ensure loss scale weights are restored.
model.load_weights(save_prefix)
self.assertEqual(backend.get_value(loss_scale()), 2)
self.assertEqual(backend.get_value(loss_scale._num_good_steps), 1)
@keras_parameterized.run_all_keras_modes
@parameterized.named_parameters(
{
'testcase_name': 'base',
'strategy_fn': default_strategy_fn,
}, {
'testcase_name': 'distribute',
'strategy_fn': create_mirrored_strategy,
}, {
'testcase_name': 'base_h5',
'strategy_fn': default_strategy_fn,
'h5': True,
}, {
'testcase_name': 'distribute_h5',
'strategy_fn': create_mirrored_strategy,
'h5': True,
})
def test_save_model_with_dynamic_loss_scaling(self, strategy_fn, h5=False):
self._skip_if_strategy_unsupported(strategy_fn)
# TODO(reedwm): Support and test saving model with a mixed_[b]float16 policy
# as well.
strategy = strategy_fn()
if (isinstance(strategy, mirrored_strategy.MirroredStrategy) and
not context.executing_eagerly()):
# TODO(b/121381184): Enable running the test in this case.
return
# Create and run model.
with strategy.scope():
x = layers.Input(shape=(2,), batch_size=2, dtype=dtypes.float32)
y = mp_test_util.AddLayer()(x)
model = models.Model(inputs=x, outputs=y)
loss_scale = loss_scale_module.DynamicLossScale(
initial_loss_scale=1., increment_period=2., multiplier=2.)
opt = gradient_descent.SGD(1.)
opt = loss_scale_optimizer.LossScaleOptimizer(opt, loss_scale)
model.compile(
optimizer=opt,
loss='mse',
run_eagerly=testing_utils.should_run_eagerly(),
experimental_run_tf_function=testing_utils.should_run_tf_function())
# Run for 3 steps (6 examples with a batch size of 2)
model.fit(np.zeros((6, 2)), np.zeros((6, 2)), batch_size=2)
self.assertEqual(backend.get_value(loss_scale()), 2)
self.assertEqual(backend.get_value(loss_scale._num_good_steps), 1)
(weight,) = model.trainable_weights
orig_weight = backend.get_value(weight)
# Save model weights.
save_path = os.path.join(self.get_temp_dir(), 'model')
model.save(save_path, save_format='h5' if h5 else 'tf')
# Run model again for 1 step (2 examples with a batch size of 2)
model.fit(np.zeros((2, 2)), np.zeros((2, 2)), batch_size=2)
new_weight = backend.get_value(weight)
self.assertNotEqual(new_weight, orig_weight)
self.assertEqual(backend.get_value(loss_scale()), 4)
self.assertEqual(backend.get_value(loss_scale._num_good_steps), 0)
# Load model weights and ensure loss scale weights are restored.
model = save.load_model(save_path,
custom_objects={'AddLayer': mp_test_util.AddLayer})
loss_scale = model.optimizer.loss_scale
(weight,) = model.trainable_weights
loaded_weight = backend.get_value(weight)
self.assertEqual(loaded_weight, orig_weight)
# Currently the loss scale isn't always saved when the model is saved with
# Model.save(). So we assert the loss scale either has the value when it was
# saved, or the value it was initialized with.
# TODO(reedwm): Always save/restore the loss scale with Model.save().
self.assertIn(backend.get_value(loss_scale()), (1, 2))
self.assertIn(backend.get_value(loss_scale._num_good_steps), (0, 1))
if __name__ == '__main__':
base_layer_utils.enable_v2_dtype_behavior()
test.main()
| [
"[email protected]"
] | |
334b108ede915f2978c4c60ecab922e2db736edb | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/catapult/catapult_build/js_checks.py | 78cc745ebd8c0a9d391d4f7b6b80392c3d74fce2 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 3,390 | py | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import eslint
from py_vulcanize import strip_js_comments
from catapult_build import parse_html
class JSChecker(object):
def __init__(self, input_api, output_api, file_filter=None):
self.input_api = input_api
self.output_api = output_api
if file_filter:
self.file_filter = file_filter
else:
self.file_filter = lambda x: True
def RunChecks(self):
"""Checks for violations of the Chromium JavaScript style guide.
See:
http://chromium.org/developers/web-development-style-guide#TOC-JavaScript
"""
results = []
affected_files = self.input_api.AffectedFiles(
file_filter=self.file_filter,
include_deletes=False)
def ShouldCheck(f):
if f.LocalPath().endswith('.js'):
return True
if f.LocalPath().endswith('.html'):
return True
return False
affected_js_files = filter(ShouldCheck, affected_files)
error_lines = []
for f in affected_js_files:
contents = list(f.NewContents())
error_lines += CheckStrictMode(
'\n'.join(contents),
is_html_file=f.LocalPath().endswith('.html'))
if affected_js_files:
success, eslint_output = eslint.RunEslint(
[f.AbsoluteLocalPath() for f in affected_js_files])
if not success:
error_lines.append('\neslint found lint errors:')
error_lines.append(eslint_output)
if error_lines:
error_lines.insert(0, 'Found JavaScript style violations:')
results.append(
_MakeErrorOrWarning(self.output_api, '\n'.join(error_lines)))
return results
def _ErrorHighlight(start, length):
"""Produces a row of '^'s to underline part of a string."""
return start * ' ' + length * '^'
def _MakeErrorOrWarning(output_api, error_text):
return output_api.PresubmitError(error_text)
def CheckStrictMode(contents, is_html_file=False):
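"""Returns error lines for scripts whose first statement is not "'use strict'"."""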
statements_to_check = []
if is_html_file:
statements_to_check.extend(_FirstStatementsInScriptElements(contents))
else:
statements_to_check.append(_FirstStatement(contents))
error_lines = []
for s in statements_to_check:
if s != "'use strict'":
error_lines.append('Expected "\'use strict\'" as first statement, '
'but found "%s" instead.' % s)
return error_lines
def _FirstStatementsInScriptElements(contents):
"""Returns a list of first statements found in each <script> element."""
soup = parse_html.BeautifulSoup(contents)
script_elements = soup.find_all('script', src=None)
return [_FirstStatement(e.get_text()) for e in script_elements]
def _FirstStatement(contents):
"""Extracts the first statement in some JS source code."""
stripped_contents = strip_js_comments.StripJSComments(contents).strip()
matches = re.match('^(.*?);', stripped_contents, re.DOTALL)
if not matches:
return ''
return matches.group(1).strip()
def RunChecks(input_api, output_api, excluded_paths=None):
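"""Runs JSChecker over affected files, skipping paths matching excluded_paths."""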
def ShouldCheck(affected_file):
if not excluded_paths:
return True
path = affected_file.LocalPath()
return not any(re.match(pattern, path) for pattern in excluded_paths)
return JSChecker(input_api, output_api, file_filter=ShouldCheck).RunChecks()
| [
"[email protected]"
] | |
dd4532867225260b493c78ccb86707a010c68f3e | 412b0612cf13e9e28b9ea2e625975f3d9a2f52b6 | /2022/16/pressure_release.py | 450cad8ba653b096a272d2898024b05e9b964992 | [] | no_license | AlexClowes/advent_of_code | 2cf6c54a5f58db8482d1692a7753b96cd84b6279 | d2158e3a4edae89071e6a88c9e874a9a71d4d0ec | refs/heads/master | 2022-12-24T19:02:07.815437 | 2022-12-23T17:35:53 | 2022-12-23T17:35:53 | 225,618,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,151 | py | from collections import defaultdict
import heapq
from itertools import combinations
import re
def main():
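# Parse each "Valve ..." line into a flow rate and its tunnel connections.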
pat = r"Valve (\w+) has flow rate=(\d+); tunnels? leads? to valves? ([,\s\w]+)"
flow_rate = {}
graph = defaultdict(dict)
with open("valves.txt") as f:
for line in f:
valve, rate, tunnels = re.match(pat, line.strip()).groups()
flow_rate[valve] = int(rate)
for other_valve in tunnels.split(", "):
graph[valve][other_valve] = 1
def get_min_dist(start, dest):
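# Dijkstra: repeatedly expand the closest unvisited valve until dest is reached.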
seen = set()
q = [(0, start)]
while q:
total_dist, pos = heapq.heappop(q)
if pos == dest:
return total_dist
if pos not in seen:
seen.add(pos)
for new_pos, dist in graph[pos].items():
heapq.heappush(q, (total_dist + dist, new_pos))
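# A valve opened first contributes for 30 - travel - 1 = 29 - travel minutes.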
start_times = {
valve: 29 - get_min_dist("AA", valve)
for valve, flow in flow_rate.items()
if flow
}
# Make graph connected
for v1, v2 in combinations(list(graph), 2):
if (v1 == "AA" or flow_rate[v1]) and flow_rate[v2]:
graph[v1][v2] = graph[v2][v1] = get_min_dist(v1, v2)
# Prune valves with no flow rate
for v1 in list(graph):
if not flow_rate[v1]:
for v2 in graph[v1]:
del graph[v2][v1]
del graph[v1]
def max_flow(time, pos, seen, flow):
ret = flow
for new_pos in graph:
if new_pos in seen:
continue
new_time = time - graph[pos][new_pos] - 1
if new_time <= 0:
continue
new_seen = seen + (new_pos,)
new_flow = flow + new_time * flow_rate[new_pos]
ret = max(ret, max_flow(new_time, new_pos, new_seen, new_flow))
return ret
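    # max_flow is a depth-first search over orderings of the still-closed
    # valves. Worked step (hypothetical numbers): with 26 minutes left, moving
    # 2 tunnels to a rate-10 valve and opening it banks (26 - 2 - 1) * 10 = 230
    # pressure before recursing from that valve.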
print(
max(
max_flow(time, valve, (valve,), time * flow_rate[valve])
for valve, time in start_times.items()
)
)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
b52273a1e19daac8b6173e5b3287d5a6ad803d06 | 4bfdb3ad5b44044113d3d6b586e10281d3987c9e | /infra/bots/recipe_modules/upload_dm_results/api.py | 3005f3e77790c38666ee47a5e9b661f92619102c | [
"BSD-3-Clause"
] | permissive | imxiangpeng/skia | dcdca3538564f2707fde10b43bdcaa6d9b5e0103 | 3e7cddaf32e280fe9f32eec5bfdd8168ca4941b6 | refs/heads/master | 2021-01-23T02:40:18.169644 | 2017-03-23T20:24:50 | 2017-03-24T01:24:57 | 86,016,340 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,932 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe for uploading DM results.
import calendar
from recipe_engine import recipe_api
DM_JSON = 'dm.json'
UPLOAD_ATTEMPTS = 5
VERBOSE_LOG = 'verbose.log'
class UploadDmResultsApi(recipe_api.RecipeApi):
def cp(self, name, src, dst, extra_args=None):
cmd = ['gsutil', 'cp']
if extra_args:
cmd.extend(extra_args)
cmd.extend([src, dst])
name = 'upload %s' % name
for i in xrange(UPLOAD_ATTEMPTS):
step_name = name
if i > 0:
step_name += ' (attempt %d)' % (i+1)
try:
self.m.step(step_name, cmd=cmd)
break
except self.m.step.StepFailure:
if i == UPLOAD_ATTEMPTS - 1:
raise
def run(self):
builder_name = self.m.properties['buildername']
revision = self.m.properties['revision']
results_dir = self.m.path['start_dir'].join('dm')
# Move dm.json and verbose.log to their own directory.
json_file = results_dir.join(DM_JSON)
log_file = results_dir.join(VERBOSE_LOG)
tmp_dir = self.m.path['start_dir'].join('tmp_upload')
self.m.shutil.makedirs('tmp dir', tmp_dir, infra_step=True)
self.m.shutil.copy('copy dm.json', json_file, tmp_dir)
self.m.shutil.copy('copy verbose.log', log_file, tmp_dir)
self.m.shutil.remove('rm old dm.json', json_file)
self.m.shutil.remove('rm old verbose.log', log_file)
# Upload the images.
image_dest_path = 'gs://%s/dm-images-v1' % self.m.properties['gs_bucket']
files_to_upload = self.m.file.glob(
'find images',
results_dir.join('*'),
test_data=[results_dir.join('someimage.png')],
infra_step=True)
if len(files_to_upload) > 0:
self.cp('images', results_dir.join('*'), image_dest_path)
# Upload the JSON summary and verbose.log.
now = self.m.time.utcnow()
summary_dest_path = '/'.join([
'dm-json-v1',
str(now.year ).zfill(4),
str(now.month).zfill(2),
str(now.day ).zfill(2),
str(now.hour ).zfill(2),
revision,
builder_name,
str(int(calendar.timegm(now.utctimetuple())))])
# Trybot results are further siloed by issue/patchset.
issue = str(self.m.properties.get('issue', ''))
patchset = str(self.m.properties.get('patchset', ''))
if self.m.properties.get('patch_storage', '') == 'gerrit':
issue = str(self.m.properties['patch_issue'])
patchset = str(self.m.properties['patch_set'])
if issue and patchset:
summary_dest_path = '/'.join((
'trybot', summary_dest_path, issue, patchset))
summary_dest_path = 'gs://%s/%s' % (self.m.properties['gs_bucket'],
summary_dest_path)
self.cp('JSON and logs', tmp_dir.join('*'), summary_dest_path,
['-z', 'json,log'])
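    # Illustration of the destination layout (hypothetical values): revision
    # "abc123" on builder "Test-Ubuntu", uploaded at epoch 1500000000
    # (2017-07-14 02:xx UTC), would land near
    #   gs://<gs_bucket>/dm-json-v1/2017/07/14/02/abc123/Test-Ubuntu/1500000000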
| [
"[email protected]"
] | |
e8aa6f13db12e6079697b3754a63bb24bcc8c34c | 279f415dd1e06c594c6c87deda57e201c73c4542 | /test/espnet2/schedulers/test_noam_lr.py | 1e34d4684444b4b4dde33466d11137fe8f499a5a | [
"Apache-2.0"
] | permissive | espnet/espnet | f7ba47271c1a6b1ed606dbbfb04a7f14220bb585 | bcd20948db7846ee523443ef9fd78c7a1248c95e | refs/heads/master | 2023-08-28T23:43:34.238336 | 2023-08-23T02:51:39 | 2023-08-23T02:51:39 | 114,054,873 | 7,242 | 2,244 | Apache-2.0 | 2023-09-14T08:01:11 | 2017-12-13T00:45:11 | Python | UTF-8 | Python | false | false | 313 | py | import torch
from espnet2.schedulers.noam_lr import NoamLR
def test_NoamLR():
linear = torch.nn.Linear(2, 2)
opt = torch.optim.SGD(linear.parameters(), 0.1)
sch = NoamLR(opt)
lr = opt.param_groups[0]["lr"]
opt.step()
sch.step()
lr2 = opt.param_groups[0]["lr"]
assert lr != lr2
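    # For reference, assuming NoamLR follows the standard Noam schedule from
    # "Attention Is All You Need":
    #   lr(step) = base_lr * warmup**0.5 * min(step**-0.5, step * warmup**-1.5)
    # i.e. linear warmup followed by ~step**-0.5 decay, so a single scheduler
    # step is enough to change the learning rate, which is what is asserted.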
| [
"[email protected]"
] | |
270aaddedb72e83d89edde7860500e85380b8b0d | 4dd695521343d56ff943e8c1768343d7680714e3 | /experiments/scripts_auto_crossdataset_ynoguti_braccent/config_SVM_1024_crossdataset.py | 70cd8e2343abb6a9b2f80d3d2b006aed092d244f | [] | no_license | natharb/environment | ea659ee541f6473e92b5b30c549e52b66f47b280 | 86e6cee6e01d2370abeb7c55a2c8a15001735919 | refs/heads/master | 2021-09-28T02:39:02.222966 | 2018-11-13T12:03:34 | 2018-11-13T12:03:34 | 139,762,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | #!/usr/bin/env python
# vim: set fileencoding=utf-8 :
#Nathália Alves Rocha Batista ([email protected])
import sys
sys.path.insert(0, '.')
import bob.bio.spear
import bob.bio.gmm
import numpy
import scipy.spatial
temp_directory = './results/crossdataset_ynoguti_braccent/SVM/1024/temp/'
result_directory = './results/crossdataset_ynoguti_braccent/SVM/1024/results/'
sub_directory = 'subdirectory'
database = 'database_SVM_1024_crossdataset.py'
groups = ['dev']
#groups = ['dev', 'eval']
preprocessor = bob.bio.spear.preprocessor.Energy_2Gauss(max_iterations = 10, convergence_threshold = 0.0005, variance_threshold = 0.0005, win_length_ms = 20., win_shift_ms = 10., smoothing_window = 10)
extractor = bob.bio.spear.extractor.Cepstral(win_length_ms = 25, win_shift_ms = 10, n_filters = 24 , dct_norm = False, f_min = 0, f_max = 4000, delta_win = 2, mel_scale = True, with_energy = True, with_delta = True, with_delta_delta = True, n_ceps = 19, pre_emphasis_coef = 0.97)
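# Feature dimensionality implied by the extractor settings above: 19 cepstra
# plus the energy term give 20 static features, and with delta and delta-delta
# appended each 25 ms frame (10 ms hop) yields a 3 * 20 = 60-dimensional vector.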
algorithm = bob.bio.gmm.algorithm.SVMGMM(number_of_gaussians = 1024, kmeans_training_iterations = 10, gmm_training_iterations = 10,
training_threshold = 5e-4, variance_threshold = 5e-4, update_weights = True, update_means = True, update_variances = True, relevance_factor = 4, gmm_enroll_iterations = 1, responsibility_threshold = 0, INIT_SEED = 5489)
#parallel = 40
#verbose = 2
| [
"[email protected]"
] | |
378055851595a6f4509513667890d3915c1def51 | 3b504a983f1807ae7c5af51078bfab8c187fc82d | /client/gui/HUD2/features/AttitudeIndicator/AttitudeIndicatorSource.py | d55efec79392f37f2e6fbd53db0997fa8ae8c1b0 | [] | no_license | SEA-group/wowp_scripts | 7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58 | 2fe54a44df34f2dcaa6860a23b835dcd8dd21402 | refs/heads/master | 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | # Embedded file name: scripts/client/gui/HUD2/features/AttitudeIndicator/AttitudeIndicatorSource.py
from gui.HUD2.core.DataPrims import DataSource
from gui.HUD2.hudFeatures import Feature
class AttitudeIndicatorSource(DataSource):
def __init__(self, features):
self._model = features.require(Feature.GAME_MODEL).attitudeIndicator
self._uiSettings = features.require(Feature.UI_SETTINGS)
self._uiSettings.eAttitudeModeChanged += self._updateAttitudeMode
self._clientArena = features.require(Feature.CLIENT_ARENA)
if self._clientArena.isAllServerDataReceived():
self._setupModel(None)
else:
self._clientArena.onNewAvatarsInfo += self._setupModel
return
def _setupModel(self, newInfos):
self._model.attitudeMode = self._uiSettings.gameUI['attitudeMode']
def _updateAttitudeMode(self, *args, **kwargs):
self._model.attitudeMode = self._uiSettings.gameUI['attitudeMode']
def dispose(self):
self._clientArena.onNewAvatarsInfo -= self._setupModel
        self._uiSettings.eAttitudeModeChanged -= self._updateAttitudeMode
self._model = None
self._uiSettings = None
self._clientArena = None
return | [
"[email protected]"
] | |
2dbeed1dcdd89e81a9c6703e2d459c3d9b55a577 | ba46f774793ff06ae12cbed51a024142d2b0f63e | /topiary/cli/outputs.py | a4d26ea8ba21e21749e38ca4d7e35b53b424729f | [
"Apache-2.0"
] | permissive | Saintyven/topiary | 05fe9eb9b0a9bbb851564d0d835d967bf1fce6ab | 04f0077bc4bf1ad350a0e78c26fa48c55fe7813b | refs/heads/master | 2021-09-07T17:21:56.267182 | 2018-02-26T19:56:58 | 2018-02-26T19:56:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,255 | py | # Copyright (c) 2017. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common commandline arguments for output files
"""
from __future__ import print_function, division, absolute_import
import logging
def add_output_args(arg_parser):
output_group = arg_parser.add_argument_group(
title="Output",
description="How and where to write results")
output_group.add_argument(
"--output-csv",
default=None,
help="Path to output CSV file")
output_group.add_argument(
"--output-html",
default=None,
help="Path to output HTML file")
output_group.add_argument(
"--output-csv-sep",
default=",",
help="Separator for CSV file")
output_group.add_argument(
"--subset-output-columns",
nargs="*")
output_group.add_argument(
"--rename-output-column",
nargs=2,
action="append",
help=(
"Rename original column (first parameter) to new"
" name (second parameter)"))
output_group.add_argument(
"--print-columns",
default=False,
action="store_true",
help="Print columns before writing data to file(s)")
return output_group
def write_outputs(
df,
args,
print_df_before_filtering=False,
print_df_after_filtering=False):
if print_df_before_filtering:
print(df)
if args.subset_output_columns:
subset_columns = []
for column in args.subset_output_columns:
if column not in df.columns:
logging.warn(
"Invalid column name '%s', available: %s" % (
column, list(df.columns)))
else:
subset_columns.append(column)
df = df[subset_columns]
if args.rename_output_column:
for (old_name, new_name) in args.rename_output_column:
if old_name not in df.columns:
logging.warn(
"Can't rename column '%s' since it doesn't exist, available: %s" % (
old_name, list(df.columns)))
else:
df.rename(columns={old_name: new_name}, inplace=True)
if print_df_after_filtering:
print(df)
if args.print_columns:
print("Columns:")
for column in df.columns:
print("-- %s" % column)
if args.output_csv:
print("Saving %s..." % args.output_csv)
df.to_csv(
args.output_csv,
index=True,
index_label="#",
sep=args.output_csv_sep)
if args.output_html:
print("Saving %s..." % args.output_html)
df.to_html(args.output_html, index=True)
| [
"[email protected]"
] | |
f0cfc11ac7bdbd46f6556b49a3637218927f3cb7 | b7f3edb5b7c62174bed808079c3b21fb9ea51d52 | /build/android/gyp/jinja_template.py | ba335a248b06620e7b8f9d1dac8dfd6b516e3ac5 | [
"BSD-3-Clause"
] | permissive | otcshare/chromium-src | 26a7372773b53b236784c51677c566dc0ad839e4 | 64bee65c921db7e78e25d08f1e98da2668b57be5 | refs/heads/webml | 2023-03-21T03:20:15.377034 | 2020-11-16T01:40:14 | 2020-11-16T01:40:14 | 209,262,645 | 18 | 21 | BSD-3-Clause | 2023-03-23T06:20:07 | 2019-09-18T08:52:07 | null | UTF-8 | Python | false | false | 6,456 | py | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Renders one or more template files using the Jinja template engine."""
import codecs
import argparse
import os
import sys
from util import build_utils
from util import resource_utils
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib.constants import host_paths
# Import jinja2 from third_party/jinja2
sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
import jinja2 # pylint: disable=F0401
class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
def __init__(self, searchpath):
jinja2.FileSystemLoader.__init__(self, searchpath)
self.loaded_templates = set()
def get_source(self, environment, template):
contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
self, environment, template)
self.loaded_templates.add(os.path.relpath(filename))
return contents, filename, uptodate
class JinjaProcessor(object):
"""Allows easy rendering of jinja templates with input file tracking."""
def __init__(self, loader_base_dir, variables=None):
self.loader_base_dir = loader_base_dir
self.variables = variables or {}
self.loader = _RecordingFileSystemLoader(loader_base_dir)
self.env = jinja2.Environment(loader=self.loader)
self.env.undefined = jinja2.StrictUndefined
self.env.line_comment_prefix = '##'
self.env.trim_blocks = True
self.env.lstrip_blocks = True
self._template_cache = {} # Map of path -> Template
def Render(self, input_filename, variables=None):
input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
template = self._template_cache.get(input_rel_path)
if not template:
template = self.env.get_template(input_rel_path)
self._template_cache[input_rel_path] = template
return template.render(variables or self.variables)
def GetLoadedTemplates(self):
return list(self.loader.loaded_templates)
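# Illustrative usage of JinjaProcessor (hypothetical paths, not part of the
# build entry points below):
#   processor = JinjaProcessor('/src', variables={'channel': 'beta'})
#   text = processor.Render('/src/templates/AndroidManifest.xml')
#   deps = processor.GetLoadedTemplates()  # feeds the --check-includes logic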
def _ProcessFile(processor, input_filename, output_filename):
output = processor.Render(input_filename)
  # If |output| is the same as the current file content, skip the write so
  # ninja's restat can avoid rebuilding things that depend on it.
if os.path.isfile(output_filename):
with codecs.open(output_filename, 'r', 'utf-8') as f:
if f.read() == output:
return
with codecs.open(output_filename, 'w', 'utf-8') as output_file:
output_file.write(output)
def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
with build_utils.TempDir() as temp_dir:
path_info = resource_utils.ResourceInfoFile()
for input_filename in input_filenames:
relpath = os.path.relpath(os.path.abspath(input_filename),
os.path.abspath(inputs_base_dir))
if relpath.startswith(os.pardir):
raise Exception('input file %s is not contained in inputs base dir %s'
% (input_filename, inputs_base_dir))
output_filename = os.path.join(temp_dir, relpath)
parent_dir = os.path.dirname(output_filename)
build_utils.MakeDirectory(parent_dir)
_ProcessFile(processor, input_filename, output_filename)
path_info.AddMapping(relpath, input_filename)
path_info.Write(outputs_zip + '.info')
build_utils.ZipDir(outputs_zip, temp_dir)
def _ParseVariables(variables_arg, error_func):
variables = {}
for v in build_utils.ParseGnList(variables_arg):
if '=' not in v:
error_func('--variables argument must contain "=": ' + v)
name, _, value = v.partition('=')
variables[name] = value
return variables
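# Illustrative call (hypothetical input, assuming ParseGnList splits the
# GN-style list on whitespace as the --variables help text suggests):
#   _ParseVariables('channel=beta mstone=39', parser.error)
#     -> {'channel': 'beta', 'mstone': '39'}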
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--inputs', required=True,
help='GN-list of template files to process.')
parser.add_argument('--includes', default='',
help="GN-list of files that get {% include %}'ed.")
parser.add_argument('--output', help='The output file to generate. Valid '
'only if there is a single input.')
parser.add_argument('--outputs-zip', help='A zip file for the processed '
'templates. Required if there are multiple inputs.')
parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
'of the inputs. Each output\'s path in the output zip '
'will match the relative path from INPUTS_BASE_DIR to '
'the input. Required if --output-zip is given.')
parser.add_argument('--loader-base-dir', help='Base path used by the '
'template loader. Must be a common ancestor directory of '
'the inputs. Defaults to DIR_SOURCE_ROOT.',
default=host_paths.DIR_SOURCE_ROOT)
parser.add_argument('--variables', help='Variables to be made available in '
'the template processing environment, as a GYP list '
'(e.g. --variables "channel=beta mstone=39")', default='')
parser.add_argument('--check-includes', action='store_true',
help='Enable inputs and includes checks.')
options = parser.parse_args()
inputs = build_utils.ParseGnList(options.inputs)
includes = build_utils.ParseGnList(options.includes)
if (options.output is None) == (options.outputs_zip is None):
parser.error('Exactly one of --output and --output-zip must be given')
if options.output and len(inputs) != 1:
parser.error('--output cannot be used with multiple inputs')
if options.outputs_zip and not options.inputs_base_dir:
parser.error('--inputs-base-dir must be given when --output-zip is used')
variables = _ParseVariables(options.variables, parser.error)
processor = JinjaProcessor(options.loader_base_dir, variables=variables)
if options.output:
_ProcessFile(processor, inputs[0], options.output)
else:
_ProcessFiles(processor, inputs, options.inputs_base_dir,
options.outputs_zip)
if options.check_includes:
all_inputs = set(processor.GetLoadedTemplates())
all_inputs.difference_update(inputs)
all_inputs.difference_update(includes)
if all_inputs:
raise Exception('Found files not listed via --includes:\n' +
'\n'.join(sorted(all_inputs)))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
2f167b5a9bd0dc459aa87415a30c103af343f999 | 28bdfca0131db38323fc28f6178425dc2c86e6ca | /contactApp/__init__.py | d5c7ce7bab4ecaf90d75d91a360af75fe5d769d5 | [] | no_license | my-xh/hengDaProject | d8879d6755b24f230361b25d0e88d205fec98a1d | 45aa43aabc798652a0f05d4e93d1c2c7ae819e4c | refs/heads/master | 2023-04-20T07:04:57.424269 | 2021-05-13T11:46:48 | 2021-05-13T11:46:48 | 345,997,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | default_app_config = 'contactApp.apps.ContactappConfig'
| [
"[email protected]"
] | |
e9efec337c980a999a72131b97ffb0caae8b4eb5 | b1e958f90059cd97b8c1cb62abb34948fa867569 | /Mini-projects/7 - Spaceship/mini-project - Student 5.py | 9968fe6bacc39a38cfc68711ff7b017b3ec4e1d5 | [] | no_license | deltaworld/python-programming-coursera | ff756f3522a79f9b2101607f8156adc8d256db10 | 740ef3b02c05323e8da691ccbaa17337deb32db7 | refs/heads/master | 2021-01-23T03:04:25.476645 | 2015-07-15T13:25:22 | 2015-07-15T13:25:22 | 39,136,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,476 | py | # http://www.codeskulptor.org/#user16_7BY7hy435a_16.py
#This game was designed to run in Chrome browser
# program template for Spaceship
import simplegui
import math
import random
# globals for user interface
WIDTH = 800
HEIGHT = 600
ROTATION_VEL = 0.2
FORWARD_VEL = 1
SPACE_DEBRIS_FRICTION = 0.925
score = 0
lives = 3
time = 0.5
launch = 0
class ImageInfo:
def __init__(self, center, size, radius = 0, lifespan = None, animated = False):
self.center = center
self.size = size
self.radius = radius
if lifespan:
self.lifespan = lifespan
else:
self.lifespan = float('inf')
self.animated = animated
def get_center(self):
return self.center
def get_size(self):
return self.size
def get_radius(self):
return self.radius
def get_lifespan(self):
return self.lifespan
def get_animated(self):
return self.animated
# art assets created by Kim Lathrop, may be freely re-used in non-commercial projects, please credit Kim
# debris images - debris1_brown.png, debris2_brown.png, debris3_brown.png, debris4_brown.png
# debris1_blue.png, debris2_blue.png, debris3_blue.png, debris4_blue.png, debris_blend.png
debris_info = ImageInfo([320, 240], [640, 480])
# Randomly select debris color
debris_color = random.choice(['debris1_brown.png', 'debris2_brown.png', 'debris3_brown.png', 'debris4_brown.png',
'debris1_blue.png', 'debris2_blue.png', 'debris3_blue.png', 'debris4_blue.png', 'debris_blend.png'])
debris_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/" + debris_color)
# nebula images - nebula_brown.png, nebula_blue.png
nebula_info = ImageInfo([400, 300], [800, 600])
# Randomly select nebula
nebula_color = random.choice(["nebula_brown.png", "nebula_blue.png"])
nebula_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/" + nebula_color)
# splash image
splash_info = ImageInfo([200, 150], [400, 300])
splash_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/splash.png")
# ship image
ship_info = ImageInfo([45, 45], [90, 90], 35)
ship_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/double_ship.png")
# missile image - shot1.png, shot2.png, shot3.png
missile_info = ImageInfo([5,5], [10, 10], 3, 50)
missile_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/shot2.png")
# asteroid images - asteroid_blue.png, asteroid_brown.png, asteroid_blend.png
asteroid_info = ImageInfo([45, 45], [90, 90], 40)
asteroid_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/asteroid_blue.png")
# animated explosion - explosion_orange.png, explosion_blue.png, explosion_blue2.png, explosion_alpha.png
explosion_info = ImageInfo([64, 64], [128, 128], 17, 24, True)
explosion_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/explosion_alpha.png")
# sound assets purchased from sounddogs.com, please do not redistribute
soundtrack = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/soundtrack.mp3")
missile_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/missile.mp3")
missile_sound.set_volume(.5)
ship_thrust_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/thrust.mp3")
explosion_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/explosion.mp3")
# helper functions to handle transformations
def angle_to_vector(ang):
return [math.cos(ang), math.sin(ang)]
def dist(p,q):
return math.sqrt((p[0] - q[0]) ** 2+(p[1] - q[1]) ** 2)
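# Worked examples for the helpers above:
#   angle_to_vector(0)           -> [1.0, 0.0]
#   angle_to_vector(math.pi / 2) -> [0.0, 1.0] up to floating-point error
#   dist((0, 0), (3, 4))         -> 5.0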
# Ship class
class Ship:
def __init__(self, pos, vel, angle, image, info):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.thrust = 0
self.angle = angle
self.angle_vel = 0
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.radius = info.get_radius()
self.friction = SPACE_DEBRIS_FRICTION
def draw(self,canvas):
#Draw the ship image
#canvas.draw_image(image, center_source, width_height_source, center_dest, width_height_dest, rotation)
canvas.draw_image(self.image, self.image_center, self.image_size, self.pos, self.image_size, self.angle)
def update(self):
# control position and velocity update behavior
self.angle += self.angle_vel
#calculate thrust acceleration
if self.thrust:
orient = angle_to_vector(self.angle)
self.vel[0] += FORWARD_VEL * orient[0]
self.vel[1] += FORWARD_VEL * orient[1]
ship_thrust_sound.play() #play thruster sound
else:
ship_thrust_sound.rewind() #rewind thruster sound
#velocity
self.pos[0] += self.vel[0]
self.pos[1] += self.vel[1]
#friction code
self.vel[0] *= self.friction
self.vel[1] *= self.friction
# check boundaries and roll over
self.pos[0] %= WIDTH
self.pos[1] %= HEIGHT
# Sprite class
class Sprite:
def __init__(self, pos, vel, ang, ang_vel, image, info, sound = None):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.angle = ang
self.angle_vel = ang_vel
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.radius = info.get_radius()
self.lifespan = info.get_lifespan()
self.animated = info.get_animated()
self.age = 0
if sound:
sound.rewind()
sound.play()
def draw(self, canvas):
#canvas.draw_image(image, center_source, width_height_source, center_dest, width_height_dest, rotation)
canvas.draw_image(self.image, self.image_center, self.image_size, self.pos, self.image_size, self.angle)
def update(self):
self.angle += self.angle_vel
#velocity
self.pos[0] += self.vel[0]
self.pos[1] += self.vel[1]
# check boundaries and roll over
self.pos[0] %= WIDTH
self.pos[1] %= HEIGHT
if self.lifespan > 0:
self.lifespan -= 1
def missile_launch():
global launch
# on space bar launch missile
if not launch:
#Update launch position angle
orient = angle_to_vector(my_ship.angle)
#Launch missile from tip of ship.
a_missile.pos = [my_ship.pos[0] + ship_info.size[0] / 2 * orient[0] , my_ship.pos[1] + ship_info.size[0] / 2 * orient[1] ]
#set velocity vector
a_missile.vel = [ my_ship.vel[0] + 10 * orient[0], my_ship.vel[1] + 10 * orient[1]]
a_missile.lifespan = 60
#setup sound
missile_sound.rewind()
missile_sound.play()
launch = True
else:
launch = False
#purpose of this function is to toggle thrust on and off based on handler.
def start_ship_thrust():
if my_ship.thrust:
my_ship.image_center[0] -= ship_info.size[0]
else:
my_ship.image_center[0] += ship_info.size[0]
my_ship.thrust ^= 1
def ship_left():
if my_ship.angle_vel == 0:
my_ship.angle_vel = -ROTATION_VEL
else:
my_ship.angle_vel = 0
def ship_right():
if my_ship.angle_vel == 0:
my_ship.angle_vel = +ROTATION_VEL
else:
my_ship.angle_vel = 0
inputs = {"space": missile_launch,
"up": start_ship_thrust,
"left": ship_left,
"right": ship_right}
# define keyhandlers to control firing_angle
def keydown(key):
for i in inputs:
if key == simplegui.KEY_MAP[i]:
inputs[i]()
def keyup(key):
for i in inputs:
if key == simplegui.KEY_MAP[i]:
inputs[i]()
def draw(canvas):
global time
# animate background
time += 1
center = debris_info.get_center()
size = debris_info.get_size()
wtime = (time / 8) % center[0]
canvas.draw_image(nebula_image, nebula_info.get_center(), nebula_info.get_size(), [WIDTH / 2, HEIGHT / 2], [WIDTH, HEIGHT])
canvas.draw_image(debris_image, [center[0] - wtime, center[1]], [size[0] - 2 * wtime, size[1]],
[WIDTH / 2 + 1.25 * wtime, HEIGHT / 2], [WIDTH - 2.5 * wtime, HEIGHT])
canvas.draw_image(debris_image, [size[0] - wtime, center[1]], [2 * wtime, size[1]],
[1.25 * wtime, HEIGHT / 2], [2.5 * wtime, HEIGHT])
# draw ship and sprites
my_ship.draw(canvas)
a_rock.draw(canvas)
a_missile.draw(canvas)
# update ship and sprites
my_ship.update()
a_rock.update()
a_missile.update()
#draw Lives in the upper left
canvas.draw_text("LIVES " + str(lives), (WIDTH * 0.05 , HEIGHT * 0.1), 20, "White", "serif")
#draw Score in the upper right
canvas.draw_text("SCORE " + str(score), (WIDTH * 0.85, HEIGHT * 0.1), 20, "White", "serif")
# timer handler that spawns a rock
def rock_spawner():
global a_rock
#create rock velocity
random_rock_ang_vel = random.randint(-3,3) * 0.1
random_velocity_vector = [random.randint(-10,10)* 0.1, random.randint(-10, 10)* 0.1]
random_position = [random.randint(0,WIDTH), random.randint(0,HEIGHT)]
a_rock = Sprite(random_position, random_velocity_vector, 1, random_rock_ang_vel, asteroid_image, asteroid_info)
# initialize frame
frame = simplegui.create_frame("Asteroids", WIDTH, HEIGHT)
# initialize ship and two sprites
my_ship = Ship([WIDTH / 2, HEIGHT / 2], [0, 0], 0, ship_image, ship_info)
a_rock = Sprite([WIDTH / 3, HEIGHT / 3], [1, 1], 0, 0, asteroid_image, asteroid_info)
a_missile = Sprite([2 * WIDTH / 3, 2 * HEIGHT / 3], [0,0], 0, 0, missile_image, missile_info, missile_sound)
# register handlers
frame.set_draw_handler(draw)
frame.set_keydown_handler(keydown)
frame.set_keyup_handler(keyup)
timer = simplegui.create_timer(1000.0, rock_spawner)
# get things rolling
timer.start()
frame.start()
| [
"[email protected]"
] | |
01e37d72a54fb67fc279b8a9d7469470c7d54586 | ec2b3b8b61cef4e94447ad70e543b690d70050e5 | /order-1_voronoi/core/fortune/arc_tree/Arc.py | 69579dc3f9bf92572ce60e9e24790faf3580a891 | [
"MIT"
] | permissive | bzliu94/algorithms | d6e491f6d3c68c50a37bab504501a73362b9a94d | 43ccefd7ea1fd88339bf2afa0b35b0a3bdf6acff | refs/heads/master | 2021-01-17T02:22:26.044478 | 2020-08-02T01:13:59 | 2020-08-02T01:13:59 | 36,104,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,170 | py | # from BeachLineItem import *
import math
# class Arc(BeachLineItem):
class Arc:
# focus is a (x, y) pair
# def __init__(self, focus, arc_tree):
"""
def __init__(self, focus, insert_l_y):
self.focus = focus
# self.arc_tree = arc_tree
self.insert_l_y = insert_l_y
"""
# def __init__(self, focus, arc_tree, is_split_residue_arc = False):
def __init__(self, focus, is_split_residue_arc = False):
self.focus = focus
# self.arc_tree = arc_tree
self.is_split_residue_arc = is_split_residue_arc
"""
def getInsertSweepLineY(self):
return self.insert_l_y
def setInsertSweepLineY(self, insert_l_y):
self.insert_l_y = insert_l_y
"""
def getIsSplitResidueArc(self):
return self.is_split_residue_arc
def setIsSplitResidueArc(self, is_split_residue_arc):
self.is_split_residue_arc = is_split_residue_arc
def getFocus(self):
return self.focus
"""
def _getArcTree(self):
return self.arc_tree
"""
"""
# assume two parabolas are not degenerate
# assume two roots exist
"""
# have two parabolas s.t. zero or one are degenerate
def getLeftIntersectionXValue(self, arc, l_y):
values = self.getIntersectionXValues(arc, l_y)
left_x_value = values[0]
# print left_x_value
return left_x_value
# have two parabolas s.t. zero or one are degenerate
def getRightIntersectionXValue(self, arc, l_y):
values = self.getIntersectionXValues(arc, l_y)
right_x_value = values[1]
# print right_x_value
return right_x_value
"""
# note: could be a good idea
# to make sure that method
# supports having either
# (i.e. lower or higher)
# parabola or both
# being degenerate
# handle one case involving a degenerate parabola
# in particular, we deal with a case
# where we have a degenerate lower parabola
"""
# handle case where we have one degenerate parabola
# do not handle case where we have two degenerate parabolas
# we consider scenarios that tend to involve
# two parabolas that have
# exactly two intersection points
# (as opposed to zero, one, or infinite)
# except in case of degenerate parabolas
# (where we have one/two or infinite)
# assume two arcs involved
# have corresponding focuses
# that do not have:
# x values that are identical
# and y values that are identical
# return list with left x value and right x value
# have case where two parabolas have focuses at same y,
# but they are not necessarily degenerate
def getIntersectionXValues(self, arc, l_y):
# consider case where lower arc
# is a portion of a degenerate parabola
# arc_tree = self._getArcTree()
arcs = [self, arc]
arc_focus_pairs = [(x, x.getFocus()) for x in arcs]
y_values = [x[1][1] for x in arc_focus_pairs]
min_y = min(y_values)
candidate_low_arcs = [x[0] for x in arc_focus_pairs if x[1][1] == min_y]
low_arc = candidate_low_arcs[0]
low_arc_focus = low_arc.getFocus()
low_arc_focus_x = low_arc_focus[0]
low_arc_focus_y = low_arc_focus[1]
# may have floating-point-related error for this comparison
# low_arc_is_degenerate = low_arc_focus_y == l_y
low_arc_is_degenerate = low_arc.isDegenerate(l_y)
"""
print "values for determining whether low arc is degenerate:", low_arc_focus_y, l_y
print abs(low_arc_focus_y - l_y)
print low_arc_is_degenerate
"""
# print low_arc_focus_y, l_y
candidate_high_arcs = [x for x in arcs if x != low_arc]
high_arc = candidate_high_arcs[0]
high_arc_focus = high_arc.getFocus()
high_arc_focus_x = high_arc_focus[0]
high_arc_focus_y = high_arc_focus[1]
# may have floating-point-related error for this comparison
# high_arc_is_degenerate = high_arc_focus_y == l_y
high_arc_is_degenerate = high_arc.isDegenerate(l_y)
focus_i = self.getFocus()
focus_j = arc.getFocus()
"""
unadjusted_focus_i = self.getFocus()
unadjusted_focus_j = arc.getFocus()
# consider sweep-line y for purpose of addressing a floating-point-related issue
focus_i = (unadjusted_focus_i[0], min(unadjusted_focus_i[1], l_y))
focus_j = (unadjusted_focus_j[0], min(unadjusted_focus_j[1], l_y))
"""
x_values = [x[1][0] for x in arc_focus_pairs]
min_x_value = min(x_values)
candidate_left_arcs = [x[0] for x in arc_focus_pairs if x[1][0] == min_x_value]
left_arc = candidate_left_arcs[0]
candidate_right_arcs = [x for x in arcs if x != left_arc]
right_arc = candidate_right_arcs[0]
left_arc_is_degenerate = left_arc.isDegenerate(l_y)
right_arc_is_degenerate = right_arc.isDegenerate(l_y)
"""
print "low arc is degenerate:", low_arc_is_degenerate
print "high arc is degenerate:", high_arc_is_degenerate
print "low arc focus:", low_arc.getFocus()
print "high arc focus:", high_arc.getFocus()
"""
if left_arc_is_degenerate == True and right_arc_is_degenerate == True:
raise Exception("handling two arcs with same focus y-value that are currently degenerate")
if low_arc_is_degenerate == True and high_arc_is_degenerate == False:
x_value = low_arc_focus_x
return [x_value, x_value]
# if the two parabolas have focuses with same y,
# either the parabolas are degenerate or
# we have one intersection point
# assume that case where parabolas are degenerate
# has already been handled
if (low_arc_is_degenerate == False and high_arc_is_degenerate == False) and \
(low_arc_focus_y == high_arc_focus_y):
# raise Exception("handling two arcs with same focus y-value but that are not degenerate")
alpha_i = 1.0 / (2.0 * (focus_i[1] - l_y))
b_i = (-2.0 * focus_i[0]) * alpha_i
c_i = (focus_i[0] ** 2.0 + focus_i[1] ** 2.0 - l_y ** 2.0) * alpha_i
alpha_j = 1.0 / (2.0 * (focus_j[1] - l_y))
b_j = (-2.0 * focus_j[0]) * alpha_j
c_j = (focus_j[0] ** 2.0 + focus_j[1] ** 2.0 - l_y ** 2.0) * alpha_j
b_r = b_j - b_i
c_r = c_j - c_i
x_value = -1 * c_r / b_r
            # only one intersection point exists here; return it twice so that
            # callers indexing both [0] and [1] get the same x-value
            result = [x_value, x_value]
            return result
# print low_arc_focus, l_y
"""
print "focus 1:", focus_i
print "focus 2:", focus_j
print self == arc
"""
# print l_y
alpha_i = 1.0 / (2.0 * (focus_i[1] - l_y))
# print alpha_i
a_i = alpha_i
b_i = (-2.0 * focus_i[0]) * alpha_i
c_i = (focus_i[0] ** 2.0 + focus_i[1] ** 2.0 - l_y ** 2.0) * alpha_i
# print alpha_j
alpha_j = 1.0 / (2.0 * (focus_j[1] - l_y))
a_j = alpha_j
b_j = (-2.0 * focus_j[0]) * alpha_j
c_j = (focus_j[0] ** 2.0 + focus_j[1] ** 2.0 - l_y ** 2.0) * alpha_j
a_r = a_j - a_i
# print a_r, a_j, a_i
# print a_r
b_r = b_j - b_i
c_r = c_j - c_i
# print self.getFocus(), arc.getFocus(), l_y
"""
print self.getFocus(), arc.getFocus(), l_y
print a_r, a_j, a_i
print b_r, b_j, b_i
print c_r, c_j, c_i
"""
# making sure that determinant is at least zero,
# because we assume that we have
# at least one intersection
# and to deal with floating-point issues
determinant = max(b_r ** 2.0 - 4.0 * a_r * c_r, 0)
# print "determinant:", determinant
"""
# deal with floating-point-related issue
# where we may have value for determinant
# that is very small yet negative
epsilon = 0.00000001
if determinant < 0 and abs(determinant) >= epsilon:
raise Exception()
else:
determinant = abs(determinant)
"""
x1 = (-1.0 * b_r - math.sqrt(determinant)) / (2.0 * a_r)
x2 = (-1.0 * b_r + math.sqrt(determinant)) / (2.0 * a_r)
# print [x1, x2]
# print b_r, determinant, a_r
# return [x1, x2]
lower_x_value = min(x1, x2)
higher_x_value = max(x1, x2)
x_values = [lower_x_value, higher_x_value]
return x_values
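    # Derivation sketch for the coefficients used above: an arc is the locus of
    # points equidistant from focus (fx, fy) and sweep line y = l, i.e.
    #   (x - fx)**2 + (y - fy)**2 = (y - l)**2
    # which rearranges to y = a*x**2 + b*x + c with
    #   a = 1 / (2*(fy - l)), b = -2*fx*a, c = (fx**2 + fy**2 - l**2)*a,
    # matching a_i/b_i/c_i and a_j/b_j/c_j above; the intersections are the
    # roots of the difference of the two quadratics, via the quadratic formula.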
"""
def getRightIntersectionX(self, arc, l_y):
pass
"""
def isSizeZero(self, arc_tree):
# arc_tree = self._getArcTree()
arc = self
arc_interval = arc_tree._getArcIntervalForArc(arc)
result = arc_interval.isSizeZero()
return result
"""
# somewhat circular definition for whether an arc is degenerate
def isDegenerate(self, l_y):
arc_tree = self._getArcTree()
arc_interval = arc_tree._getArcIntervalForArc(self)
arc_interval_is_size_zero = arc_interval.isSizeZero()
return arc_interval_is_size_zero
"""
# care about x values
# note that for a given positive change
# in the quantity (focus y - sweep-line y),
# change in x tends to be more
# def isDegenerate(self, l_y):
def isDegenerate(self, l_y, tolerance = 0.001):
# test whether difference between sweep-line y-value
# and y-component of focus for given arc
# is within given tolerance value
# measuring axis-aligned quantities
# sweep_line = arc_tree.getSweepLine()
# l_y = sweep_line.getY()
focus = self.getFocus()
focus_y_component = focus[1]
if l_y > focus_y_component + tolerance:
print l_y, focus_y_component
raise Exception("arc will be concave down")
# is_degenerate = focus_y_component == l_y
# print l_y
# asymmetric tolerance
is_degenerate = (l_y <= (focus_y_component + tolerance)) \
and (l_y >= focus_y_component)
"""
is_degenerate = (focus_y_component <= (l_y + tolerance)) \
and (focus_y_component >= (l_y - tolerance))
"""
return is_degenerate
def isArc(self):
return True
# should not use this method for an arc that is degenerate
"""
# we note that an arc inserted
# when there are no arcs
# that already exist in arc tree
# technically ought to be degenerate,
# but we say that its left and right extents
# are minus infinity and positive infinity
# assume that if we are degenerate, we have neighbors
"""
# assume that focus y is greater than or equal to l_y
def getYValueForXValue(self, x_value, l_y):
focus = self.getFocus()
focus_x, focus_y = focus
if not (focus_y >= l_y):
raise Exception("considering y for a curve corresponding to a site with y not at or above sweep-line")
alpha = 1.0 / (2.0 * (focus_y - l_y))
a = alpha
b = (-2.0 * focus_x) * alpha
c = (focus_x ** 2 + focus_y ** 2 - l_y ** 2) * alpha
y = a * x_value ** 2 + b * x_value + c
return y
"""
# is size-zero at time of insert
# and at that time is not degenerate
def isSplitResidueArc(self, tolerance = 0.001):
focus = self.getFocus()
focus_y = focus[1]
insert_l_y = self.getInsertSweepLineY()
created_as_split_residue_arc = (focus_y <= (insert_l_y + tolerance)) \
and (focus_y >= (insert_l_y - tolerance))
return created_as_split_residue_arc
"""
"""
# retrieve an (x, y) tuple
def getBreakpointLocation(self, arc, l_y):
pass
"""
"""
arc1 = Arc((10, 20))
arc2 = Arc((20, 15))
l_y = 12.5
print arc1.getLeftIntersectionXValue(arc2, l_y)
print arc1.getRightIntersectionXValue(arc2, l_y)
"""
"""
arc1 = Arc((10, 20))
print arc1.isArc()
print arc1.isSweepLineTouchingSlab()
"""
"""
print abs(-1.80143985095e+16)
print abs(-1.80143985095e+16) > 0.00000001
"""
| [
"[email protected]"
] | |
1857e98731a98fbbf1200a924206070e99daf9c6 | bb853b657537eca893032ad08e4c843b5195fa35 | /dateandtime2.py | ff50587bccebcdb78c87b2dd95e50b62bff026b6 | [] | no_license | PrathameshDhumal/Artificial-Intelligence-Assistant-With-Python | 584064e5219479576244d74aafb6f4e4dcbd16cc | 6afcdf792a2e0e1d6168dfb732add3d63158f38d | refs/heads/main | 2023-05-31T01:31:19.202678 | 2021-06-30T19:57:53 | 2021-06-30T19:57:53 | 379,478,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | import pyttsx3
import datetime
engine = pyttsx3.init()
def speak(audio):
engine.say(audio)
engine.runAndWait()
def time():
Time = datetime.datetime.now().strftime("%I:%M:%S")
speak(Time)
time()
def date():
year = int(datetime.datetime.now().year)
month= int(datetime.datetime.now().month)
date = int(datetime.datetime.now().day)
speak("The current date is ")
    speak(str(date))
    speak(str(month))
    speak(str(year))
date() | [
"[email protected]"
] | |
770681d3dbbe5a567c39708a2f499a677b0b69a4 | c6ce21e5b8a906b0bf95cfcac9d84e243c876723 | /PARALLAX/CODE/resnet_train.py | c41dca46d825c0b593e8b6d0f4d047ab9e5385a9 | [] | no_license | yuchanmo/cloud_class | 6450b08017fbe72dde810620365a87cda7ae8abd | b3d2c6589f7a9b8c0340e00487f610a097373ec6 | refs/heads/master | 2020-06-01T06:48:19.669550 | 2019-06-07T04:04:51 | 2019-06-07T04:04:51 | 190,685,867 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,012 | py | #Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ResNet Train/Eval module.
"""
import time
import six
import sys
import cifar10_download
cifar10_download.download()
import cifar_input
import numpy as np
import resnet_model
import tensorflow as tf
import parallax
import parallax_config
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('train_data_path', '',
'Filepattern for training data.')
tf.app.flags.DEFINE_integer('image_size', 32, 'Image side length.')
tf.app.flags.DEFINE_string('ckpt_dir', '',
'Directory to keep the checkpoints. Should be a '
'parent directory of FLAGS.train_dir')
tf.app.flags.DEFINE_string('resource_info_file', 'resource_info',
'Resource information file')
tf.app.flags.DEFINE_string('run_option', 'PS',
'Distributed training architecture')
tf.app.flags.DEFINE_boolean('sync', True, '')
def train(hps):
"""Training loop."""
single_gpu_graph = tf.Graph()
with single_gpu_graph.as_default():
images, labels = cifar_input.build_input(
'cifar10', FLAGS.train_data_path, hps.batch_size, 'train')
model = resnet_model.ResNet(hps, images, labels, 'train')
model.build_graph()
truth = tf.argmax(model.labels, axis=1)
predictions = tf.argmax(model.predictions, axis=1)
precision = tf.reduce_mean(tf.to_float(tf.equal(predictions, truth)))
########################################################################
#### FIXME: Get session for distributed environments using Parallax ####
#### Pass parallax_config as an argument ####
########################################################################
parallax_sess, num_workers, worker_id, num_replicas_per_worker = \
parallax.parallel_run(single_gpu_graph,
FLAGS.resource_info_file,
sync=FLAGS.sync,
parallax_config=parallax_config.build_config())
for i in range(350000):
_, global_step, cost, precision_ = \
parallax_sess.run([model.train_op, model.global_step, model.cost, precision])
if i % 10 == 0:
print('step: %d, loss: %.3f, precision: %.3f' % (global_step[0], cost[0], precision_[0]))
# Tuning learning rate
train_step = global_step[0]
if train_step < 10000:
lrn_rate = 0.1
elif train_step < 15000:
lrn_rate = 0.01
elif train_step < 20000:
lrn_rate = 0.001
else:
lrn_rate = 0.0001
feed_dict = {model.lrn_rate: []}
for worker in range(num_replicas_per_worker):
feed_dict[model.lrn_rate].append(lrn_rate)
parallax_sess.run(model.global_step, feed_dict=feed_dict)
def main(_):
batch_size = 128
hps = resnet_model.HParams(batch_size=batch_size,
num_classes=10,
min_lrn_rate=0.0001,
lrn_rate=0.1,
num_residual_units=5,
use_bottleneck=False,
weight_decay_rate=0.0002,
relu_leakiness=0.1)
train(hps)
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run()
| [
"[email protected]"
] | |
f4850aed962c3460094182316ff7933229ed1cb1 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/310/97316/submittedfiles/minha_bib.py | d6b2c61dad7783f3393a133c2502721a863c2a10 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | # -*- coding: utf-8 -*-
import random
def solicitaSimbolodoHumano(a):
    a = input('Symbol you want to play: ')
    while a != 'O' and a != 'X' and a != 'o' and a != 'x':
        a = input('Symbol you want to play: ')
    return a
def sorteioPrimeiraJogada(a):
    a = random.choice((0, 1))
    if a == 1:
        print('Winner of the draw to start the game: Player')
    else:
        print('Winner of the draw to start the game: Computer')
    return a
def JogadaComputador():
    pass  # computer-move logic still to be implemented
| [
"[email protected]"
] | |
7d9c1c37687134af6b5864674dbd42ead9ea5025 | c4a32dc9fb26d72721864982b52578e2aea31db4 | /7.SEPTIMA EXPOSICIÓN/FOURIER-PRESIÓN.py | a4c7034b8db9d72cd8fd750fa9c00fd12905510d | [] | no_license | yordanarango/CODE_TRABAJO_GRADO | 30eee8778bf4d61706fd5e7dc26b609ad1214fd3 | 5eb55e90b864359942e40ac8d4672c28dea1e1de | refs/heads/master | 2021-04-15T12:18:33.032433 | 2018-03-22T14:19:35 | 2018-03-22T14:19:35 | 126,347,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,361 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 12 21:59:37 2016
@author: yordan
"""
from mpl_toolkits.basemap import Basemap
import matplotlib.pylab as pl
import numpy as np
import netCDF4 as nc
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from scipy import linalg as la
import pandas as pd
#from FuncionesFourier import consulta_mensual,Interpolar,Fourier,varianzas_monthlyseries,find_nearest
class MidpointNormalize(colors.Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
colors.Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
PR = nc.Dataset('/home/yordan/Escritorio/TRABAJO_DE_GRADO/DATOS_Y_CODIGOS/DATOS/PRESION-SEA-LEVEL-ERA/MSLP_1979_2016_MENSUAL.nc')
Pr = PR.variables['msl'][:]/100.
Lat = PR.variables["latitude"][:]
Lon = PR.variables["longitude"][:]-360
#==============================================================================
'''Computation of the annual cycle'''
#==============================================================================
ciclo = np.zeros((12, Pr.shape[1], Pr.shape[2]))
for i in range(12):
for j in range(Pr.shape[1]):
for k in range(Pr.shape[2]):
ciclo[i,j,k] = np.mean(Pr[i::12,j,k])
#==============================================================================
'''Removing the annual cycle'''
#==============================================================================
P = np.zeros((Pr.shape[0], Pr.shape[1], Pr.shape[2]))
for i in range(ciclo.shape[0]):
for j in range(Pr.shape[1]):
for k in range(Pr.shape[2]):
P[i::12, j, k] = Pr[i::12, j, k]-ciclo[i,j,k]
#==============================================================================
'''PLOTTING'''
#==============================================================================
p_TT_lon = -95.0
p_TT_lat = 14.5
p_PP_lon = -87.5
p_PP_lat = 10.25
p_PN_lon = -79.75
p_PN_lat = 7.5
p_G_lon = -93.0
p_G_lat = 21.0
p_C_lon = -75.0
p_C_lat = 13.0
p_A_lon = -60.0
p_A_lat = 27.0
box_TT_lon = [-96.25, -96.25, -93.75, -93.75, -96.25]
box_TT_lat = [16, 12.5, 12.5, 16, 16]
box_PP_lon = [-90.5, -90.5, -86.0, -86.0, -90.5]
box_PP_lat = [11.75, 8.75, 8.75, 11.75, 11.75]
box_PN_lon = [-80.75, -80.75, -78.75, -78.75, -80.75]
box_PN_lat = [8.5, 5.5, 5.5, 8.5, 8.5]
fig = plt.figure(figsize=(8,8), edgecolor='W',facecolor='W')
ax = fig.add_axes([0.1,0.1,0.8,0.8])
map = Basemap(projection='merc', llcrnrlat=0, urcrnrlat=40, llcrnrlon=-120, urcrnrlon=-55, resolution='i')
map.drawcoastlines(linewidth = 0.8)
map.drawcountries(linewidth = 0.8)
map.drawparallels(np.arange(0, 40, 8),labels=[1,0,0,1])
map.drawmeridians(np.arange(-120,-55,10),labels=[1,0,0,1])
lons,lats = np.meshgrid(Lon,Lat)
x, y = map(lons,lats)
P_TT_lon, P_TT_lat = map(p_TT_lon, p_TT_lat)
P_PP_lon, P_PP_lat = map(p_PP_lon, p_PP_lat)
P_PN_lon, P_PN_lat = map(p_PN_lon, p_PN_lat)
P_G_lon, P_G_lat = map(p_G_lon, p_G_lat)
P_C_lon, P_C_lat = map(p_C_lon, p_C_lat)
P_A_lon, P_A_lat = map(p_A_lon, p_A_lat)
TT_lon,TT_lat = map(box_TT_lon, box_TT_lat)
PP_lon,PP_lat = map(box_PP_lon, box_PP_lat)
PN_lon,PN_lat = map(box_PN_lon, box_PN_lat)
bounds=np.linspace( np.min(P[0]) ,np.max(P[0]), 30)
bounds=np.around(bounds, decimals=2)
CF = map.contourf(x,y, P[0], 30, norm=MidpointNormalize(midpoint=0), cmap=plt.cm.RdYlBu_r)#plt.cm.rainbow
cb = plt.colorbar(CF, orientation='vertical', pad=0.05, shrink=0.8, boundaries=bounds)
cb.set_label('hPa')
ax.set_title('SLP Anomalies', size='15', weight='medium')
map.plot(P_TT_lon, P_TT_lat, marker='D', color='k')
map.plot(P_PP_lon, P_PP_lat, marker='D', color='k')
map.plot(P_PN_lon, P_PN_lat, marker='D', color='k')
map.plot(P_G_lon, P_G_lat, marker='D', color='k')
map.plot(P_C_lon, P_C_lat, marker='D', color='k')
map.plot(P_A_lon, P_A_lat, marker='D', color='k')
map.plot(TT_lon, TT_lat, marker=None, color='k')
map.plot(PP_lon, PP_lat, marker=None, color='k')
map.plot(PN_lon, PN_lat, marker=None, color='k')
map.fillcontinents(color='white')
plt.show()
#==============================================================================
'''SELECTION OF HOURLY-RESOLUTION SERIES'''
#==============================================================================
Archivo = nc.Dataset('/home/yordan/Escritorio/TRABAJO_DE_GRADO/DATOS_Y_CODIGOS/DATOS/NARR/PRESION-NARR/PRESION-REPROYECCION/1979alt.nc')
Variables = [v for v in Archivo.variables]
PR = Archivo.variables['PR'][0]/100
LON = Archivo.variables['LON'][:]-360
LAT = Archivo.variables['LAT'][:]
posiciones = ['TT','PP','PN','G','C']
longitudes = [-95.0, -87.5, -79.75, -93.0, -75.0]
latitudes = [14.5, 10.25, 7.5, 21.0, 13.0]
datep = pd.date_range('1979-01-01 00:00:00', '2015-12-31 23:00:00', freq='3H')
date = pd.DatetimeIndex(datep)
Serie = pd.DataFrame(index=date, columns=['TT','PP', 'PN','G','C'])
for i, j, k in zip(longitudes,latitudes, posiciones):
    for l in range(1979, 2016): # 37 years of data, running only through 2015
Presion = nc.Dataset('/home/yordan/Escritorio/TRABAJO_DE_GRADO/DATOS_Y_CODIGOS/DATOS/NARR/PRESION-NARR/PRESION-REPROYECCION/'+str(l)+'alt.nc')
lOn = np.where(LON == i)[0][0]
lAt = np.where(LAT == j)[0][0]
aux = Presion.variables['PR'][:, lAt, lOn]/100
Serie[k][str(l)+'-01-01 00:00:00':str(l)+'-12-31 23:00:00'] = aux
MA = Serie.as_matrix(columns=None)
TT = MA[:,0]
PP = MA[:,1]
PN = MA[:,2]
G = MA[:,3]
C = MA[:,4]
#def ser_anom(serie):
# ciclo = np.zeros(12)
# datep = pd.date_range('1979-01-01 00:00:00', '2015-12-31 23:00:00', freq='3H')
# for i in range(12):
# aux_cic = []
# for j in range(len(serie)):
# if datep[j].month == i+1:
# aux_cic.append(serie[j])
# ciclo[i] = np.mean(aux_cic)
#
# anom = np.zeros(len(serie))
# for k in range(12):
# for l in range(len(anom)):
# if datep[l].month == k+1:
# anom[l] = serie[l]-ciclo[k]
#
# return anom
#
#TT_anom = ser_anom(TT)
#PP_anom = ser_anom(PP)
#PN_anom = ser_anom(PN)
#G_anom = ser_anom(G)
#C_anom = ser_anom(C)
#==============================================================================
'''FOURIER TRANSFORM OF THE HOURLY SERIES'''
#==============================================================================
def Fourier(serie):
    F_TT = np.fft.fft(serie) # FFT of the anomaly series
    fr_TT = np.fft.fftfreq(len(serie), 3) # frequencies
Periodo = 1/(fr_TT[1:len(serie)/2]*24)
amplitud = np.abs(F_TT[1:len(serie)/2])
potencia = np.abs(F_TT[1:len(serie)/2])**2
    potencia[36] = 0.0 # removing the annual cycle
    potencia[73] = 0.0 # removing the semiannual cycle
    potencia[13513] = 0.0 # removing the diurnal cycle
    potencia[27027] = 0.0 # removing the semidiurnal cycle
total = np.sum(potencia)
var = potencia*100/total
return var, Periodo
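# Index -> period mapping used above (assuming ~37 years of 3-hourly samples):
# fftfreq(len(serie), 3) is in cycles per hour, so Periodo[i] equals
# total_days / (i + 1) with total_days = len(serie)*3/24 ~= 13514. Hence
# i = 36 -> ~365 d (annual), i = 73 -> ~183 d (semiannual),
# i = 13513 -> ~1 d (diurnal) and i = 27027 -> ~0.5 d (semidiurnal),
# which are exactly the four harmonics zeroed out in Fourier().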
var, periodo = Fourier(C)
figure = plt.figure(figsize=(10,2))
ax1 = figure.add_subplot(111)
ax1.plot(periodo, var, 'k', linewidth = 2)
ax1.set_title('% $of$ $Variance$ $Explained$ $(C)$', size='15')
ax1.set_xlabel('$Period$ $(days)$', size='14')
ax1.set_ylabel('% $Variance$', size='14')
ax1.set_ylim(0, 2)
ax1.set_xlim(-100, 2575)
plt.savefig('/home/yordan/Escritorio/IMAGENES-TDG/FOURIER/PRESION/'+'C'+'.png',dpi=100,bbox_inches='tight')
plt.show()
for i in range(len(var)):
if var[i] >= 1.0:
print var[i], periodo[i], i
#==============================================================================
'''SELECTION OF MONTHLY-RESOLUTION SERIES'''
#==============================================================================
Archivo = nc.Dataset('/home/yordan/Escritorio/TRABAJO_DE_GRADO/DATOS_Y_CODIGOS/DATOS/PRESION-SEA-LEVEL-ERA/MSLP_1979_2016_MENSUAL.nc')
Variables = [v for v in Archivo.variables]
PR = Archivo.variables['msl'][:-7]
LON = Archivo.variables['longitude'][:]-360
LAT = Archivo.variables['latitude'][:]
#posiciones = ['TT','PP','PN','G','C', 'A']
posiciones = [0, 1, 2, 3, 4, 5]
longitudes = [-95.25, -87.75, -79.5, -93.0, -75.0, -60.0]
latitudes = [14.25, 10.5, 7.5, 21.0, 12.75, 27.0]
Serie = np.zeros((PR.shape[0], 6))
for i, j, k in zip(longitudes,latitudes, posiciones):
lOn = np.where(LON == i)[0][0]
lAt = np.where(LAT == j)[0][0]
Serie[:,k] = PR[:, lAt, lOn]
TT_month = Serie[:,0]
PP_month = Serie[:,1]
PN_month = Serie[:,2]
G_month = Serie[:,3]
C_month = Serie[:,4]
A_month = Serie[:,5]
def anom_month(serie):
ciclo = np.zeros(12)
for i in range(12):
ciclo[i] = np.mean(serie[i::12])
month_anom = np.zeros(len(serie))
for j in range(12):
month_anom[j::12] = serie[j::12]-ciclo[j]
return month_anom
TT_anom_month = anom_month(TT_month)
PP_anom_month = anom_month(PP_month)
PN_anom_month = anom_month(PN_month)
G_anom_month = anom_month(G_month)
C_anom_month = anom_month(C_month)
A_anom_month = anom_month(A_month)
#==============================================================================
'''FOURIER TRANSFORM OF THE MONTHLY SERIES'''
#==============================================================================
def Fourier_month(serie):
    F_TT = np.fft.fft(serie) # FFT of the anomaly series
    fr_TT = np.fft.fftfreq(len(serie), 1) # frequencies
Periodo = 1/fr_TT[1:len(serie)/2]
amplitud = np.abs(F_TT[1:len(serie)/2])
potencia = np.abs(F_TT[1:len(serie)/2])**2
    potencia[36] = 0.0 # removes the annual cycle
    potencia[73] = 0.0 # removes the semiannual cycle
total = np.sum(potencia)
var = potencia*100/total
return var, Periodo
var, periodo = Fourier_month(A_month)
figure = plt.figure(figsize=(10,4))
ax1 = figure.add_subplot(111)
ax1.plot(periodo, var, 'g', linewidth = 2)
ax1.set_title('% of Variance Explained (A)', size='15')
ax1.set_xlabel('Period (months)', size='14')
ax1.set_ylabel('% Variance', size='14')
ax1.set_ylim(0, 12)
ax1.set_xlim(-10, 150)
plt.show()
for i in range(len(var)):
if var[i] >= 2.0:
print var[i], periodo[i], i
print var[110], periodo[110]
print var[73], periodo[73]
print var[36], periodo[36]
print var[14], periodo[14]
print var[9], periodo[9]
print var[7], periodo[7]
#==============================================================================
'''ALTERNATIVE FORM OF THE TRANSFORM'''
#==============================================================================
freq,Yf=Fourier(A) # function kept in the "Códigos útiles" (useful codes) folder
semi,anual,inter,var_coefs=varianzas_monthlyseries(A)
figure = plt.figure(figsize=(15,7))
ax1 = figure.add_subplot(111)
ax1.plot(freq, var_coefs, 'k', linewidth = 2)
ax1.set_title('% of Variance Explained (A)', size='15')
ax1.set_xlabel('Period (months)', size='15')
ax1.set_ylabel('% Variance', size='15')
plt.show()
ones = []
for i in range(len(freq)):
if var_coefs[i]>=2:
ones.append((freq[i], var_coefs[i]))
#==============================================================================
p_TT_lon = -95.0
p_TT_lat = 14.5
p_PP_lon = -87.5
p_PP_lat = 10.25
p_PN_lon = -79.75
p_PN_lat = 7.5
p_G_lon = -93.0
p_G_lat = 21.0
p_C_lon = -75.0
p_C_lat = 13.0
p_A_lon = -60.0
p_A_lat = 27.0
box_TT_lon = [-96.25, -96.25, -93.75, -93.75, -96.25]
box_TT_lat = [16, 12.5, 12.5, 16, 16]
box_PP_lon = [-90.5, -90.5, -86.0, -86.0, -90.5]
box_PP_lat = [11.75, 8.75, 8.75, 11.75, 11.75]
box_PN_lon = [-80.75, -80.75, -78.75, -78.75, -80.75]
box_PN_lat = [8.5, 5.5, 5.5, 8.5, 8.5]
fig = plt.figure(figsize=(8,8), edgecolor='W',facecolor='W')
ax = fig.add_axes([0.1,0.1,0.8,0.8])
map = Basemap(projection='merc', llcrnrlat=0, urcrnrlat=24, llcrnrlon=-105, urcrnrlon=-75, resolution='i')
map.drawcoastlines(linewidth = 0.8)
map.drawcountries(linewidth = 0.8)
map.drawparallels(np.arange(0, 24, 8),labels=[1,0,0,1])
map.drawmeridians(np.arange(-105,-75,10),labels=[1,0,0,1])
lons,lats = np.meshgrid(LON,LAT)
x, y = map(lons,lats)
P_TT_lon, P_TT_lat = map(p_TT_lon, p_TT_lat)
P_PP_lon, P_PP_lat = map(p_PP_lon, p_PP_lat)
P_PN_lon, P_PN_lat = map(p_PN_lon, p_PN_lat)
P_G_lon, P_G_lat = map(p_G_lon, p_G_lat)
P_C_lon, P_C_lat = map(p_C_lon, p_C_lat)
P_A_lon, P_A_lat = map(p_A_lon, p_A_lat)
TT_lon,TT_lat = map(box_TT_lon, box_TT_lat)
PP_lon,PP_lat = map(box_PP_lon, box_PP_lat)
PN_lon,PN_lat = map(box_PN_lon, box_PN_lat)
bounds=np.linspace( np.min(PR[0]) ,np.max(PR[0]), 30)
bounds=np.around(bounds, decimals=2)
CF = map.contourf(x,y, PR[0], bounds, cmap=plt.cm.RdYlBu_r)#plt.cm.rainbow
cb = plt.colorbar(CF, orientation='vertical', pad=0.05, shrink=0.8, boundaries=bounds)
cb.set_label('hPa')
ax.set_title('SLP Anomalies', size='15', weight='medium')
map.plot(P_TT_lon, P_TT_lat, marker='D', color='k')
map.plot(P_PP_lon, P_PP_lat, marker='D', color='k')
map.plot(P_PN_lon, P_PN_lat, marker='D', color='k')
map.plot(P_G_lon, P_G_lat, marker='D', color='k')
map.plot(P_C_lon, P_C_lat, marker='D', color='k')
map.plot(P_A_lon, P_A_lat, marker='D', color='k')
map.plot(TT_lon, TT_lat, marker=None, color='k')
map.plot(PP_lon, PP_lat, marker=None, color='k')
map.plot(PN_lon, PN_lat, marker=None, color='k')
map.fillcontinents(color='white')
plt.show()
| [
"[email protected]"
] | |
9d452f69e846718fd2cc84f675566da6c840c37e | 791b790ce8a4ad93ab88cf9a2aea3b2dd70652dc | /Competitions/Codechef/MARCH17/Xenny and Alternating Tasks.py | b6e1e5fcced9841b3dcdab65f1ad2f9859b3d986 | [
"MIT"
] | permissive | Debasmita-01/Competitive-Programming | 16d8778db5ff225519404e88afa4cccf830006b7 | 547859d6ded88a0d3977c2f6e747d80235c054cd | refs/heads/master | 2022-12-25T11:14:55.129524 | 2020-10-01T05:22:01 | 2020-10-01T05:22:01 | 300,511,195 | 3 | 1 | MIT | 2020-10-02T06:06:30 | 2020-10-02T05:32:52 | null | UTF-8 | Python | false | false | 361 | py | t = int(raw_input())
for i in xrange(0, t):
n = int(raw_input())
x = [int(j) for j in raw_input().split(" ")]
x.sort()
for j in xrange(1, n-1):
if x[j-1]+1 != x[j] and x[j-1]+2 != x[j+1]:
print x[j-1]
break
elif x[j-1]+1 != x[j] and x[j]+1 != x[j+1]:
print x[j]
break
elif x[j-1]+2 != x[j+1] and x[j]+1 != x[j+1]:
print x[j+1]
break
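# Traced example (hypothetical input): n = 5 with values 3 1 2 7 4 gives the
# sorted list [1, 2, 3, 4, 7]; at j = 3 the chain breaks ahead of the last
# element (x[3]+1 != x[4] and x[2]+2 != x[4]), so 7 is printed.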
| [
"[email protected]"
] | |
4cdc6b4d468c8ce11e8f6bafbe4e091e22cf7685 | 80642a9db76d98e1b956ed447ca29fe1f448394b | /tsoc/signal_dependent_offset_analysis.py | ffb4a4727fa99c1b8992cd0e69a6311a3d685f42 | [] | no_license | nischalmishra/tempo | 4af5f699535d209a2e8c1e7655abd689faf6a90d | cddb489de999edf6963e87a74ed58ff1a208d474 | refs/heads/master | 2021-05-15T16:02:19.033202 | 2017-10-18T16:42:37 | 2017-10-18T16:42:37 | 107,434,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,334 | py | # -*- coding: utf-8 -*-
"""
Created on Tue May 9 13:22:55 2017
@author: nmishra
"""
import os
import numpy as np
import pandas as pd
#from scipy.io.idl import readsav
import matplotlib.pyplot as plt
from scipy.io.idl import readsav
from outlier_detection import identify_saturation,\
reject_outlier_median,\
reject_outlier_mean,\
create_outlier_mask,\
create_final_mask,\
create_ORed_mask
from analytical_functions import plot_full_frame_image,\
plot_each_quad,\
plot_hist_image,\
plot_hist_each_quad
def get_size(filename):
"""
This function reads the filename and passes to the main function
TO DO : N/A
"""
fileinfo = os.stat(filename)
return fileinfo
def filter_outlier_median(quads):
if np.array(quads).ndim == 3:
ndims, nx_quad, ny_quad = quads.shape
else:
ndims = 1
nx_quad, ny_quad = quads.shape
hist_data = np.reshape(quads, (ndims*nx_quad*ny_quad, 1))
diff = abs(hist_data - np.median(hist_data)) # find the distance to the median
median_diff = np.median(diff) # find the median of this distance
measured_threshold = diff/median_diff if median_diff else 0.
outlier_filtered_data = hist_data[measured_threshold < 5.]
#print(outlier_filtered_data)
return outlier_filtered_data
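# Example (illustration): for quads = np.array([[1., 2., 1., 2., 1., 50.]])
# the median is 1.5 and the median absolute deviation is 0.5, so 50. lies
# 97 MADs from the median and is dropped; the other five values are kept.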
def perform_smear_subtraction(active_quad, int_time):
    # The underlying assumption in smear subtraction is that the dark current
    # in the storage region is really small and hence neglected from the analysis.
    # Typically, Csmear = tFT / (ti + tFT) * (AVG[C(w)] - DCStor * tRO)
    tFT = 8.0*10**3  # frame-transfer time: 8 ms expressed in microseconds
    ti = int_time
    smear_factor = (tFT / (ti + tFT)) * np.mean(active_quad, axis=0)
    smear_subtracted_quad = active_quad - smear_factor[None, :]
    return smear_subtracted_quad, smear_factor
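# Quick numeric check (illustration): with tFT = 8000 us and ti = 32000 us,
# smear_factor = 0.2 * column mean, i.e. 20% of the average column signal is
# attributed to frame-transfer smear at that integration time.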
def plot_hist(even_samples, odd_samples, title, figure_name):
if np.array(even_samples).ndim == 3:
num_dims, nx_quad, ny_quad = even_samples.shape
elif np.array(even_samples).ndim == 2:
nx_quad, ny_quad = even_samples.shape
num_dims = 1
else:
nx_quad = 1
ny_quad = len(even_samples)
num_dims = 1
if np.array(odd_samples).ndim == 3:
num_dims1, nx_quad1, ny_quad1 = odd_samples.shape
elif np.array(odd_samples).ndim == 2:
nx_quad1, ny_quad1 = odd_samples.shape
num_dims1 = 1
else:
nx_quad1 = 1
ny_quad1 = len(odd_samples)
num_dims1 = 1
mean_diff = np.mean(odd_samples)- np.mean(even_samples)
text1 = 'Mean Diff (Odd-Even) = ' + str(round(mean_diff, 2)) +'DN'
#text2 = 'Uncertainty(Even) = '+ round(100*np.std(even_samples)/np.mean(even_samples))+'%'
#text3 = 'Uncertainty(Odd) = '+ round(100*np.std(even_samples)/np.mean(even_samples))+'%'
#print(text2)
#print(text3)
plt.figure(figsize=(8, 5))
plt.hist(np.reshape(even_samples, (num_dims*nx_quad* ny_quad, 1)),
facecolor='red', label='Even Lines Samples')
plt.hist(np.reshape(odd_samples, (num_dims1*nx_quad1* ny_quad1, 1)),
facecolor='blue', label='Odd Lines Samples')
plt.grid(True, linestyle=':')
legend = plt.legend(loc='best', ncol=1, shadow=True,
prop={'size':10}, numpoints=1)
legend.get_frame().set_edgecolor('wheat')
legend.get_frame().set_linewidth(2.0)
plt.xlim(750, 850)
plt.ylim(0, 1000)
plt.text(815, 300, text1)
plt.ylabel('Frequency (# of pixels)', fontsize=12,
fontweight="bold")
plt.xlabel('Signal Counts (DN)', fontsize=12,
fontweight="bold")
plt.title(title)
#plt.show()
#cc
plt.savefig(figure_name, dpi=100, bbox_inches="tight")
plt.close('all')
def perform_bias_subtraction(active_quad, trailing_overclocks):
    # separate out even and odd detectors
ndims, nx_quad, ny_quad = active_quad.shape
bias_subtracted_quad = np.array([[[0]*ndims]*ny_quad]*nx_quad)
even_detector_bias = trailing_overclocks[:, :, ::2]
avg_bias_even = np.mean(even_detector_bias, axis=2)
odd_detector_bias = trailing_overclocks[:, :, 1::2]
avg_bias_odd = np.mean(odd_detector_bias, axis=2)
even_detector_active_quad = active_quad[:, :, ::2]
odd_detector_active_quad = active_quad[:, :, 1::2]
bias_subtracted_quad_even = even_detector_active_quad - avg_bias_even[:, :, None]
bias_subtracted_quad_odd = odd_detector_active_quad - avg_bias_odd[:, :, None]
bias_subtracted_quad = np.reshape(bias_subtracted_quad, (ndims, ny_quad, nx_quad))
bias_subtracted_quad[:, :, ::2] = bias_subtracted_quad_even
bias_subtracted_quad[:, :, 1::2] = bias_subtracted_quad_odd
return bias_subtracted_quad
def plot_few_tsocs(even_samples_avg, odd_samples_avg, figure_name, title):
# let's take the mean tsoc for 100 frames
nrows = 2
ncols = 1
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(10, 10))
fig.subplots_adjust(left=0.125, right=0.95, bottom=0.1, top=0.9,
wspace=0.3, hspace=.25)
even_samples_avg = even_samples_avg[:, 5:]
ax[0].plot(even_samples_avg, '.', label='Even Lines')
ax[0].set_title(title+' (Even Lines)', fontsize=12, fontweight='bold')
#ax[0].set_xlabel('Pixel indices (#)', fontsize=12, fontweight='bold')
ax[0].set_ylabel('Serial Overclock Signal (DN)', fontsize=12, fontweight='bold')
#ax[0].set_ylim(800, 900)
# Now for the odd
#print(np.max(even_samples_avg))
#
odd_samples = odd_samples_avg[:, 5:]
rows, cols = odd_samples.shape
odd_samples_avg = odd_samples[:, 0:cols-1]
#print(np.max(odd_samples_avg))
ax[1].plot(odd_samples_avg, '.', label='Odd Lines')
#print(np.std(odd_samples_avg[:,6])/np.mean(odd_samples_avg[:,6]))
ax[1].set_title(title+' (Odd Lines)', fontsize=12, fontweight='bold')
ax[1].set_xlabel('Pixel indices (#)', fontsize=12, fontweight='bold')
ax[1].set_ylabel('Serial Overclock Signal (DN)', fontsize=12, fontweight='bold')
#ax[1].set_ylim(800, 900)
    plt.savefig(figure_name, dpi=100, bbox_inches="tight")
    plt.close('all')
return np.mean(even_samples_avg, axis=1), np.mean(odd_samples_avg, axis=1)
def main():
"""
    The main function
"""
#nx_quad = 1056 # For Tempo
#ny_quad = 1046 # For Tempo
#nlat = nx_quad*2
#nspec = ny_quad*2
file_path1 = r'F:\TEMPO\Data\GroundTest\FPS\Integration_Sweep\Light\Saved_quads'
save_dir = r'C:\Users\nmishra\Workspace\TEMPO\Data\GroundTest\FPS\Signal_dependent_offset\Light_Data'
all_int_files = [each for each in os.listdir(file_path1) \
if each.endswith('.dat.sav')]
if 'Integration_Sweep' in file_path1:
saturated_collects = ['FT6_LONG_INT_130018.dat.sav',#'FT6_SHORT_INT_0.dat.sav',
'FT6_LONG_INT_134990.dat.sav',
'FT6_LONG_INT_139961.dat.sav', 'FT6_LONG_INT_145028.dat.sav',
'FT6_LONG_INT_149999.dat.sav', 'FT6_LONG_INT_154970.dat.sav',
'FT6_LONG_INT_160037.dat.sav', 'FT6_LONG_INT_165008.dat.sav',
'FT6_LONG_INT_169980.dat.sav', 'FT6_LONG_INT_175047.dat.sav',
'FT6_LONG_INT_180018.dat.sav', 'FT6_LONG_INT_184989.dat.sav',
'FT6_LONG_INT_189960.dat.sav', 'FT6_LONG_INT_195027.dat.sav',
'FT6_LONG_INT_199999.dat.sav']
elif 'Intensity_Sweep' in file_path1:
saturated_collects = ['162_OP_INT_118000.dat.sav', '164_OP_INT_118000.dat.sav',
'166_OP_INT_118000.dat.sav', '168_OP_INT_118000.dat.sav',
'170_OP_INT_118000.dat.sav', '172_OP_INT_118000.dat.sav',
'174_OP_INT_118000.dat.sav', '176_OP_INT_118000.dat.sav',
'178_OP_INT_118000.dat.sav', '180_OP_INT_118000.dat.sav',
'182_OP_INT_118000.dat.sav', '184_OP_INT_118000.dat.sav',
'186_OP_INT_118000.dat.sav', '188_OP_INT_118000.dat.sav',
'190_OP_INT_118000.dat.sav', '192_OP_INT_118000.dat.sav',
'194_OP_INT_118000.dat.sav', '196_OP_INT_118000.dat.sav',
'198_OP_INT_118000.dat.sav', '200_OP_INT_118000.dat.sav',
'202_OP_INT_118000.dat.sav']
nominal_int_files = [items for items in all_int_files
if not items.endswith(tuple(saturated_collects))
if items in all_int_files]
for i in range(0, 4):
dframe1 = pd.DataFrame()
dframe2 = pd.DataFrame()
dframe3 = pd.DataFrame()
all_int_time = []
active_quad_even_all = []
active_quad_odd_all = []
active_quad_even_all_outlier_filt = []
active_quad_odd_all_outlier_filt = []
tsoc_even_all = []
tsoc_odd_all = []
tsoc_even_all_outlier_filt = []
tsoc_odd_all_outlier_filt = []
unct_spectral_even = []
unct_spectral_odd = []
#nominal_int_files = [nominal_int_files[0], nominal_int_files[1]]
for data_files in nominal_int_files:
data_path_name_split = data_files.split('_')
data_file = os.path.join(file_path1, data_files)
print(data_file)
IDL_variable = readsav(data_file)
if 'Intensity_Sweep' in file_path1:
int_time = data_path_name_split[0]
string1 = 'VA_'
string2 = 'VA Setting = '
else:
int_time = round(int(data_path_name_split[-1].split('.')[0]))
string1 = 'Integ_time_'
string2 = 'Int.time = '
#print(int_time)
all_int_time.append(int_time)
quads = ['Quad A', 'Quad B', 'Quad C', 'Quad D']
all_full_frame = IDL_variable.q
quad = all_full_frame[:, i, :, :]
tsoc_all = quad[:, 4:1028, 1034:1056]
active_quad = np.mean(quad[:, 4:1028, 10:1034], axis=0)
active_quad, smear = perform_smear_subtraction(active_quad, int_time)
        active_quad[active_quad == 16383] = np.nan  # mask saturated pixels (14-bit full scale)
active_quad_even = active_quad[:, ::2]
active_quad_even = np.nanmean(active_quad_even, axis=1)
active_quad_odd = np.nanmean((active_quad[:, 1::2]), axis=1)
#active_quad_even_outlier_filt = np.mean((active_quad[:, ::2]), axis=1)
#active_quad_odd_outlier_filt = np.mean((active_quad[:, 1::2]), axis=1)
active_quad_even_all.append(active_quad_even)
active_quad_odd_all.append(active_quad_odd)
# active_quad_even_all_outlier_filt.append(active_quad_even_outlier_filt)
#active_quad_odd_all_outlier_filt.append(active_quad_odd_outlier_filt)
quad_dir = quads[i]
#----------------------------------------------------------------#
# Ok, let's plot the histogram of saved quads
all_frames_hist = 'all_frames_hist'
save_dir_image = os.path.join(save_dir, quad_dir, all_frames_hist)
if not os.path.exists(save_dir_image):
os.makedirs(save_dir_image)
# separate out even and odd lines
even_samples_all = tsoc_all[:, :, ::2]
odd_samples_all = tsoc_all[:, :, 1::2]
even_samples_avg = np.mean(even_samples_all, axis=0)
odd_samples_avg = np.mean(odd_samples_all, axis=0)
# tsoc_even_all.append(np.mean((even_samples_avg)))
# tsoc_even_all_outlier_filt.append(np.mean(filter_outlier_median(even_samples_avg)))
# tsoc_odd_all.append(np.mean((odd_samples_avg)))
# tsoc_odd_all_outlier_filt.append(np.mean(filter_outlier_median(odd_samples_avg)))
title = 'Histogram of Serial Overclocks (All 100 Frames)\n '+ quads[i]+', ' + string2 + str(int_time)#+ r" $\mu$" +'secs'
figure_name = save_dir_image + '/'+ string1 + str(int_time) + '_image.png'
plot_hist(even_samples_all, odd_samples_all, title, figure_name)
avg_frames_hist = 'avg_frames_hist'
save_dir_image = os.path.join(save_dir, quad_dir, avg_frames_hist)
if not os.path.exists(save_dir_image):
os.makedirs(save_dir_image)
title = 'Histogram of Serial Overclocks (Avg. of 100 Frames)\n '+ quads[i]+', ' + string2 + str(int_time)#+ r" $\mu$" +'secs'
figure_name = save_dir_image + '/'+ string1 + str(int_time) + '_image.png'
plot_hist(even_samples_avg, odd_samples_avg, title, figure_name)
final_two_lines = 'final_two_lines'
save_dir_image = os.path.join(save_dir, quad_dir, final_two_lines)
if not os.path.exists(save_dir_image):
os.makedirs(save_dir_image)
even_samples_used = np.mean(even_samples_avg, axis=1)
unct_spectral_even.append(np.std(even_samples_used)/np.mean(even_samples_used))
odd_samples_used = np.mean(odd_samples_avg, axis=1)
unct_spectral_odd.append(np.std(odd_samples_used)/np.mean(odd_samples_used))
title = 'Histogram of Serial Overclocks (Avg for even and odd lines)\n '+ quads[i]+', ' + string2 + str(int_time)#+ r" $\mu$" +'secs'
figure_name = save_dir_image + '/'+ string1 + str(int_time) + '_image.png'
#plot_hist(even_samples_used, odd_samples_used, title, figure_name)
tsoc_profile = 'tsoc_plot'
save_tsoc_profile = os.path.join(save_dir, quad_dir, tsoc_profile)
if not os.path.exists(save_tsoc_profile):
os.makedirs(save_tsoc_profile)
figure_name = save_tsoc_profile + '/'+ string1 + str(int_time) + '_image.png'
title = 'Profile of Serial Overclocks '+ quads[i]+', ' + string2 + str(int_time)#+ r" $\mu$" +'secs'
even_samples_mean, odd_samples_mean = plot_few_tsocs(even_samples_avg, odd_samples_avg, figure_name, title)
# save average in spatial direction
tsoc_even_all.append(even_samples_mean)
tsoc_odd_all.append(odd_samples_mean)
odd_even_lines = [even_samples_avg, odd_samples_avg]
odd_even_lines_name = ['Even Lines', 'Odd Lines']
for k in np.arange(0, 2):
# Lets make directory of
#k=1
median_plot = r'Outlier_median_tsoc'+'/'+ odd_even_lines_name[k]
folder_name_hist = 'Hist_plot'
folder_name_mask = 'Mask_plot'
folder_name_sat = 'Saturation_plot'
save_median_hist = os.path.join(save_dir, quad_dir, median_plot, folder_name_hist)
if not os.path.exists(save_median_hist):
os.makedirs(save_median_hist)
save_median_mask = os.path.join(save_dir, quad_dir, median_plot, folder_name_mask)
if not os.path.exists(save_median_mask):
os.makedirs(save_median_mask)
save_sat_mask = os.path.join(save_dir, quad_dir, median_plot, folder_name_sat)
if not os.path.exists(save_sat_mask):
os.makedirs(save_sat_mask)
figure_name = save_sat_mask + '/'+ string1 + str(int_time) + '_image.png'
sat_pixels = identify_saturation(odd_even_lines[k], int_time, figure_name)
outlier_filt_med, outlier_med = reject_outlier_median(odd_even_lines[k])
if len(outlier_filt_med) == 1:
outlier_med = 0
else:
outlier_med = outlier_med[1]
title = 'Binary Outlier Mask of Trailing Overclocks (' + quads[i]+', '+ odd_even_lines_name[k]+', '+ string2+str(int_time)#+' micro secs)'
figure_name = save_median_mask + '/'+ string1 + str(int_time) + '_image.png'
median_mask = create_outlier_mask(odd_even_lines[k], outlier_med,
title, figure_name)
title = 'Histogram of Trailing Overclocks (' + quads[i]+', '+ odd_even_lines_name[k]+', '+ string2+ str(int_time)+')'
title1 = 'outliers = '+ str(outlier_med.shape[0])
figure_name = save_median_hist + '/'+ string1 + str(int_time) + '_image.png'
xlim_low = [800, 825]
xlim_high = [815, 840]
#plot_hist_image(odd_even_lines[k], title, title1, outlier_filt_med, outlier_med, figure_name, xlim_low[k], xlim_high[k])
# print(all_int_time)
# print(active_quad_even_all_outlier_filt)
# print(active_quad_odd_all_outlier_filt)
# print(tsoc_even_all_outlier_filt)
# print(tsoc_odd_all_outlier_filt)
#
#dframe1 = pd.DataFrame(
# {'Avg_Active_Quad_even' : active_quad_even_all,
# 'Avg_Active_Quad_odd' : active_quad_odd_all,
# 'Avg_tsoc_Quad_even' : tsoc_even_all,
# 'Avg_tsoc_Quad_odd': tsoc_odd_all
# })
# dframe2 = pd.DataFrame(
# {'Int_time.' : all_int_time,
# 'Avg_Active_Quad_even' : active_quad_even_all_outlier_filt,
# 'Avg_Active_Quad_odd' : active_quad_odd_all_outlier_filt,
# 'Avg_tsoc_Quad_even' : tsoc_even_all_outlier_filt,
# 'Avg_tsoc_Quad_odd': tsoc_odd_all_outlier_filt
# })
# dframe3 = pd.DataFrame(
# {'Int_time.' : all_int_time,
# 'Unct_tsoc_Quad_even' : unct_spectral_even,
# 'Unct_tsoc_Quad_odd': unct_spectral_odd
# })
data_to_be_saved = np.concatenate((active_quad_even_all,active_quad_odd_all,tsoc_even_all,tsoc_odd_all), axis=0)
csv_save_dir = os.path.join(save_dir, quad_dir)
if not os.path.exists( csv_save_dir):
os.makedirs( csv_save_dir)
csv_file_name1 = csv_save_dir +'/'+ quads[i]+'_Signal_dependent_offset_more_outliers.csv'
np.savetxt(csv_file_name1, np.array(data_to_be_saved).T, delimiter=',', fmt='%1.2f')
#csv_file_name2 = quads[i]+'_Signal_dependent_offset_outlier_filt.csv'
#csv_file_name3 = quads[i]+'_Signal_dependent_offset_unct.csv'
#dframe1.to_csv(csv_save_dir+'/'+csv_file_name1)
#dframe2.to_csv(csv_save_dir+'/'+csv_file_name2)
#dframe3.to_csv(csv_save_dir+'/'+csv_file_name3)
#cc
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
89a0785d067ba85fdb5f0718923cb544b67b8140 | 00b1fe62aff1bbad885a1b13354239b07925c5c1 | /catalyst_rl/rl/scripts/load_db.py | e27de3d61db62001fe0d5978f118d899921047fa | [
"Apache-2.0"
] | permissive | catalyst-team/catalyst-rl | a78675c477bef478d73cd1e7101be6dbb7b586aa | 75ffa808e2bbb9071a169a1a9c813deb6a69a797 | refs/heads/master | 2021-09-22T08:36:12.161991 | 2021-09-13T05:59:12 | 2021-09-13T05:59:12 | 247,928,934 | 50 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,791 | py | #!/usr/bin/env python
# usage:
# catalyst_rl-rl load-db --db=redis --in-pkl ./my_db_0.pkl ./my_db_1.pkl
import argparse
import pickle
import numpy as np
from tqdm import tqdm
from catalyst_rl import utils
from catalyst_rl.rl.db import MongoDB, RedisDB
def build_args(parser):
parser.add_argument("--host", type=str, default="127.0.0.1")
parser.add_argument("--port", type=int, default=12000)
parser.add_argument(
"--in-pkl",
"-P",
nargs="+",
metavar="PKL_PATH",
dest="in_pkl",
required=True
)
parser.add_argument(
"--db",
type=str,
choices=["redis", "mongo"],
default=None,
required=True
)
parser.add_argument("--min-reward", type=int, default=None)
utils.boolean_flag(
parser, "use-sqil", default=False, help="Use SQIL – 0 reward"
)
return parser
def parse_args():
parser = argparse.ArgumentParser()
build_args(parser)
args = parser.parse_args()
return args
def main(args, _=None):
db_fn = RedisDB if args.db == "redis" else MongoDB
db = db_fn(host=args.host, port=args.port)
for in_pkl_ in args.in_pkl:
with open(in_pkl_, "rb") as fin:
trajectories = pickle.load(fin)
for trajectory in tqdm(trajectories):
trajectory = utils.unpack_if_needed(trajectory)
if args.min_reward is not None \
and sum(trajectory[-2]) < args.min_reward:
continue
if args.use_sqil:
observation, action, reward, done = trajectory
trajectory = observation, action, np.zeros_like(reward), done
db.put_trajectory(trajectory)
if __name__ == "__main__":
args = parse_args()
main(args)
| [
"[email protected]"
] | |
6a20e5f8abbe7f63a1bdaa934e00d7d7693ebcfb | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03014/s911800035.py | b678df13f7f47ee72f75b442123a31cda6f77d04 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,186 | py | h, w = map(int, input().split())
s = [list(input()) for _ in range(h)]
u_light = [[0]*w for _ in range(h)]  # consecutive cells that can be lit upward
d_light = [[0]*w for _ in range(h)]  # downward
l_light = [[0]*w for _ in range(h)]  # leftward
r_light = [[0]*w for _ in range(h)]  # rightward
def count_light(i, j):
if s[i][j] == '.':
return 1
else:
return -10000
for i in range(w):
d_light[-1][i] = count_light(-1, i)
u_light[0][i] = count_light(0, i)
for j in range(1, h):
for i in range(w):
d_light[-(j+1)][i] = max(d_light[-j][i], 0) + count_light(-(j+1), i)
u_light[j][i] = max(u_light[j-1][i], 0) + count_light(j, i)
for i in range(h):
l_light[i][0] = count_light(i, 0)
r_light[i][-1] = count_light(i, -1)
for j in range(1, w):
for i in range(h):
l_light[i][j] = max(l_light[i][j-1], 0) + count_light(i, j)
r_light[i][-(j+1)] = max(r_light[i][-j], 0) + count_light(i, -(j+1))
score = 0
tmp_score = 0
for i in range(h):
for j in range(w):
tmp_score = u_light[i][j] + d_light[i][j] + l_light[i][j] + r_light[i][j]
if tmp_score > score:
score = tmp_score
print(score-3)  # the lamp cell is counted once per direction, so remove the 3 extra counts
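# Traced example (hypothetical input): h = w = 2 with rows ".#" and ".."
# gives the best lamp at (1, 0), lighting (0,0), (1,0) and (1,1), so the
# program prints 3.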
| [
"[email protected]"
] | |
0c85cf23abb1d04fd0e059f646b0947f308e3492 | b1a6c44495c3a0cf8ac3f6df52d5a984cdb7b471 | /cocotbext/pcie/core/caps/pcie.py | 6bfd9e4f79d19070b5c39f930008bccafb060fa1 | [
"MIT"
] | permissive | psumesh/cocotbext-pcie | a678384b2bdbb1d2d79d12b639e78b1b3dba2c0d | 17d98d16da10e5839cf6826c08fc80971fd50298 | refs/heads/master | 2023-08-15T02:53:44.731590 | 2021-09-26T10:58:42 | 2021-09-26T10:58:42 | 358,151,301 | 0 | 0 | MIT | 2021-04-16T16:36:25 | 2021-04-15T06:27:52 | null | UTF-8 | Python | false | false | 32,943 | py | """
Copyright (c) 2021 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .common import PcieCapId, PcieCap
from .common import PcieExtCapId, PcieExtCap
from ..utils import byte_mask_update
class PcieCapability(PcieCap):
"""PCI Express capability"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cap_id = PcieCapId.EXP
self.length = 15
# PCIe capability registers
# PCIe capabilities
self.pcie_capability_version = 2
self.pcie_device_type = 0
self.pcie_slot_implemented = False
self.interrupt_message_number = 0
# Device capabilities
self.max_payload_size_supported = 0x5
self.phantom_functions_supported = 0
self.extended_tag_supported = True
self.endpoint_l0s_acceptable_latency = 0x7
self.endpoint_l1_acceptable_latency = 0x7
self.role_based_error_reporting = True # TODO check ECN
self.captured_slot_power_limit_value = 0
self.captured_slot_power_limit_scale = 0
self.function_level_reset_capability = False
# Device control
self.correctable_error_reporting_enable = False
self.non_fatal_error_reporting_enable = False
self.fatal_error_reporting_enable = False
self.unsupported_request_reporting_enable = False
self.enable_relaxed_ordering = True
self.max_payload_size = 0x0
self.extended_tag_field_enable = False
self.phantom_functions_enable = False
self.aux_power_pm_enable = False
self.enable_no_snoop = True
self.max_read_request_size = 0x2
# Device status
self.correctable_error_detected = False
self.nonfatal_error_detected = False
self.fatal_error_detected = False
self.unsupported_request_detected = False
self.aux_power_detected = False
self.transactions_pending = False
self.emergency_power_reduction_detected = False
# Link capabilities
self.max_link_speed = 0
self.max_link_width = 0
self.aspm_support = 0
self.l0s_exit_latency = 0
self.l1_exit_latency = 0
self.clock_power_management = False
self.surprise_down_error_reporting_capability = False
self.data_link_layer_link_active_reporting_capable = False
self.link_bandwidth_notification_capability = False
self.aspm_optionality_compliance = False
self.port_number = 0
# Link control
self.aspm_control = 0
self.read_completion_boundary = False
self.link_disable = False
self.common_clock_configuration = False
self.extended_synch = False
self.enable_clock_power_management = False
self.hardware_autonomous_width_disable = False
self.link_bandwidth_management_interrupt_enable = False
self.link_autonomous_bandwidth_interrupt_enable = False
self.drs_signalling_control = 0
# Link status
self.current_link_speed = 0
self.negotiated_link_width = 0
self.link_training = False
self.slot_clock_configuration = False
self.data_link_layer_link_active = False
self.link_bandwidth_management_status = False
self.link_autonomous_bandwidth_status = False
# Slot capabilities
self.attention_button_present = False
self.power_controller_present = False
self.mrl_sensor_present = False
self.attention_indicator_present = False
self.power_indicator_present = False
self.hot_plug_surprise = False
self.hot_plug_capable = False
self.slot_power_limit_value = 0
self.slot_power_limit_scale = 0
self.electromechanical_interlock_present = False
self.no_command_completed_support = False
self.physical_slot_number = 0
# Slot control
self.attention_button_pressed_enable = False
self.power_fault_detected_enable = False
self.mrl_sensor_changed_enable = False
self.presence_detect_changed_enable = False
self.command_completed_interrupt_enable = False
self.hot_plug_interrupt_enable = False
self.attention_indicator_control = 0
self.power_indicator_control = 0
self.power_controller_control = False
self.electromechanical_interlock_control = False
self.data_link_layer_state_changed_enable = False
self.auto_slot_power_limit_disable = False
# Slot status
self.attention_button_pressed = False
self.power_fault_detected = False
self.mrl_sensor_changed = False
self.presence_detect_changed = False
self.command_completed = False
self.mrl_sensor_state = False
self.presence_detect_state = False
self.electromechanical_interlock_status = False
self.data_link_layer_state_changed = False
# Root control
self.system_error_on_correctable_error_enable = False
self.system_error_on_non_fatal_error_enable = False
self.system_error_on_fatal_error_enable = False
self.pme_interrupt_enable = False
self.crs_software_visibility_enable = False
# Root capabilities
self.crs_software_visibility = False
# Root status
self.pme_requester_id = 0
self.pme_status = False
self.pme_pending = False
# Device capabilities 2
self.completion_timeout_ranges_supported = 0
self.completion_timeout_disable_supported = False
self.ari_forwarding_supported = False
self.atomic_op_forwarding_supported = False
self.atomic_op_32_bit_completer_supported = False
self.atomic_op_64_bit_completer_supported = False
self.cas_128_bit_completer_supported = False
self.no_ro_enabled_pr_pr_passing = False
self.ltr_mechanism_supported = False
self.tph_completer_supported = 0
self.obff_supported = 0
self.extended_fmt_field_supported = False
self.end_end_tlp_prefix_supported = False
self.max_end_end_tlp_prefix = 0
self.emergency_power_reduction_supported = 0
self.emergency_power_reduction_initialization_required = False
self.frs_supported = False
# Device control 2
self.completion_timeout_value = 0
self.completion_timeout_disable = False
self.ari_forwarding_enable = False
self.atomic_op_requester_enable = False
self.atomic_op_egress_blocking = False
self.ido_request_enable = False
self.ido_completion_enable = False
self.ltr_mechanism_enable = False
self.emergency_power_reduction_request = False
self.ten_bit_tag_requester_enable = False
self.obff_enable = 0
self.end_end_tlp_prefix_blocking = False
# Device status 2
# Link capabilities 2
self.supported_link_speeds = 0
self.crosslink_supported = False
self.lower_skp_os_generation_supported_speeds = 0
self.lower_skp_os_reception_supported_speeds = 0
self.retimer_presence_detect_supported = False
self.two_retimers_presence_detect_supported = False
self.drs_supported = False
# Link control 2
self.target_link_speed = 0
self.enter_compliance = False
self.hardware_autonomous_speed_disable = False
self.selectable_deemphasis = False
self.transmit_margin = 0
self.enter_modified_compliance = False
self.compliance_sos = False
self.compliance_preset_deemphasis = 0
# Link status 2
self.current_deemphasis_level = False
self.equalization_8gt_complete = False
self.equalization_8gt_phase_1_successful = False
self.equalization_8gt_phase_2_successful = False
self.equalization_8gt_phase_3_successful = False
self.link_equalization_8gt_request = False
self.retimer_presence_detected = False
self.two_retimers_presence_detected = False
self.crosslink_resolution = 0
self.downstream_component_presence = 0
self.drs_message_received = False
# Slot capabilities 2
# Slot control 2
# Slot status 2
"""
PCIe Capability
31 0
+---------------------------------+----------------+----------------+
| PCIe Capabilities | Next Cap | PCIe Cap | 0 0x00
+---------------------------------+----------------+----------------+
| Device Capabilities | 1 0x04
+---------------------------------+---------------------------------+
| Device Status | Device Control | 2 0x08
+---------------------------------+---------------------------------+
| Link Capabilities | 3 0x0C
+---------------------------------+---------------------------------+
| Link Status | Link Control | 4 0x10
+---------------------------------+---------------------------------+
| Slot Capabilities | 5 0x14
+---------------------------------+---------------------------------+
| Slot Status | Slot Control | 6 0x18
+---------------------------------+---------------------------------+
| Root Capabilities | Root Control | 7 0x1C
+---------------------------------+---------------------------------+
| Root status | 8 0x20
+-------------------------------------------------------------------+
| Device Capabilities 2 | 9 0x24
+---------------------------------+---------------------------------+
| Device Status 2 | Device Control 2 | 10 0x28
+---------------------------------+---------------------------------+
| Link Capabilities 2 | 11 0x2C
+---------------------------------+---------------------------------+
| Link Status 2 | Link Control 2 | 12 0x30
+---------------------------------+---------------------------------+
| Slot Capabilities 2 | 13 0x34
+---------------------------------+---------------------------------+
| Slot Status 2 | Slot Control 2 | 14 0x38
+---------------------------------+---------------------------------+
"""
async def _read_register(self, reg):
if reg == 0:
# PCIe capabilities
            val = (self.pcie_capability_version & 0xf) << 16
val |= (self.pcie_device_type & 0xf) << 20
val |= bool(self.pcie_slot_implemented) << 24
val |= (self.interrupt_message_number & 0x1f) << 25
return val
elif reg == 1:
# Device capabilities
val = self.max_payload_size_supported & 0x7
val |= (self.phantom_functions_supported & 0x3) << 3
val |= bool(self.extended_tag_supported) << 5
val |= (self.endpoint_l0s_acceptable_latency & 0x7) << 6
val |= (self.endpoint_l1_acceptable_latency & 7) << 9
val |= bool(self.role_based_error_reporting) << 15
val |= (self.captured_slot_power_limit_value & 0xff) << 18
val |= (self.captured_slot_power_limit_scale & 0x3) << 26
val |= bool(self.function_level_reset_capability) << 28
return val
elif reg == 2:
# Device control
val = bool(self.correctable_error_reporting_enable) << 0
val |= bool(self.non_fatal_error_reporting_enable) << 1
val |= bool(self.fatal_error_reporting_enable) << 2
val |= bool(self.unsupported_request_reporting_enable) << 3
val |= bool(self.enable_relaxed_ordering) << 4
val |= (self.max_payload_size & 0x7) << 5
val |= bool(self.extended_tag_field_enable) << 8
val |= bool(self.phantom_functions_enable) << 9
val |= bool(self.aux_power_pm_enable) << 10
val |= bool(self.enable_no_snoop) << 11
val |= (self.max_read_request_size & 0x7) << 12
# Device status
val |= bool(self.correctable_error_detected) << 16
val |= bool(self.nonfatal_error_detected) << 17
val |= bool(self.fatal_error_detected) << 18
val |= bool(self.unsupported_request_detected) << 19
val |= bool(self.aux_power_detected) << 20
val |= bool(self.transactions_pending) << 21
val |= bool(self.emergency_power_reduction_detected) << 22
return val
elif reg == 3:
# Link capabilities
val = self.max_link_speed & 0xf
            val |= (self.max_link_width & 0x3f) << 4
            val |= (self.aspm_support & 0x3) << 10
            val |= (self.l0s_exit_latency & 0x7) << 12
            val |= (self.l1_exit_latency & 0x7) << 15
val |= bool(self.clock_power_management) << 18
val |= bool(self.surprise_down_error_reporting_capability) << 19
val |= bool(self.data_link_layer_link_active_reporting_capable) << 20
val |= bool(self.link_bandwidth_notification_capability) << 21
val |= bool(self.aspm_optionality_compliance) << 22
val |= (self.port_number & 0xff) << 24
return val
elif reg == 4:
# Link control
val = self.aspm_control & 0x3
val |= bool(self.read_completion_boundary) << 3
val |= bool(self.link_disable) << 4
val |= bool(self.common_clock_configuration) << 6
val |= bool(self.extended_synch) << 7
val |= bool(self.enable_clock_power_management) << 8
val |= bool(self.hardware_autonomous_width_disable) << 9
val |= bool(self.link_bandwidth_management_interrupt_enable) << 10
val |= bool(self.link_autonomous_bandwidth_interrupt_enable) << 11
val |= (self.drs_signalling_control & 0x3) << 14
# Link status
val |= (self.current_link_speed & 0xf) << 16
val |= (self.negotiated_link_width & 0x3f) << 20
val |= bool(self.link_training) << 27
val |= bool(self.slot_clock_configuration) << 28
val |= bool(self.data_link_layer_link_active) << 29
val |= bool(self.link_bandwidth_management_status) << 30
val |= bool(self.link_autonomous_bandwidth_status) << 31
return val
elif reg == 5:
# Slot capabilities
val = bool(self.attention_button_present)
val |= bool(self.power_controller_present) << 1
val |= bool(self.mrl_sensor_present) << 2
val |= bool(self.attention_indicator_present) << 3
val |= bool(self.power_indicator_present) << 4
val |= bool(self.hot_plug_surprise) << 5
val |= bool(self.hot_plug_capable) << 6
val |= (self.slot_power_limit_value & 0xff) << 7
val |= (self.slot_power_limit_scale & 0x3) << 15
val |= bool(self.electromechanical_interlock_present) << 17
val |= bool(self.no_command_completed_support) << 18
val |= (self.physical_slot_number & 0x1fff) << 19
return val
elif reg == 6:
# Slot control
val = bool(self.attention_button_pressed_enable) << 0
val |= bool(self.power_fault_detected_enable) << 1
val |= bool(self.mrl_sensor_changed_enable) << 2
val |= bool(self.presence_detect_changed_enable) << 3
val |= bool(self.command_completed_interrupt_enable) << 4
val |= bool(self.hot_plug_interrupt_enable) << 5
val |= (self.attention_indicator_control & 0x3) << 6
val |= (self.power_indicator_control & 0x3) << 8
val |= bool(self.power_controller_control) << 10
val |= bool(self.electromechanical_interlock_control) << 11
val |= bool(self.data_link_layer_state_changed_enable) << 12
val |= bool(self.auto_slot_power_limit_disable) << 13
# Slot status
val |= bool(self.attention_button_pressed) << 16
val |= bool(self.power_fault_detected) << 17
val |= bool(self.mrl_sensor_changed) << 18
val |= bool(self.presence_detect_changed) << 19
val |= bool(self.command_completed) << 20
val |= bool(self.mrl_sensor_state) << 21
val |= bool(self.presence_detect_state) << 22
val |= bool(self.electromechanical_interlock_status) << 23
val |= bool(self.data_link_layer_state_changed) << 24
return val
elif reg == 7:
# Root control
val = bool(self.system_error_on_correctable_error_enable) << 0
val |= bool(self.system_error_on_non_fatal_error_enable) << 1
val |= bool(self.system_error_on_fatal_error_enable) << 2
val |= bool(self.pme_interrupt_enable) << 3
val |= bool(self.crs_software_visibility_enable) << 4
# Root capabilities
val |= bool(self.crs_software_visibility) << 16
return val
elif reg == 8:
# Root status
val = self.pme_requester_id & 0xffff
val |= bool(self.pme_status) << 16
val |= bool(self.pme_pending) << 17
return val
elif reg == 9:
# Device capabilities 2
val = self.completion_timeout_ranges_supported & 0xf
val |= bool(self.completion_timeout_disable_supported) << 4
val |= bool(self.ari_forwarding_supported) << 5
val |= bool(self.atomic_op_forwarding_supported) << 6
val |= bool(self.atomic_op_32_bit_completer_supported) << 7
val |= bool(self.atomic_op_64_bit_completer_supported) << 8
val |= bool(self.cas_128_bit_completer_supported) << 9
val |= bool(self.no_ro_enabled_pr_pr_passing) << 10
val |= bool(self.ltr_mechanism_supported) << 11
val |= (self.tph_completer_supported & 0x3) << 12
val |= (self.obff_supported & 0x3) << 18
val |= bool(self.extended_fmt_field_supported) << 20
val |= bool(self.end_end_tlp_prefix_supported) << 21
val |= (self.max_end_end_tlp_prefix & 0x3) << 22
val |= (self.emergency_power_reduction_supported & 0x3) << 24
val |= bool(self.emergency_power_reduction_initialization_required) << 26
val |= bool(self.frs_supported) << 31
return val
elif reg == 10:
# Device control 2
val = self.completion_timeout_value & 0xf
val |= bool(self.completion_timeout_disable) << 4
val |= bool(self.ari_forwarding_enable) << 5
val |= bool(self.atomic_op_requester_enable) << 6
val |= bool(self.atomic_op_egress_blocking) << 7
val |= bool(self.ido_request_enable) << 8
val |= bool(self.ido_completion_enable) << 9
val |= bool(self.ltr_mechanism_enable) << 10
val |= bool(self.emergency_power_reduction_request) << 11
val |= bool(self.ten_bit_tag_requester_enable) << 12
val |= (self.obff_enable & 0x3) << 13
val |= bool(self.end_end_tlp_prefix_blocking) << 15
# Device status 2
return val
elif reg == 11:
# Link capabilities 2
val = (self.supported_link_speeds & 0x7f) << 1
val |= bool(self.crosslink_supported) << 8
val |= (self.lower_skp_os_generation_supported_speeds & 0x7f) << 9
val |= (self.lower_skp_os_reception_supported_speeds & 0x7f) << 16
val |= bool(self.retimer_presence_detect_supported) << 23
val |= bool(self.two_retimers_presence_detect_supported) << 24
val |= bool(self.drs_supported) << 31
return val
elif reg == 12:
# Link control 2
val = self.target_link_speed & 0xf
val |= bool(self.enter_compliance) << 4
val |= bool(self.hardware_autonomous_speed_disable) << 5
val |= bool(self.selectable_deemphasis) << 6
val |= (self.transmit_margin & 0x7) << 7
val |= bool(self.enter_modified_compliance) << 10
val |= bool(self.compliance_sos) << 11
val |= (self.compliance_preset_deemphasis & 0xf) << 12
# Link status 2
val |= bool(self.current_deemphasis_level) << 16
val |= bool(self.equalization_8gt_complete) << 17
val |= bool(self.equalization_8gt_phase_1_successful) << 18
val |= bool(self.equalization_8gt_phase_2_successful) << 19
val |= bool(self.equalization_8gt_phase_3_successful) << 20
val |= bool(self.link_equalization_8gt_request) << 21
val |= bool(self.retimer_presence_detected) << 22
val |= bool(self.two_retimers_presence_detected) << 23
val |= (self.crosslink_resolution & 0x3) << 24
val |= (self.downstream_component_presence & 0x7) << 27
val |= bool(self.drs_message_received) << 31
return val
else:
return 0
async def _write_register(self, reg, data, mask):
if reg == 2:
# Device control
if mask & 0x1:
self.correctable_error_reporting_enable = bool(data & 1 << 0)
self.non_fatal_error_reporting_enable = bool(data & 1 << 1)
self.fatal_error_reporting_enable = bool(data & 1 << 2)
self.unsupported_request_reporting_enable = bool(data & 1 << 3)
self.enable_relaxed_ordering = bool(data & 1 << 4)
self.max_payload_size = (data >> 5) & 0x7
if mask & 0x2:
self.extended_tag_field_enable = bool(data & 1 << 8)
self.phantom_functions_enable = bool(data & 1 << 9)
self.aux_power_pm_enable = bool(data & 1 << 10)
self.enable_no_snoop = bool(data & 1 << 11)
self.max_read_request_size = (data >> 12) & 0x7
if data & 1 << 15:
await self.initiate_function_level_reset()
# Device status
if mask & 0x4:
if data & 1 << 16:
self.correctable_error_detected = False
if data & 1 << 17:
self.nonfatal_error_detected = False
if data & 1 << 18:
self.fatal_error_detected = False
if data & 1 << 19:
self.unsupported_request_detected = False
if data & 1 << 22:
self.emergency_power_reduction_detected = False
elif reg == 4:
# Link control
if mask & 0x1:
self.aspm_control = data & 3
self.read_completion_boundary = bool(data & 1 << 4)
if data & 1 << 5:
await self.initiate_retrain_link()
self.common_clock_configuration = bool(data & 1 << 6)
self.extended_synch = bool(data & 1 << 7)
if mask & 0x2:
self.enable_clock_power_management = bool(data & 1 << 8)
self.hardware_autonomous_width_disable = bool(data & 1 << 9)
self.link_bandwidth_management_interrupt_enable = bool(data & 1 << 10)
self.link_autonomous_bandwidth_interrupt_enable = bool(data & 1 << 11)
self.drs_signalling_control = (data >> 14) & 0x3
# Link status
if mask & 0x8:
if data & 1 << 30:
self.link_bandwidth_management_status = False
if data & 1 << 31:
self.link_autonomous_bandwidth_status = False
elif reg == 6:
# Slot control
if mask & 0x1:
self.attention_button_pressed_enable = bool(data & 1 << 0)
self.power_fault_detected_enable = bool(data & 1 << 1)
self.mrl_sensor_changed_enable = bool(data & 1 << 2)
self.presence_detect_changed_enable = bool(data & 1 << 3)
self.command_completed_interrupt_enable = bool(data & 1 << 4)
self.hot_plug_interrupt_enable = bool(data & 1 << 5)
self.attention_indicator_control = (data >> 6) & 0x3
if mask & 0x2:
self.power_indicator_control = (data >> 8) & 0x3
self.power_controller_control = bool(data & 1 << 10)
self.electromechanical_interlock_control = bool(data & 1 << 11)
self.data_link_layer_state_changed_enable = bool(data & 1 << 12)
self.auto_slot_power_limit_disable = bool(data & 1 << 13)
# Slot status
if mask & 0x4:
if data & 1 << 16:
self.attention_button_pressed = False
if data & 1 << 17:
self.power_fault_detected = False
if data & 1 << 18:
self.mrl_sensor_changed = False
if data & 1 << 19:
self.presence_detect_changed = False
if data & 1 << 20:
self.command_completed = False
if data & 1 << 24:
self.data_link_layer_state_changed = False
elif reg == 7:
# Root control
if mask & 0x1:
self.system_error_on_correctable_error_enable = bool(data & 1 << 0)
self.system_error_on_non_fatal_error_enable = bool(data & 1 << 1)
self.system_error_on_fatal_error_enable = bool(data & 1 << 2)
self.pme_interrupt_enable = bool(data & 1 << 3)
self.crs_software_visibility_enable = bool(data & 1 << 4)
elif reg == 8:
# Root status
if mask & 0x4:
if data & 1 << 16:
self.pme_status = False
elif reg == 10:
# Device control 2
if mask & 0x1:
self.completion_timeout_value = data & 0xf
self.completion_timeout_disable = bool(data & 1 << 4)
self.ari_forwarding_enable = bool(data & 1 << 5)
self.atomic_op_requester_enable = bool(data & 1 << 6)
self.atomic_op_egress_blocking = bool(data & 1 << 7)
if mask & 0x2:
self.ido_request_enable = bool(data & 1 << 8)
self.ido_completion_enable = bool(data & 1 << 9)
self.ltr_mechanism_enable = bool(data & 1 << 10)
self.emergency_power_reduction_request = bool(data & 1 << 11)
self.ten_bit_tag_requester_enable = bool(data & 1 << 12)
self.obff_enable = (data >> 13) & 0x3
self.end_end_tlp_prefix_blocking = bool(data & 1 << 15)
# Device status 2
elif reg == 12:
# Link control 2
if mask & 0x1:
self.target_link_speed = data & 0xf
self.enter_compliance = bool(data & 1 << 4)
self.hardware_autonomous_speed_disable = bool(data & 1 << 5)
self.transmit_margin = self.transmit_margin & 0x6 | (data >> 7) & 0x1
if mask & 0x2:
self.transmit_margin = self.transmit_margin & 0x1 | (data >> 7) & 0x6
self.enter_modified_compliance = bool(data & 1 << 10)
self.compliance_sos = bool(data & 1 << 11)
                self.compliance_preset_deemphasis = (data >> 12) & 0xf
# Link status 2
if mask & 0x4:
self.link_equalization_8gt_request = bool(data & 1 << 21)
if data & 1 << 31:
self.drs_message_received = False
async def initiate_function_level_reset(self):
pass
async def initiate_retrain_link(self):
pass
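# Bit-packing sketch for the Device Control dword assembled by
# PcieCapability._read_register(2) above (illustrative values only):
#   max_payload_size = 0x2 (512 B)      -> bits [7:5]
#   enable_relaxed_ordering = True      -> bit 4
#   max_read_request_size = 0x2 (512 B) -> bits [14:12]
#   (0x2 << 5) | (1 << 4) | (0x2 << 12) == 0x2050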
class PcieExtendedCapability(PcieExtCap):
"""Secondary PCI Express extended capability"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cap_id = PcieExtCapId.EXP2
self.cap_ver = 1
self.length = 4
# Secondary PCIe extended capability registers
# Link control 3 register
self.perform_equalization = False
self.link_equalization_request_interrupt_enable = False
self.enable_lower_skp_os_generation = 0
# Lane error status
self.lane_error_status = 0
self.downstream_port_8gt_transmitter_preset = [0]*32
self.downstream_port_8gt_receiver_preset_hint = [0]*32
self.upstream_port_8gt_transmitter_preset = [0]*32
self.upstream_port_8gt_receiver_preset_hint = [0]*32
"""
Secondary PCIe Extended Capability
31 0
+-------------------------+-------+---------------------------------+
| Next Cap Offset | Ver | PCIe Ext Cap ID | 0 0x00
+-------------------------+-------+---------------------------------+
| Link Control 3 | 1 0x04
+-------------------------------------------------------------------+
| Lane Error Status | 2 0x08
+-------------------------------------------------------------------+
| Lane Equalization Control | 3 0x0C
+-------------------------------------------------------------------+
"""
async def _read_register(self, reg):
if reg == 1:
# Link Control 3
val = bool(self.perform_equalization)
val |= bool(self.link_equalization_request_interrupt_enable) << 1
val |= (self.enable_lower_skp_os_generation & 0x7f) << 9
return val
elif reg == 2:
# Lane Error Status
return self.lane_error_status & 0xffffffff
        elif reg < 19:
            # Lane equalization control (regs 3-18 cover lanes 0-31, two lanes per dword)
            val = self.downstream_port_8gt_transmitter_preset[(reg-3)*2] & 0xf
            val |= (self.downstream_port_8gt_receiver_preset_hint[(reg-3)*2] & 0x7) << 4
            val |= (self.upstream_port_8gt_transmitter_preset[(reg-3)*2] & 0xf) << 8
            val |= (self.upstream_port_8gt_receiver_preset_hint[(reg-3)*2] & 0x7) << 12
            val |= (self.downstream_port_8gt_transmitter_preset[(reg-3)*2+1] & 0xf) << 16
            val |= (self.downstream_port_8gt_receiver_preset_hint[(reg-3)*2+1] & 0x7) << 20
            val |= (self.upstream_port_8gt_transmitter_preset[(reg-3)*2+1] & 0xf) << 24
            val |= (self.upstream_port_8gt_receiver_preset_hint[(reg-3)*2+1] & 0x7) << 28
            return val
else:
return 0
async def _write_register(self, reg, data, mask):
if reg == 1:
# Link Control 3
if mask & 0x1:
self.perform_equalization = bool(data & 1 << 0)
self.link_equalization_request_interrupt_enable = bool(data & 1 << 1)
if mask & 0x2:
self.enable_lower_skp_os_generation = (data >> 9) & 0x7f
elif reg == 2:
# Lane Error Status
self.lane_error_status = byte_mask_update(self.lane_error_status, mask, self.lane_error_status & ~data) & 0xffffffff
| [
"[email protected]"
] | |
6321a92b239f65d266baddd409d0323cffabd579 | 89b6997b24e404c176358073626a8bfad7bcdb8e | /.history/courses/models_20210412233123.py | f22ac53d1746d99cb174542b2a8b300401606f5f | [] | no_license | mohamedhawas123/Education-platform-django | 513e64ac112880385402ce609077796578b4e9ee | 7b83e66bba66b8b2b1a007f5818a534653e6abfb | refs/heads/main | 2023-07-18T16:19:52.177886 | 2021-09-24T12:04:09 | 2021-09-24T12:04:09 | 352,306,462 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,324 | py | from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from .fields import OrderField
class Subject(models.Model):
title = models.CharField(max_length=200)
slug = models.SlugField(max_length=200, unique=True)
class Meta:
ordering = ('title', )
def __str__(self):
return self.title
class Course(models.Model):
owner = models.ForeignKey(User,related_name='courses_created' ,on_delete=models.CASCADE)
subject = models.ForeignKey(Subject, related_name='courses', on_delete=models.CASCADE)
title = models.CharField(max_length=200)
slug= models.SlugField(max_length=200, unique=True)
overView = models.TextField()
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ('-created', )
def __str__(self):
return self.title
class Module(models.Model):
    course = models.ForeignKey(Course, related_name='modules', on_delete=models.CASCADE)
    title = models.CharField(max_length=254)
    description = models.TextField(blank=True)
    order = OrderField(blank=True, for_fields=['course'])
class Content(models.Model):
    module = models.ForeignKey(Module, related_name='contents', on_delete=models.CASCADE)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, limit_choices_to={
        'model__in': (
            'text',
            'video',
            'image',
            'file'
        )
    })
    object_id = models.PositiveIntegerField()
    item = GenericForeignKey('content_type', 'object_id')
    order = OrderField(blank=True, for_fields=['module'])
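# Usage sketch (illustrative; assumes a Django shell with migrations applied
# and existing `user` and `module` objects):
#   from django.contrib.contenttypes.models import ContentType
#   text = Text.objects.create(owner=user, title='Intro', content='...')
#   Content.objects.create(
#       module=module,
#       content_type=ContentType.objects.get_for_model(text),
#       object_id=text.id,
#   )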
class ItemBase(models.Model):
owner = models.ForeignKey(User, related_name='%(class)s_related', on_delete=models.CASCADE)
title = models.CharField(max_length=250)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Text(ItemBase):
content = models.TextField()
class File(ItemBase):
file = models.FileField(upload_to='files')
class Image(ItemBase):
file = models.FileField(upload_to='images')
class Video(ItemBase):
    url = models.URLField()
| [
"[email protected]"
] | |
a887c48538b070bc949cb678b712fd5ea562c029 | e0980f704a573894350e285f66f4cf390837238e | /.history/streams/blocks_20201029145758.py | 3227cd805384affb4014e12555cfee402b57bfa8 | [] | no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,497 | py | from django import forms
from wagtail.core import blocks
from wagtail.images.blocks import ImageChooserBlock
from wagtail.contrib.table_block.blocks import TableBlock
class TitleBlock(blocks.StructBlock):
    text = blocks.CharBlock(
        required=True,
        help_text='Text to display',
    )
    class Meta:
        template = 'streams/title_block.html'
        icon = 'edit'
        label = 'Title'
        help_text = 'Centered text to display on the page.'
class LinkValue(blocks.StructValue):
"""Dodatkowao logika dla lików"""
def url(self) -> str:
internal_page = self.get('internal_page')
external_link = self.get('external_link')
if internal_page:
return internal_page.url
elif external_link:
return external_link
return ''
class Link(blocks.StructBlock):
link_text = blocks.CharBlock(
max_length=50,
        default='More details'
)
internal_page = blocks.PageChooserBlock(
required=False
)
external_link = blocks.URLBlock(
required=False
)
class Meta:
value_class = LinkValue
class Card(blocks.StructBlock):
    title = blocks.CharBlock(
        max_length=100,
        help_text='Bold title for this card. 100 characters max.'
    )
    text = blocks.TextBlock(
        max_length=255,
        help_text='Optional text for this card. 255 characters max.'
    )
    image = ImageChooserBlock(
        help_text='The image will be automatically cropped to 570 by 370 pixels'
    )
    link = Link(help_text='Choose a link')
class CardsBlock(blocks.StructBlock):
cards = blocks.ListBlock(
Card()
)
class Meta:
template = 'streams/card_block.html'
icon = 'image'
        label = 'Standard cards'
class RadioSelectBlock(blocks.ChoiceBlock):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.field.widget = forms.RadioSelect(
choices=self.field.widget.choices
)
class ImageAndTextBlock(blocks.StructBlock):
    image = ImageChooserBlock(help_text='The image is automatically cropped to 786 by 552 px.')
    image_alignment = RadioSelectBlock(
        choices=(
            ('left', 'Image on the left'),
            ('right', 'Image on the right'),
        ),
        default='left',
        help_text='Image on the left with text on the right, or image on the right with text on the left.'
    )
    title = blocks.CharBlock(
        max_length=60,
        help_text='Maximum length of 60 characters.'
    )
    text = blocks.CharBlock(
        max_length=140,
        required=False,
    )
    link = Link()
    class Meta:
        template = 'streams/image_and_text_block.html'
        icon = 'image'
        label = 'Image & Text'
class CallToActionBlock(blocks.StructBlock):
    title = blocks.CharBlock(
        max_length=200,
        help_text='Maximum of 200 characters.'
    )
    link = Link()
    class Meta:
        template = 'streams/call_to_action_block.html'
        icon = 'plus'
        label = 'Call to action'
class PricingTableBlock(TableBlock):
    """Pricing table block."""
    class Meta:
        template = 'streams/pricing_table_block.html'
        label = 'Pricing table'
        icon = 'table'
        help_text = 'Your pricing tables should always have 4 columns.'
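# Usage sketch (illustrative; a StreamField like this would live on a Wagtail
# Page model elsewhere in the project, and the field/key names are assumptions):
#   body = StreamField([
#       ('title', TitleBlock()),
#       ('cards', CardsBlock()),
#       ('image_and_text', ImageAndTextBlock()),
#       ('cta', CallToActionBlock()),
#       ('pricing_table', PricingTableBlock()),
#   ], null=True, blank=True)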
class RichTextWithTitleBlock(blocks.StructBlock):
    # The source file is truncated at this class definition; the body below is
    # a minimal assumed completion, not the original implementation.
    title = blocks.CharBlock(max_length=50)
    content = blocks.RichTextBlock()
    class Meta:
        template = 'streams/richtext_with_title_block.html'
        label = 'Rich text with title'
| [
"[email protected]"
] | |
6a10fdec032287788f43ac694db394d334627b95 | cd8f7ecd20c58ce1ae0fe3840f7c7ee961aa5819 | /Find Duplicate File in System.py | ac5ae45decb6dd46daedbe7378173039d67c8773 | [
"Apache-2.0"
] | permissive | sugia/leetcode | 9b0f2a3521b088f8f7e5633c2c6c17c76d33dcaf | 6facec2a54d1d9f133f420c9bce1d1043f57ebc6 | refs/heads/master | 2021-06-05T07:20:04.099488 | 2021-02-24T07:24:50 | 2021-02-24T07:24:50 | 29,124,136 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,859 | py | '''
Given a list of directory info including directory path, and all the files with contents in this directory, you need to find out all the groups of duplicate files in the file system in terms of their paths.
A group of duplicate files consists of at least two files that have exactly the same content.
A single directory info string in the input list has the following format:
"root/d1/d2/.../dm f1.txt(f1_content) f2.txt(f2_content) ... fn.txt(fn_content)"
It means there are n files (f1.txt, f2.txt ... fn.txt with content f1_content, f2_content ... fn_content, respectively) in directory root/d1/d2/.../dm. Note that n >= 1 and m >= 0. If m = 0, it means the directory is just the root directory.
The output is a list of group of duplicate file paths. For each group, it contains all the file paths of the files that have the same content. A file path is a string that has the following format:
"directory_path/file_name.txt"
Example 1:
Input:
["root/a 1.txt(abcd) 2.txt(efgh)", "root/c 3.txt(abcd)", "root/c/d 4.txt(efgh)", "root 4.txt(efgh)"]
Output:
[["root/a/2.txt","root/c/d/4.txt","root/4.txt"],["root/a/1.txt","root/c/3.txt"]]
Note:
No order is required for the final output.
You may assume the directory name, file name and file content only has letters and digits, and the length of file content is in the range of [1,50].
The number of files given is in the range of [1,20000].
You may assume no files or directories share the same name in the same directory.
You may assume each given directory info represents a unique directory. Directory path and file info are separated by a single blank space.
Follow-up beyond contest:
Imagine you are given a real file system, how will you search files? DFS or BFS?
If the file content is very large (GB level), how will you modify your solution?
If you can only read the file by 1kb each time, how will you modify your solution?
What is the time complexity of your modified solution? What is the most time-consuming part and memory consuming part of it? How to optimize?
How to make sure the duplicated files you find are not false positive?
'''
class Solution(object):
def findDuplicate(self, paths):
"""
:type paths: List[str]
:rtype: List[List[str]]
"""
# key = content (abcd), value = [file path1, file path2]
dic = {}
for path in paths:
tmp = path.split(' ')
for i in xrange(1, len(tmp)):
name, content = tmp[i].split('(')
if content in dic:
dic[content].append('/'.join([tmp[0], name]))
else:
dic[content] = ['/'.join([tmp[0], name])]
res = []
for k, v in dic.iteritems():
if len(v) > 1:
res.append(v)
return res
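# Usage sketch: the driver below is not part of the original solution file;
# it just replays Example 1 from the problem statement above.
if __name__ == '__main__':
    paths = ["root/a 1.txt(abcd) 2.txt(efgh)", "root/c 3.txt(abcd)",
             "root/c/d 4.txt(efgh)", "root 4.txt(efgh)"]
    print Solution().findDuplicate(paths)
    # expected groups (order-insensitive):
    # [['root/a/2.txt', 'root/c/d/4.txt', 'root/4.txt'],
    #  ['root/a/1.txt', 'root/c/3.txt']]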
| [
"[email protected]"
] | |
b18a2b8e878ac6dab0ef153d54eb8846e3615e8d | 1424812c4f211d3d5e356e8b3889a689162062f3 | /arcade/python/62_check_participants.py | f8957ee5841a7be2c906e5af02edf03edd07382b | [] | no_license | nazomeku/codefights | cb7d3c40be0809695ec524a87c88dbebcf5b47bc | b23f6816f9b5b0720feac1c49c31163923e0a554 | refs/heads/master | 2021-01-22T12:49:35.905165 | 2017-11-21T19:03:37 | 2017-11-21T19:03:37 | 102,357,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | """Given the list of participants, your task is to return the list of
games for which too few people signed up."""
def check_participants(participants):
return [a for a, b in enumerate(participants) if a > b]
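# Example: check_participants([1, 0, 0]) == [1, 2]; games whose index
# exceeds the number of sign-ups are reported as under-subscribed.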
| [
"[email protected]"
] | |
fce324c6496b5a5fd24b5262554147cad286f36a | 3efe2059de4c7efd1f58a385656d19098b7efd63 | /deepiu/tools/ensemble-inference-v2.py | 436205085a0efdf481c2d9609e180ad472bc2c8b | [] | no_license | yangyaoyunshu/image-caption-ai-challenger2017 | 5d2e82b2f8d70ac6d4eb7a0e70f6b406e551189b | 7f2c556587ea1e5c4583fe3b12b8d40c5a2aa2cc | refs/heads/master | 2021-08-31T22:48:55.886186 | 2017-12-23T07:27:04 | 2017-12-23T07:27:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,927 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# \file ensemble-inference-v2.py
# \author chenghuige
# \date 2017-10-21 14:56:40.017795
# \Description This is time consuming (about 1 hour and 12 minutes) and does not
#              perform better, so just using ensemble-inference.py will be fine
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('image_dir_', '/home/gezi/data2/data/ai_challenger/image_caption/pic', '')
flags.DEFINE_string('vocab', '/home/gezi/new/temp/image-caption/ai-challenger/tfrecord/seq-basic/vocab.txt', '')
flags.DEFINE_float('current_length_normalization_factor', None, '')
flags.DEFINE_float('length_normalization_fator', None, '')
import sys, os
import glob
import operator
import melt, gezi
from deepiu.util import text2ids
from deepiu.util.text2ids import texts2ids
from deepiu.util.text_predictor import TextPredictor
import numpy as np
input = sys.argv[1]
type = sys.argv[2]
text2ids.init()
if ',' in input:
files = input.split(',')
else:
files = glob.glob(input + '/model*.%s.txt' % type)
if not 'ensemble' in type:
files = [x for x in files if not 'ensemble' in x]
dir = os.path.dirname(files[0])
ensemble_input_file = 'ensemble.%s.txt' % type
print('files:', files, 'len(files)', len(files), file=sys.stderr)
print('ensemble input file:', ensemble_input_file, file=sys.stderr)
batch_size = int(sys.argv[3])
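# Typical invocation (argument meanings follow the sys.argv parsing above;
# the paths are placeholders):
#   python ensemble-inference-v2.py <results_dir_or_comma_separated_files> <type> <batch_size>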
num_imgs_done = 0
def _predict(predictor, imgs, texts_list, m):
global num_imgs_done
raw_imgs = [melt.read_image(os.path.join(FLAGS.image_dir_, img + '.jpg')) for img in imgs]
text_ids_list = [texts2ids(texts) for texts in texts_list]
raw_imgs = np.array(raw_imgs)
text_ids_list = np.array(text_ids_list)
print([len(x) for x in text_ids_list], sum([len(x) for x in text_ids_list]), \
'num_imgs_done', num_imgs_done, file=sys.stderr)
scores_list = predictor.bulk_predict(raw_imgs, text_ids_list)
if num_imgs_done == 0:
print(scores_list.shape, scores_list, file=sys.stderr)
for img, texts, scores in zip(imgs, texts_list, scores_list):
for text, score in zip(texts, scores):
m[img][text] = score
num_imgs_done += batch_size
def predict(predictor, imgs, texts, m):
batch_imgs = []
batch_texts = []
for img, text in zip(imgs, texts):
batch_imgs.append(img)
batch_texts.append(text)
if len(batch_imgs) == batch_size:
_predict(predictor, batch_imgs, batch_texts, m)
batch_imgs = []
batch_texts = []
if batch_imgs:
_predict(predictor, batch_imgs, batch_texts, m)
candidates = {}
for line in open(ensemble_input_file):
l = line.strip().split('\t')
img, texts = l[0], l[-2]
texts = texts.split(' ')
candidates[img] = texts
for file in files:
model_dir = file.replace('.%s.txt'%type, '')
ofile = os.path.join(dir, '%s.ensemble.%s.txt' % (model_dir, type))
print('model_dir:', model_dir, 'ofile:', ofile)
if gezi.non_empty(ofile):
continue
out = open(ofile, 'w')
Predictor = TextPredictor
image_model = None
image_checkpoint_file = FLAGS.image_checkpoint_file or '/home/gezi/data/image_model_check_point/inception_resnet_v2_2016_08_30.ckpt'
image_model_name = melt.image.get_imagenet_from_checkpoint(image_checkpoint_file).name
print('image_model_name:', image_model_name)
if not melt.has_image_model(model_dir, image_model_name):
image_model = melt.image.ImageModel(image_checkpoint_file, image_model_name)
print('image_model:', image_model, file=sys.stderr)
predictor = Predictor(model_dir, image_model=image_model, vocab_path=FLAGS.vocab,
current_length_normalization_factor=FLAGS.current_length_normalization_factor,
length_normalization_fator=FLAGS.length_normalization_fator)
#predictor = None
m = {}
for line in open(file):
l = line.strip().split('\t')
img, texts , scores = l[0], l[-2], l[-1]
if img not in m:
m[img] = {}
texts = texts.split(' ')
scores = map(float, scores.split(' '))
for text, score in zip(texts, scores):
m[img][text] = score
imgs_tocalc = []
texts_tocalc = []
for img, texts in candidates.items():
texts_ = [x for x in texts if x not in m[img]]
if texts_:
imgs_tocalc.append(img)
texts_tocalc.append(texts_)
predict(predictor, imgs_tocalc, texts_tocalc, m)
for img, result in m.items():
sorted_result = sorted(result.items(), key=operator.itemgetter(1), reverse=True)
texts = []
scores = []
for text, score in sorted_result:
texts.append(text)
scores.append(str(score))
texts = ' '.join(texts)
scores = ' '.join(scores)
    print(img, sorted_result[0][0], sorted_result[0][1], texts, scores, sep='\t', file=out)
| [
"[email protected]"
] | |
8ab8e5f98c41b9fb41c80d47225b946e72e9c11b | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startPyquil365.py | 7bd3571d7cf4002ea5c98aabb057ee26101e6441 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,533 | py | # qubit number=2
# total number=67
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += RX(-0.09738937226128368,2) # number=2
prog += H(1) # number=33
prog += Y(2) # number=56
prog += CZ(2,1) # number=34
prog += H(1) # number=35
prog += H(1) # number=3
prog += H(0) # number=45
prog += CNOT(2,1) # number=60
prog += CZ(1,0) # number=46
prog += H(0) # number=47
prog += Y(1) # number=15
prog += H(0) # number=64
prog += CZ(1,0) # number=65
prog += H(0) # number=66
prog += H(1) # number=19
prog += CZ(0,1) # number=20
prog += RX(-0.6000441968356504,1) # number=28
prog += H(1) # number=21
prog += H(1) # number=30
prog += CZ(0,1) # number=31
prog += H(1) # number=32
prog += H(1) # number=57
prog += CZ(0,1) # number=58
prog += H(1) # number=59
prog += CNOT(0,1) # number=51
prog += X(1) # number=52
prog += CNOT(0,1) # number=53
prog += CNOT(0,1) # number=50
prog += H(2) # number=29
prog += H(1) # number=36
prog += CZ(0,1) # number=37
prog += Y(2) # number=44
prog += H(1) # number=38
prog += Z(1) # number=55
prog += H(1) # number=61
prog += CZ(0,1) # number=62
prog += H(1) # number=63
prog += Z(1) # number=11
prog += RX(-1.1780972450961724,2) # number=54
prog += H(1) # number=42
prog += H(0) # number=39
prog += CZ(1,0) # number=40
prog += H(0) # number=41
prog += CNOT(2,1) # number=26
prog += Y(1) # number=14
prog += CNOT(1,0) # number=5
prog += X(1) # number=6
prog += Z(1) # number=8
prog += X(1) # number=7
prog += H(2) # number=43
prog += RX(-2.42845112122491,1) # number=25
# circuit end
return prog
def summarise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('1q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil365.csv","w")
    print(summarise_results(bitstrings),file=writefile)
writefile.close()
| [
"[email protected]"
] | |
24d0293db4c4da28e8dcbdb081b1ca13f6d8bde4 | 55e28e35db5bf6a844df3fb47080500b115a893e | /day6/test/fan2.py | d15faad00febdceb92f9769a970ee29416ca85f6 | [] | no_license | pylarva/Python | 5743ffa4a69db42b642d51b62f9e9b69ddbc1a72 | 71b484950e6dbdcf708726a68a3386d0d6ddc07f | refs/heads/master | 2020-04-19T09:11:11.195393 | 2017-11-16T07:32:59 | 2017-11-16T07:32:59 | 67,507,687 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | # !/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:lichengbing
# import fan1
#
# user_input = input('Enter a number: ')
# if hasattr(fan1, user_input):
# func = getattr(fan1, user_input)
# func()
# else:
# print('no module...')
user_input = input('Enter a URL: ')
k, v = user_input.split('/')
obj = __import__('lib.' + k, fromlist=True)
if hasattr(obj, v):
func = getattr(obj, v)
func()
else:
print('no module...')
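# Illustrative note (hypothetical package layout, not verified against this
# repo): with a package ``lib`` containing ``account.py`` that defines
# ``login()``, entering ``account/login`` imports ``lib.account`` and calls
# ``login()``.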
| [
"[email protected]"
] | |
3c21849b572848e17c219d6d9115dda9eaf3d56e | d66818f4b951943553826a5f64413e90120e1fae | /hackerrank/Algorithms/Cut the Tree/test.py | 71dbf8141286bc70cf3538beaf3323dcc9737296 | [
"MIT"
] | permissive | HBinhCT/Q-project | 0f80cd15c9945c43e2e17072416ddb6e4745e7fa | 19923cbaa3c83c670527899ece5c3ad31bcebe65 | refs/heads/master | 2023-08-30T08:59:16.006567 | 2023-08-29T15:30:21 | 2023-08-29T15:30:21 | 247,630,603 | 8 | 1 | MIT | 2020-07-22T01:20:23 | 2020-03-16T06:48:02 | Python | UTF-8 | Python | false | false | 796 | py | import unittest
import solution
class TestQ(unittest.TestCase):
def test_case_0(self):
self.assertEqual(solution.cutTheTree(
[100, 200, 100, 500, 100, 600],
[
[1, 2],
[2, 3],
[2, 5],
[4, 5],
[5, 6],
]
), 400)
def test_case_1(self):
self.assertEqual(solution.cutTheTree(
[205, 573, 985, 242, 830, 514, 592, 263, 142, 915],
[
[2, 8],
[10, 5],
[1, 7],
[6, 9],
[4, 3],
[8, 10],
[5, 1],
[7, 6],
[9, 4],
]
), 99)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
19280aa9b5205cfb4b3e115b73029117af55a39a | f524c132340474bf80784095321753e083a2563a | /GearBot/Util/GearbotLogging.py | 231080a511b781af383ff0de23f0818d6375e2c1 | [
"MIT"
] | permissive | JohnyTheCarrot/GearBot | 51bf4f1e641908725ab70ccc766e861b7ec0af23 | 8a32bfc79f997a154c9abccbf6742a79fc5257b0 | refs/heads/master | 2020-04-10T17:03:22.403295 | 2018-12-10T08:14:05 | 2018-12-10T08:14:05 | 161,163,866 | 0 | 0 | MIT | 2018-12-10T11:24:11 | 2018-12-10T11:24:11 | null | UTF-8 | Python | false | false | 8,957 | py | import asyncio
import logging
import os
import sys
import traceback
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler
import discord
from discord.ext import commands
from Util import Configuration, GlobalHandlers, Utils, Translator, Emoji
LOGGER = logging.getLogger('gearbot')
DISCORD_LOGGER = logging.getLogger('discord')
BOT_LOG_CHANNEL: discord.TextChannel
STARTUP_ERRORS = []
BOT: commands.AutoShardedBot = None
LOG_PUMP = None
LOG_ERRORS = 0
def init_logger():
LOGGER.setLevel(logging.DEBUG)
DISCORD_LOGGER.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')
handler = logging.StreamHandler(stream=sys.stdout)
handler.setLevel(logging.INFO)
handler.setFormatter(formatter)
LOGGER.addHandler(handler)
DISCORD_LOGGER.addHandler(handler)
if not os.path.isdir("logs"):
os.mkdir("logs")
handler = TimedRotatingFileHandler(filename='logs/gearbot.log', encoding='utf-8', when="midnight", backupCount=30)
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
DISCORD_LOGGER.addHandler(handler)
LOGGER.addHandler(handler)
# handler = TimedRotatingFileHandler(filename='logs/discord.log', encoding='utf-8', when="h", interval=1, backupCount=24)
# DISCORD_LOGGER.addHandler(handler)
async def onReady(bot: commands.Bot, channelID):
global BOT_LOG_CHANNEL, BOT, STARTUP_ERRORS, LOG_PUMP
BOT = bot
BOT_LOG_CHANNEL = bot.get_channel(int(channelID))
if BOT_LOG_CHANNEL is None:
LOGGER.error(
"==========================Logging channel is misconfigured, aborting startup!==========================")
await bot.logout()
if len(STARTUP_ERRORS) > 0:
await bot_log(
f":rotating_light: Caught {len(STARTUP_ERRORS)} {'exceptions' if len(STARTUP_ERRORS) > 1 else 'exception'} during startup.")
for e in STARTUP_ERRORS:
await e
STARTUP_ERRORS = []
def initialize_pump(bot):
global LOG_PUMP
LOG_PUMP = LogPump(bot)
bot.loop.create_task(LOG_PUMP.pump())
def debug(message):
LOGGER.debug(message)
def info(message):
LOGGER.info(message)
def warn(message):
LOGGER.warning(message)
def error(message):
LOGGER.error(message)
def exception(message, error):
LOGGER.error(message)
trace = ""
LOGGER.error(str(error))
for line in traceback.format_tb(error.__traceback__):
trace = f"{trace}\n{line}"
LOGGER.error(trace)
async def bot_log(message=None, embed=None):
if BOT_LOG_CHANNEL is not None:
return await BOT_LOG_CHANNEL.send(content=message, embed=embed)
else:
STARTUP_ERRORS.append(bot_log(message, embed))
def log_to(guild_id, type, message=None, embed=None, file=None, can_stamp=True, cleaner=None, tag_on=None):
remaining = None
if message is None and embed is None and file is None:
raise ValueError("What the heck is trying to log nothing?")
if can_stamp and Configuration.get_var(guild_id, "TIMESTAMPS"):
stamp = f"[`{datetime.strftime(datetime.now(), '%H:%M:%S')}`]"
if message is None:
message = stamp
else:
message = f"{stamp} {Utils.trim_message(message, 1985)}"
if tag_on is not None:
if message is None:
message = tag_on
else:
if len(message) + len(tag_on) < 1999:
message = f"{message} {tag_on}"
else:
remaining = tag_on
message = Utils.trim_message(f"{message}\u200b", 1998)
channels = Configuration.get_var(guild_id, "LOG_CHANNELS")
pushed_cleaner = False
for cid, info in channels.items():
if type in info:
if remaining is None:
LOG_PUMP.receive(cid, (message, embed, file, cleaner if not pushed_cleaner else None))
else:
LOG_PUMP.receive(cid, (message, None, None, None))
LOG_PUMP.receive(cid, (tag_on, embed, file, cleaner if not pushed_cleaner else None))
pushed_cleaner = True
async def send_to(destination, emoji, message, delete_after=None, translate=True, **kwargs):
translated = Translator.translate(message, destination.guild, **kwargs) if translate else message
return await destination.send(f"{Emoji.get_chat_emoji(emoji)} {translated}", delete_after=delete_after)
async def message_owner(bot, message):
if bot.owner_id is None:
app = await bot.application_info()
bot.owner_id = app.owner.id
owner = bot.get_user(bot.owner_id)
await owner.send(message)
class LogPump:
def __init__(self, bot):
self.todo = dict()
self.running = True
self.bot = bot
self.NUKED = False
info("Starting log pump")
async def pump(self):
info("Log pump engaged")
empty = []
embed = file = cid = todo = to_send = None
while (self.running or len(self.todo) > 0) and not self.NUKED:
try:
cleaners = []
empty = []
senders = []
embed = file = None
for cid, todo in self.todo.items():
channel = BOT.get_channel(int(cid))
if channel is not None and len(todo) > 0:
permissions = channel.permissions_for(channel.guild.me)
to_send = ""
while len(todo) > 0:
message, embed, file, cleaner = todo[0]
if message is None or message.strip() == "":
message = ""
if (not permissions.send_messages) or (
embed is not None and not permissions.embed_links) or (
file is not None and not permissions.attach_files):
todo.pop(0)
if cleaner is not None:
cleaners.append(cleaner)
continue
elif len(to_send) + len(message) <= 1999:
to_send += f"{message}\n"
todo.pop(0)
if cleaner is not None:
cleaners.append(cleaner)
else:
break
if embed is not None or file is not None:
break
try:
senders.append(channel.send(to_send if to_send != "" else None, embed=embed, file=file))
except Exception as e:
await GlobalHandlers.handle_exception("LOG PUMP", BOT, e,
cid=cid, todo=todo, to_send=to_send,
LOG_CACHE=self.todo, embed=embed,
file=file, empty=empty)
else:
empty.append(cid)
for e in empty:
del self.todo[e]
for s in senders:
try:
await s
except discord.Forbidden:
pass
except Exception as e:
await log_error()
await GlobalHandlers.handle_exception("LOG PUMP", BOT, e,
cid=cid, todo=todo, to_send=to_send,
LOG_CACHE=self.todo, embed=embed, file=file,
empty=empty)
for c in cleaners:
c()
await asyncio.sleep(0.1)
except Exception as e:
await log_error()
await GlobalHandlers.handle_exception("LOG PUMP", BOT, e,
cid=cid, todo=todo, to_send=to_send,
LOG_CACHE=self.todo, embed=embed, file=file,
empty=empty)
info("Log pump terminated")
def receive(self, cid, data):
if cid not in self.todo:
self.todo[cid] = []
self.todo[cid].append(data)
async def log_error():
global LOG_ERRORS, LOG_PUMP
LOG_ERRORS += 1
if LOG_ERRORS >= 10:
LOG_ERRORS = 0
error("=========Log pump error limit reached, deploying nuke to unclog the system=========")
LOG_PUMP.NUKED = True
initialize_pump(BOT)
await bot_log("Log pump got clogged, nuked and restarted, moving on")
| [
"[email protected]"
] | |
0506fa6a486ad43932a07600c4d00d1ddc5a40b9 | 7bd9be7f25be80791f9220b62025f06170273293 | /end-plugins/pycerebro/examples/create_tasks.py | ed3f5c9a50d7b348f9c12a77fad2a31adcebc5f9 | [] | no_license | cerebrohq/cerebro-plugins | ab46b4844adcb12c51d14e21f2c0d8b758b0bb57 | e2e0f97b548ef22957e13d614200027ba89215e0 | refs/heads/master | 2021-11-12T16:25:48.228521 | 2021-10-22T11:25:58 | 2021-10-22T11:25:58 | 143,178,631 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 14,092 | py | # -*- coding: utf-8 -*-
"""
Examples of task creation.
This module demonstrates how to establish a database connection,
create tasks and link them together without using the Cerebro GUI.
It also demonstrates creating messages and attaching files to them.
The module uses the pycerebro package (for Python 3.x), which ships with the service-tools distribution (http://cerebrohq.com/distribs/service-tools.zip).
The pycerebro module is also available on GitHub (https://github.com/cerebrohq/cerebro-plugins/tree/master/end-plugins/pycerebro)
The pycerebro package contains modules for establishing a database connection
and for accessing the file storage (Cargador).
The pycerebro package uses the third-party modules requests and iso8601 to work with the database.
You may need to install these packages additionally.
The module contains the functions:
create_and_link_tasks - an example of creating tasks and linking them together.
make_thumnails - generates thumbnails for video files and images
"""
import fnmatch
import sys
import os
import subprocess
import datetime
local_dir = os.path.realpath(__file__).replace('\\', '/').rsplit('/', 1)[0]
backend_dir = local_dir + '/../..'
sys.path.append(backend_dir)
from pycerebro import database, dbtypes, cargador # the dbtypes module defines various constants, such as data fields, flags, etc.
# Variables you may need to change to adapt the script to your network
cargador_host = 'ss' # Network address of the machine running the Cargador service.
# A host name or an IP address may be given. 'ss' is the name of our server; yours will most likely differ.
cargador_xmlrpc_port = 4040 # Port 4040 is the port for XML-RPC requests.
# Your port may differ; see the comments in the cargador module of the pycerebro package for details.
cargador_http_port = 4080 # Port 4080 is the port for HTTP requests.
# Your port may differ; see the comments in the cargador module of the pycerebro package for details.
project_name = 'Test project' # Name of the project used for the test task creation.
# You can pick any project of your own
mirada_path = '//ss/front/cerebro/mirada.exe' # Path from which to launch Mirada for thumbnail generation.
# Yours will most likely differ. See the details in the function below.
def create_and_link_tasks(db_user, db_password):
"""
db_user и db_password это логин и пароль пользователя Cerebro
В этом примере мы создадим в проекте задачу и две подзадачи.
У задачи выставим время начала, у подзадач запланируем время исполнения и свяжем их между собой.
Также мы создадим у подзадач сообщения типа постановка задачи и приложим к ним файлы.
Мы не будем в этом примере самостоятельно писать sql-запросы, а воспольуемся функцями класса database.Database,
которые по сути являются обертками над sql-запросами.
Описание всех функций смотрите в модуле database пакета pycerebro.
Пример вызова функции:
::
import create_tasks
create_tasks.create_and_link_tasks('user', 'password')
::
"""
def find(f, seq):
        # search for an object in a list
for item in seq:
if f(item):
return item
try:
db = database.Database()
        # Establish a database connection
        if db.connect_from_cerebro_client() != 0: # try to connect via a running Cerebro client.
            # If that fails, connect using login and password
db.connect(db_user, db_password)
        root_tasks = db.root_tasks() # Get the list of the projects' root tasks.
        # Find the root task of the project we will be adding tasks to
root_task = find(lambda val: val[dbtypes.TASK_DATA_NAME] == project_name, root_tasks)
        # Create a task in the project
new_task_id = db.add_task(root_task[dbtypes.TASK_DATA_ID], 'New Test Task')
"""
        The add_task function takes two arguments:
        - the identifier of the parent task, in this case the identifier of the project's root task
        - the task name. Be careful: task names have restrictions.
        See the add_task function description for details.
        The function returns the identifier of the new task.
"""
        # Set the task start time to the current time
"""
        The task start time is set in days since 2000-01-01, in UTC.
        See the task_set_start function description for details.
"""
datetime_now = datetime.datetime.utcnow()
datetime_2000 = datetime.datetime(2000, 1, 1)
timedelta = datetime_now - datetime_2000
days = timedelta.total_seconds()/(24*60*60)
db.task_set_start(new_task_id, days)
        # Create two subtasks under the new task
new_subtask_id_1 = db.add_task(new_task_id, 'New Test Subtask 1')
new_subtask_id_2 = db.add_task(new_task_id, 'New Test Subtask 2')
        # Add task definitions with files to the subtasks
def_id_1 = db.add_definition(new_subtask_id_1, 'Do something 1')
def_id_2 = db.add_definition(new_subtask_id_2, 'Do something 2')
        # Now create 5 more tasks inside the second subtask.
        # For convenience, simply create 5 copies of subtask 1
lst_for_copy = [(new_subtask_id_1, 'Subtask 1'),
(new_subtask_id_1, 'Subtask 2'),
(new_subtask_id_1, 'Subtask 3'),
(new_subtask_id_1, 'Subtask 4'),
                        (new_subtask_id_1, 'Subtask 5'),] # Build a list of the form [(id_of_task_to_copy, 'New name'), ...]
        new_tasks = db.copy_tasks(new_subtask_id_2, lst_for_copy) # Copy into subtask 2
        filename1 = local_dir + '/test.png' # file for the first subtask
        thumbnails1 = make_thumnails(filename1) # generate thumbnails for filename1
        filename2 = local_dir + '/test.mp4' # file for the second subtask
        thumbnails2 = make_thumnails(filename2) # generate thumbnails for filename2
        # Create an object for adding files to the file storage (Cargador)
carga = cargador.Cargador(cargador_host, cargador_xmlrpc_port, cargador_http_port)
        # Attach files to the task definition messages and, at the same time, export them to the storage
db.add_attachment(def_id_1, carga, filename1, thumbnails1, '', False)
db.add_attachment(def_id_2, carga, filename2, thumbnails2, '', False)
"""
        The carga parameter is passed in order to export the file to the file storage.
        See the cargador module for details.
        The last parameter determines whether the file is added as a link, without being exported to the storage (True),
        or exported (False).
        See the add_attachment function description for details.
"""
        # Delete the generated thumbnails, since they have already been exported to the storage
for f in thumbnails1:
os.remove(f)
for f in thumbnails2:
os.remove(f)
        # Set the planned time on the subtasks
        db.task_set_planned_time(new_subtask_id_1, 12.5) # set 12.5 hours on the first subtask
        db.task_set_planned_time(new_subtask_id_2, 30) # set 30 hours on the second subtask
        # Link the subtasks
db.set_link_tasks(new_subtask_id_1, new_subtask_id_2)
"""
        This link means that the second subtask starts after the first subtask finishes.
"""
except Exception as err:
print(err)
def make_thumnails(filename):
"""
    Takes the full path of a video or image file and generates thumbnails for it.
    :returns: a list of paths to the thumbnail files.
    Example call:
    ::
    import create_tasks
    filename = 'c:/temp/file.mov'
    thumbnails = create_tasks.make_thumnails(filename)
    ::
    Thumbnail generation:
    If the file is an image or a video, reduced-size thumbnails can be added for it.
    Up to 3 thumbnails can be added (first, middle and last frames).
    The Mirada program can be used to generate the thumbnails.
    It ships with the Cerebro distribution. Other programs can be used for generation as well,
    for example, ffmpeg.
"""
    #Example of thumbnail generation with Mirada.
if os.path.exists(filename) == False or os.path.exists(mirada_path) == False:
return list()
    gen_path = os.path.dirname(filename) # Use the directory of the file being added as the thumbnail output directory
    # Launch Mirada with the required switches
    res_code = subprocess.call([mirada_path, filename, '--temp', gen_path, '--hide', '--mode', 'thumbstandalone'])
    #--temp - the directory for thumbnail generation
    #--hide - runs Mirada in hidden mode (without loading the GUI) to generate the thumbnails.
if res_code != 0:
raise Exception("Mirada returned bad exit-status.\n" + mirada_path)
    #Look for the thumbnails generated by Mirada.
    #The thumbnail name is built from the file name plus the generation date and time - filename_yyyymmdd_hhmmss_thumb[number].jpg
    #For example: test.mov_20120305_112354_thumb1.jpg - the first thumbnail of the video file test.mov
thumbnails = list()
for f in os.listdir(gen_path):
if fnmatch.fnmatch(f, os.path.basename(filename) + '.thumb*.jpg'):
thumbnails.append(gen_path + '/' + f)
thumbnails.sort()
"""
    #Example of thumbnail generation with ffmpeg.
    #To generate thumbnails with ffmpeg you have to know the video duration in advance,
    #so that the middle and last frames can be grabbed correctly.
    #Take, for example, a clip 30 seconds long.
    thumbnails = list() # list of thumbnail file paths
thumbnails.append(filename + '_thumb1.jpg')
thumbnails.append(filename + '_thumb2.jpg')
thumbnails.append(filename + '_thumb3.jpg')
    subprocess.call(['ffmpeg', '-i', filename, '-s', '512x512', '-an', '-ss', '00:00:00', '-r', '1', '-vframes', '1', '-y', thumbnails[0]])
    subprocess.call(['ffmpeg', '-i', filename, '-s', '512x512', '-an', '-ss', '00:00:15', '-r', '1', '-vframes', '1', '-y', thumbnails[1]])
    subprocess.call(['ffmpeg', '-i', filename, '-s', '512x512', '-an', '-ss', '00:00:30', '-r', '1', '-vframes', '1', '-y', thumbnails[2]])
    # See the ffmpeg documentation for a description of the switches
"""
return thumbnails
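# Minimal driver sketch (added for illustration; the credentials below are
# placeholders, not real accounts):
if __name__ == '__main__':
    create_and_link_tasks('your_login', 'your_password')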
| [
"[email protected]"
] | |
cbb187e7d59019198c646d702aff8fbadc4169a3 | fa89010f366aa33967c12636bf6cfae6105a9ee5 | /ex7/nation_mood.py | cee5016c3dfd5694fae9bb9cc3434bbd28db8739 | [] | no_license | borgr/intro2cs | 4db1985b789d0938d7c9cecddbe5a302f284bd95 | 9030d9831a168d9636093bd5211926666298d80f | refs/heads/master | 2020-05-29T19:35:20.829664 | 2016-10-22T06:07:22 | 2016-10-22T06:07:22 | 15,959,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,976 | py | #############################################################
# FILE : nation_mood.py
# WRITER : Leshem Choshen + borgr + 305385338
# EXERCISE : intro2cs ex7 200132014
# DESCRIPTION: nation-wide tweet functions
#############################################################
from data import load_tweets
from geo import us_states, Position, geo_distance
from tweet import Tweet
from geo_tweet_tools import find_center, find_closest_state, group_tweets_by_state
def most_talkative_state(tweets,find_state):
"""Return the state that has the largest number of tweets containing term.
>>> state_centers = {n: find_center(s) for n, s in us_states.items()}
>>> tweets = load_tweets('texas')
>>> find_state = find_closest_state(state_centers);
>>> most_talkative_state(tweets,find_state)
'TX'
>>> tweets = load_tweets('sandwich')
>>> most_talkative_state(tweets,find_state)
'NJ'
"""
most_tweets = -float("inf")
most_state = None
grouped = group_tweets_by_state(tweets,find_state)
for state in grouped:
state_tweets = len(grouped[state])
if most_tweets < state_tweets:
most_tweets = state_tweets
most_state = state
return most_state
def average_sentiments(tweets_by_state,word_sentiments):
"""Calculate the average sentiment of the states by averaging over all
the tweets from each state. Return the result as a dictionary from state
names to average sentiment values (numbers).
If a state has no tweets with sentiment values, leave it out of the
dictionary entirely. Do NOT include states with no tweets, or with tweets
that have no sentiment, as 0. 0 represents neutral sentiment, not unknown
sentiment.
tweets_by_state -- A dictionary from state names to lists of tweets
"""
    average = {}
    for state in tweets_by_state.keys():
        sentiments = []
        for tweet in tweets_by_state[state]:
            sentiment = tweet.get_sentiment(word_sentiments)
            if sentiment is not None:
                sentiments.append(sentiment)
        if sentiments:
            average.update({state: sum(sentiments) / len(sentiments)})
    return average
def group_tweets_by_hour(tweets):
"""Return a list of lists of tweets that are gouped by the hour
they were posted.
The indexes of the returned list represent the hour when they were posted
- the integers 0 through 23.
tweets_by_hour[i] is the list of all
tweets that were posted between hour i and hour i + 1. Hour 0 refers to
midnight, while hour 23 refers to 11:00PM.
To get started, read the Python Library documentation for datetime
objects:
http://docs.python.org/py3k/library/datetime.html#datetime.datetime
tweets -- A list of tweets to be grouped
"""
return [[tweet for tweet in tweets
if tweet.get_time().hour == hour]
for hour in range(24)]
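# Illustrative end-to-end sketch (mirrors the doctests above; assumes the
# sample 'texas' tweet data used there is available):
if __name__ == '__main__':
    state_centers = {n: find_center(s) for n, s in us_states.items()}
    tweets = load_tweets('texas')
    by_hour = group_tweets_by_hour(tweets)
    print([len(bucket) for bucket in by_hour])  # one count per hour, 0..23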
| [
"[email protected]"
] | |
c843c8f5ac94babd9d6dda5862e4393a08c25abd | b0a1884cd6c40362085dc08c7a091ed7cf1ece7f | /eelbrain/tests/test_mne.py | 06e2009c988be2ab9c2632f8d57ae4e63439b6be | [] | no_license | LauraGwilliams/Eelbrain | 4f37dbcc314063e92425dadf9b1f9f2aeea69a9c | d04fa2e7108c5f683fc145fc44a794d39928f2cb | refs/heads/master | 2021-01-18T16:00:36.727474 | 2015-10-23T13:38:17 | 2015-10-23T13:43:17 | 34,459,099 | 0 | 0 | null | 2015-04-23T13:51:09 | 2015-04-23T13:51:09 | null | UTF-8 | Python | false | false | 7,674 | py | """Test mne interaction"""
from itertools import izip
import os
from nose.tools import eq_, ok_, assert_less_equal, assert_not_equal, assert_in
import numpy as np
from numpy.testing import assert_array_equal, assert_allclose
import mne
from eelbrain import datasets, load, testnd, morph_source_space, Factor
from eelbrain._data_obj import asndvar, SourceSpace, _matrix_graph
from eelbrain._mne import shift_mne_epoch_trigger, combination_label
from eelbrain.tests.test_data import assert_dataobj_equal
# mne paths
data_dir = mne.datasets.sample.data_path()
subjects_dir = os.path.join(data_dir, 'subjects')
def test_source_estimate():
"Test SourceSpace dimension"
mne.set_log_level('warning')
ds = datasets.get_mne_sample(src='ico')
dsa = ds.aggregate('side')
# test auto-conversion
asndvar('epochs', ds=ds)
asndvar('epochs', ds=dsa)
asndvar(dsa['epochs'][0])
# source space clustering
res = testnd.ttest_ind('src', 'side', ds=ds, samples=0, pmin=0.05,
tstart=0.05, mintime=0.02, minsource=10)
eq_(res.clusters.n_cases, 52)
# test disconnecting parc
src = ds['src']
source = src.source
parc = source.parc
orig_conn = set(map(tuple, source.connectivity()))
disc_conn = set(map(tuple, source.connectivity(True)))
ok_(len(disc_conn) < len(orig_conn))
for pair in orig_conn:
s, d = pair
if pair in disc_conn:
eq_(parc[s], parc[d])
else:
assert_not_equal(parc[s], parc[d])
# threshold-based test with parc
srcl = src.sub(source='lh')
res = testnd.ttest_ind(srcl, 'side', ds=ds, samples=10, pmin=0.05,
tstart=0.05, mintime=0.02, minsource=10,
parc='source')
eq_(res._cdist.dist.shape[1], len(srcl.source.parc.cells))
label = 'superiortemporal-lh'
c_all = res.find_clusters(maps=True)
c_label = res.find_clusters(maps=True, source=label)
assert_array_equal(c_label['location'], label)
for case in c_label.itercases():
id_ = case['id']
idx = c_all['id'].index(id_)[0]
eq_(case['v'], c_all[idx, 'v'])
eq_(case['tstart'], c_all[idx, 'tstart'])
eq_(case['tstop'], c_all[idx, 'tstop'])
assert_less_equal(case['p'], c_all[idx, 'p'])
assert_dataobj_equal(case['cluster'],
c_all[idx, 'cluster'].sub(source=label))
# threshold-free test with parc
res = testnd.ttest_ind(srcl, 'side', ds=ds, samples=10, tstart=0.05,
parc='source')
cl = res.find_clusters(0.05)
eq_(cl.eval("p.min()"), res.p.min())
mp = res.masked_parameter_map()
assert_in(mp.min(), (0, res.t.min()))
assert_in(mp.max(), (0, res.t.max()))
# indexing source space
s_sub = src.sub(source='fusiform-lh')
idx = source.index_for_label('fusiform-lh')
s_idx = src[idx]
assert_dataobj_equal(s_sub, s_idx)
def test_dataobjects():
"Test handing MNE-objects as data-objects"
ds = datasets.get_mne_sample(sns=True)
ds['C'] = Factor(ds['index'] > 155, labels={False: 'a', True: 'b'})
sds = ds.sub("side % C != ('L', 'b')")
ads = sds.aggregate('side % C')
eq_(ads.n_cases, 3)
# connectivity
sensor = ds['sns'].sensor
c = sensor.connectivity()
assert_array_equal(c[:, 0] < c[:, 1], True)
eq_(c.max(), len(sensor) - 1)
def test_epoch_trigger_shift():
"Test the shift_mne_epoch_trigger() function"
epochs = datasets.get_mne_sample(sns=True, sub="[1,2,3]")['epochs']
n_lost_start = np.sum(epochs.times < epochs.tmin + 0.05)
n_lost_end = np.sum(epochs.times > epochs.tmax - 0.05)
data = epochs.get_data()
epochs_s = shift_mne_epoch_trigger(epochs, [0, 0, 0])
assert_array_equal(epochs_s.get_data(), data)
epochs_s = shift_mne_epoch_trigger(epochs, [-0.05, 0., 0.05])
data_s = epochs_s.get_data()
assert_array_equal(data_s[0], data[0, :, : -(n_lost_end + n_lost_start)])
assert_array_equal(data_s[1], data[1, :, n_lost_start: -n_lost_end])
assert_array_equal(data_s[2], data[2, :, n_lost_end + n_lost_start:])
assert_allclose(epochs_s.times, epochs.times[n_lost_start: -n_lost_end],
rtol=1e-1, atol=1e-3) # ms accuracy
epochs_s = shift_mne_epoch_trigger(epochs, [0.05, 0., 0.05])
data_s = epochs_s.get_data()
assert_array_equal(data_s[0], data[0, :, n_lost_end:])
assert_array_equal(data_s[1], data[1, :, :-n_lost_end])
assert_array_equal(data_s[2], data[2, :, n_lost_end:])
assert_allclose(epochs_s.times, epochs.times[:-n_lost_end],
rtol=1e-1, atol=1e-3) # ms accuracy
def test_combination_label():
"Test combination label creation"
labels = {l.name: l for l in
mne.read_labels_from_annot('fsaverage', subjects_dir=subjects_dir)}
# standard
l = combination_label('temporal', "superiortemporal + middletemporal + inferiortemporal", labels)
lh = labels['superiortemporal-lh'] + labels['middletemporal-lh'] + labels['inferiortemporal-lh']
rh = labels['superiortemporal-rh'] + labels['middletemporal-rh'] + labels['inferiortemporal-rh']
eq_(len(l), 2)
eq_(l[0].name, 'temporal-lh')
eq_(l[1].name, 'temporal-rh')
assert_array_equal(l[0].vertices, lh.vertices)
assert_array_equal(l[1].vertices, rh.vertices)
# only rh
l = combination_label('temporal-rh', "superiortemporal + middletemporal + inferiortemporal", labels)
eq_(len(l), 1)
eq_(l[0].name, 'temporal-rh')
assert_array_equal(l[0].vertices, rh.vertices)
# names with .
labels = {l.name: l for l in
mne.read_labels_from_annot('fsaverage', 'PALS_B12_Brodmann', subjects_dir=subjects_dir)}
l = combination_label('Ba38-lh', "Brodmann.38", labels)[0]
assert_array_equal(l.vertices, labels['Brodmann.38-lh'].vertices)
def test_morphing():
mne.set_log_level('warning')
sss = datasets._mne_source_space('fsaverage', 'ico-4', subjects_dir)
vertices_to = [sss[0]['vertno'], sss[1]['vertno']]
ds = datasets.get_mne_sample(-0.1, 0.1, src='ico', sub='index==0', stc=True)
stc = ds['stc', 0]
morph_mat = mne.compute_morph_matrix('sample', 'fsaverage', stc.vertices,
vertices_to, None, subjects_dir)
ndvar = ds['src']
morphed_ndvar = morph_source_space(ndvar, 'fsaverage')
morphed_stc = mne.morph_data_precomputed('sample', 'fsaverage', stc,
vertices_to, morph_mat)
assert_array_equal(morphed_ndvar.x[0], morphed_stc.data)
morphed_stc_ndvar = load.fiff.stc_ndvar([morphed_stc], 'fsaverage', 'ico-4',
subjects_dir, 'dSPM', False, 'src',
parc=None)
assert_dataobj_equal(morphed_ndvar, morphed_stc_ndvar)
def test_source_space():
"Test SourceSpace dimension"
for subject in ['fsaverage', 'sample']:
mne_src = datasets._mne_source_space(subject, 'ico-4', subjects_dir)
vertno = [mne_src[0]['vertno'], mne_src[1]['vertno']]
ss = SourceSpace(vertno, subject, 'ico-4', subjects_dir, 'aparc')
# connectivity
conn = ss.connectivity()
mne_conn = mne.spatial_src_connectivity(mne_src)
assert_array_equal(conn, _matrix_graph(mne_conn))
# sub-space connectivity
sssub = ss[ss.dimindex('superiortemporal-rh')]
ss2 = SourceSpace(vertno, subject, 'ico-4', subjects_dir, 'aparc')
ss2sub = ss2[ss2.dimindex('superiortemporal-rh')]
assert_array_equal(sssub.connectivity(), ss2sub.connectivity())
| [
"[email protected]"
] | |
2fc98b738bd56aa8aff7591590b98098af6a04b0 | 37b014820aef8b83e3eca3f102b3d04ef504066e | /readcsv/settings.py | 65777c4908e4034159ad3c696a1251fa850b447a | [] | no_license | juniorcarvalho/readcsv | d8ae35efe65c90363920a1049c17554b0757fe15 | 0a20ad4f96929b9f5bab703f2c47f6c0ff345bc3 | refs/heads/master | 2022-05-25T08:00:16.372555 | 2021-04-08T20:38:00 | 2021-04-08T20:38:00 | 208,878,426 | 1 | 0 | null | 2022-04-22T22:17:01 | 2019-09-16T19:14:28 | Python | UTF-8 | Python | false | false | 5,052 | py | """
Django settings for readcsv project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
from decouple import config, Csv
from dj_database_url import parse as dburl
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = config("ALLOWED_HOSTS", default="*", cast=Csv())
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'readcsv.core',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'readcsv.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'readcsv.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DEFAULT_DBURL = "sqlite:///" + os.path.join(BASE_DIR, "db.sqlite3")
DATABASES = {"default": config("DATABASE_URL", default=DEFAULT_DBURL, cast=dburl)}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
LOGGING_APPNAME = 'readcsvlog'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
},
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(BASE_DIR, LOGGING_APPNAME + '.log'),
'maxBytes': 1024 * 1024 * 15,
'formatter': 'verbose'
},
},
'loggers': {
'django': {
'handlers': ['console'],
'propagate': True,
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
LOGGING_APPNAME: {
'handlers': ['console', 'mail_admins', 'logfile', ],
'level': 'DEBUG',
}
}
}
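# Example (illustrative): application modules obtain the configured logger with
#   import logging
#   logger = logging.getLogger(LOGGING_APPNAME)
#   logger.info('goes to the console, the rotating log file and, on ERROR, admin mail')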
# twitter
TWITTER_API_KEY = config('TWITTER_API_KEY', default='')
TWITTER_SECRET_KEY = config('TWITTER_SECRET_KEY', default='')
TWITTER_ACCESS_TOKEN = config('TWITTER_ACCESS_TOKEN', default='')
TWITTER_ACCESS_TOKEN_SECRET = config('TWITTER_ACCESS_TOKEN_SECRET', default='')
| [
"[email protected]"
] | |
a962ff1bdbf5794c6ccf3662675aacd15b94ab20 | de4c5ecaf541d67e7cbf02837d93cf303d23b5da | /tests/app/views/home_tests.py | 457f11123076d3a77c42d472cd58e5fe3b42dc01 | [
"Apache-2.0"
] | permissive | shadowmint/py-test-watcher | d140064cafeb0b2efce8a403a3abd63322f812d0 | 36d33206b104c81e2d6acebdbed2dddee71fe2a7 | refs/heads/master | 2021-01-19T14:07:13.441335 | 2013-07-01T06:07:56 | 2013-07-01T06:07:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,641 | py | # Copyright 2013 Douglas Linder
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import bootstrap
import pau
import os
from pau.model import *
from nark import *
from pau.views import Home
class HomeTests(unittest.TestCase):
# db for this test
db_name = "HomeTests.sqlite"
def setup(self):
""" Setup db and return instance """
self.config = pau.IConfig
self.session = pau.ISession
pau.resolve(self)
self.session.assets = Assets()
self.config.db = self.db_name
self.config.db_debug = False
self.db = pau.IDb
pau.resolve(self)
self.prefs = Prefs()
pau.resolve(self.prefs)
# Instance
i = Home()
pau.resolve(i)
return i
def teardown(self):
self.db.reset()
try:
os.remove(self.db_name)
except:
pass
def test_can_create_instance(self):
a = Assert()
i = self.setup()
a.not_null(i, "Unable to create instance")
self.teardown()
def test_has_setup_fails(self):
a = Assert()
i = self.setup()
rtn = i.has_setup("", "")
a.false(rtn["result"], "Failed to not find preferences")
self.teardown()
def test_has_setup_passes(self):
a = Assert()
i = self.setup()
self.prefs.add(pau.Preferences.LOCATION, "VALUE")
rtn = i.has_setup("", "")
a.true(rtn["result"], "Failed to find preferences")
self.teardown()
def test_preferences(self):
a = Assert()
i = self.setup()
self.prefs.add(pau.Preferences.LOCATION, "VALUE")
rtn = i.preference("", "LOCATION")
a.equals(rtn["result"], "VALUE", "Failed to find preference by key")
self.teardown()
def test_flash(self):
a = Assert()
i = self.setup()
i.flash_service.notice("Hello World")
i.flash_service.success("Hello Again")
rtn = i.flash("", "")
a.equals(rtn["result"], "Hello World", "Failed to return oldest message")
rtn = i.flash("", "")
a.equals(rtn["result"], "Hello Again", "Failed to return second message")
rtn = i.flash("", "")
a.false(rtn["result"], "Invalid return when no messages")
self.teardown()
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
576a5caebe9274dbe6a976f5be2cda765413cea2 | 0faf534ebb6db6f32279e5bee25b968bd425ce3a | /tests/extension/thread_/stream_sink_fifo/test_thread_stream_sink_fifo.py | 2fbe7fdf2573c48b8a204e9797df3ef6b7351a35 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | PyHDI/veriloggen | e8647cb2d40737d84e31d6b89c5799bab9cbd583 | f2b1b9567150af097eed1b5e79ba2b412854ef43 | refs/heads/develop | 2023-08-09T10:02:35.626403 | 2023-08-09T00:50:14 | 2023-08-09T00:50:14 | 37,813,184 | 282 | 60 | Apache-2.0 | 2023-07-20T03:03:29 | 2015-06-21T15:05:30 | Python | UTF-8 | Python | false | false | 547 | py | from __future__ import absolute_import
from __future__ import print_function
import os
import veriloggen
import thread_stream_sink_fifo
def test(request):
veriloggen.reset()
simtype = request.config.getoption('--sim')
rslt = thread_stream_sink_fifo.run(filename=None, simtype=simtype,
outputfile=os.path.splitext(os.path.basename(__file__))[0] + '.out')
verify_rslt = [line for line in rslt.splitlines() if line.startswith('# verify:')][0]
assert(verify_rslt == '# verify: PASSED')
| [
"[email protected]"
] | |
be41226a5a51288ece78ea6a8101e94652515a8a | 74b812828a80190636523cbad4f3a6fc239484ba | /openff/bespokefit/executor/services/optimizer/app.py | cb29e0c1ed66c8e77bb693ced204a9a750190e59 | [
"MIT"
] | permissive | openforcefield/openff-bespokefit | ef438ddc9a072a280e155d1e2d097068a369f73f | 97262756c5c014e9bd5f799d64755b7f73a6160e | refs/heads/main | 2023-08-17T07:09:11.715404 | 2023-08-04T09:43:57 | 2023-08-04T09:43:57 | 241,694,600 | 29 | 4 | MIT | 2023-09-08T06:14:39 | 2020-02-19T18:31:38 | Python | UTF-8 | Python | false | false | 1,559 | py | import json
from fastapi import APIRouter
from qcelemental.util import serialize
from openff.bespokefit.executor.services import current_settings
from openff.bespokefit.executor.services.optimizer import worker
from openff.bespokefit.executor.services.optimizer.models import (
OptimizerGETResponse,
OptimizerPOSTBody,
OptimizerPOSTResponse,
)
from openff.bespokefit.executor.utilities.celery import get_task_information
router = APIRouter()
__settings = current_settings()
__GET_ENDPOINT = "/" + __settings.BEFLOW_OPTIMIZER_PREFIX + "/{optimization_id}"
@router.get(__GET_ENDPOINT)
def get_optimization(optimization_id: str) -> OptimizerGETResponse:
task_info = get_task_information(worker.celery_app, optimization_id)
# noinspection PyTypeChecker
return {
"id": optimization_id,
"self": __settings.BEFLOW_API_V1_STR
+ __GET_ENDPOINT.format(optimization_id=optimization_id),
"status": task_info["status"],
"result": task_info["result"],
"error": json.dumps(task_info["error"]),
}
@router.post("/" + __settings.BEFLOW_OPTIMIZER_PREFIX)
def post_optimization(body: OptimizerPOSTBody) -> OptimizerPOSTResponse:
# We use celery delay method in order to enqueue the task with the given
# parameters
task = worker.optimize.delay(
optimization_input_json=serialize(body.input_schema, "json")
)
return OptimizerPOSTResponse(
id=task.id,
self=__settings.BEFLOW_API_V1_STR
+ __GET_ENDPOINT.format(optimization_id=task.id),
)
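# Illustrative request flow (actual paths depend on the configured BEFLOW_*
# settings): POST {BEFLOW_API_V1_STR}/{BEFLOW_OPTIMIZER_PREFIX} with an
# OptimizerPOSTBody enqueues the Celery task and returns its id; a GET on
# .../{optimization_id} then polls the task's status, result and error.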
| [
"[email protected]"
] | |
b2ccc54baa8f5810be524d0f142b9b43562381c9 | a8592d34f144b71794ebf30f1c2a1b5faf0b053c | /Praktikum2023/Modul_08/codes/diffusion_02.py | c407706419324a395034cba60ab038151f2df39b | [] | no_license | f-fathurrahman/ffr-MetodeNumerik | ee9a6a7153b174b1ba3d714fe61ccbd1cb1dd327 | e3a9da224c0fd5b32e671708e890018a3c4104c4 | refs/heads/master | 2023-07-19T22:29:38.810143 | 2023-07-07T10:02:34 | 2023-07-07T10:02:34 | 107,272,110 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,735 | py | # Explicit method (forward Euler) for diffusion equation
# Using two vectors for current and future time
import numpy as np
import matplotlib.pyplot as plt
# Global variables !!!
L = 1.5
Tfinal = 1.0
α = 0.1
DO_PLOT = True
# Manufactured solution
def u_exact(t, x):
return 5*x*t*(L-x)
def initial_cond(x):
return u_exact(0, x)
def source_term(t, x):
return 10*α*t + 5*x*(L - x)
Nx = 25
x = np.linspace(0.0, L, Nx+1)
Δx = x[1] - x[0]
print("Δx = ", Δx)
Nt = 200
t = np.linspace(0.0, Tfinal, Nt+1)
Δt = t[1] - t[0]
print("Δt = ", Δt)
print("Final t = ", t[-1])
F = α * Δt / Δx**2
print("F = ", F)
if F > 0.5:
print("WARNING: solution is not stable")
# exit()
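# Note (standard FTCS stability argument): the explicit scheme is stable only
# for F = α*Δt/Δx**2 <= 1/2; with the grid above, F = 0.1*0.005/0.06**2 ≈ 0.139,
# so the warning branch is not taken here.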
# Use only two vectors for the solution
un = np.zeros(Nx+1)
unp1 = np.zeros(Nx+1)
un[:] = initial_cond(x)
for n in range(0,Nt):
# Apply boundary condition
un[0] = 0.0 # syarat batas pada x=0
un[Nx] = 0.0 # syarat batas pada x=L
for i in range(1,Nx):
fni = source_term(t[n], x[i])
unp1[i] = un[i] + F*( un[i+1] - 2*un[i] + un[i-1] ) + Δt*fni
#
un[:] = unp1[:] # update for the next iteration
if DO_PLOT:
plt.clf()
plt.plot(x, un)
plt.title("t = " + str(t[n]))
plt.savefig("IMG_diff1d_explicit_" + str(n) + ".png", dpi=150)
print("n = " + str(n) + " is done")
if DO_PLOT:
plt.clf()
plt.plot(x, un, label="numerical")
plt.plot(x, u_exact(t[Nt],x), label="exact sol")
plt.title("t = " + str(t[Nt]))
plt.legend()
plt.savefig("IMG_diff1d_explicit_COMPARE_" + str(n) + ".png", dpi=150)
# Difference between exact solution and numerical solution
Δu = u_exact(t[Nt],x) - un
norm_Δu = np.linalg.norm(Δu)
print("norm_du = ", norm_Δu)
| [
"[email protected]"
] | |
b365e14451153a32b788ea21316db61b97698dc1 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-3648.py | 43de1a025a9b7634670b7fa09e1476f970e269f3 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,754 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
        self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
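# Added note (illustrative, not part of the original benchmark): the override
# above doubles the backing list (1 -> 2 -> 4 -> 8 ...) while the capacity is
# at most doubling_limit // 2, then falls back to +1 growth. This makes a run
# of n appends O(n) amortized instead of O(n^2), at the cost of up to 2x
# memory. Sketch of the growth, using the helpers defined later in this file:
#   v:Vector = None
#   v = DoublingVector()
#   v.append_all(vrange(0, 5))  # capacity after each growth step: 1, 2, 4, 8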
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
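# Example (added note): vrange(2, 6) returns a Vector holding 2, 3, 4, 5 --
# the lower bound is inclusive, the upper bound exclusive -- backed by a
# DoublingVector for cheap appends.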
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really: it filters primes by trial division,
# removing every later element divisible by a kept one)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
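# Worked example (added note): starting from vrange(2, 10) = [2..9],
# k=2 removes 4, 6, 8; k=3 removes 9; the remaining [2, 3, 5, 7] are the
# primes below 10. Each kept element serves as a trial divisor for
# everything after it, which is why exactly the primes survive.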
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
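# Expected behaviour (added note): with n = 50 the loop above prints the
# primes below 50 (2, 3, 5, ..., 47), one per line. v2..v5 are built but
# never sieved or printed by this benchmark.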
# ---- /compiled/Python3/Euler_Problem-030.py (LStepanek/Project-Euler_Befunge, MIT) ----
#!/usr/bin/env python3
# transpiled with BefunCompile v1.1.0 (c) 2015
def td(a,b):
    # division helper: yields 0 on division by zero (Befunge semantics)
    return ((0)if(b==0)else(a//b))
def tm(a,b):
    # modulo helper: yields 0 on modulo by zero (Befunge semantics)
    return ((0)if(b==0)else(a%b))
s=[]
def sp():
    # pop: an empty stack yields 0 (Befunge treats the stack as bottomless zeros)
    global s
    if (len(s) == 0):
        return 0
    return s.pop()
def sa(v):
    # push v onto the stack
    global s
    s.append(v)
def sr():
    # peek at the top of the stack without popping; empty stack yields 0
    global s
    if (len(s) == 0):
        return 0
    return s[-1]
def _0():
sa(0)
sa(1)
sa(1)
sa(0)
sa(5904)
return 1
def _1():
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+1);
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sr());
return 2
def _2():
return (13)if(sp()!=0)else(3)
def _3():
global t0
sp();
v0=sp()
sa(sp()-v0)
t0=sp()
return (12)if((t0)!=0)else(4)
def _4():
sa(sp()*59049);
return 5
def _5():
global t0
global t1
global t2
global t3
global t4
global t5
global t6
global t7
sa(sr());
sa(sr());
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t1=sp()
sa(td(sp(),10))
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t2=sp()
sa(td(sp(),10))
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t3=sp()
sa(td(sp(),10))
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t4=sp()
sa(td(sp(),10))
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t5=sp()
sa(td(sp(),10))
sa(tm(sr(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t6=sp()
sa(td(sp(),10))
sa(tm(sp(),10))
sa(sr());
sa(sr());
sa(sp()*sp());
sa(sr());
sa(sp()*sp());
t0=sp()
sa(sp()*t0);
t7=sp()
t0=t7+t6
t6=t5+t0
t0=t4+t6
t4=t3+t0
t0=t2+t4
t2=t1+t0
sa(sp()-t2);
t0=sp()
return (6)if((t0)!=0)else(11)
def _6():
sa(sp()-1);
sa(sr());
sa((0)if(sp()!=0)else(1))
return (7)if(sp()!=0)else(5)
def _7():
sp();
sp();
return 8
def _8():
sa(sp()+sp());
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sr());
return (9)if(sp()!=0)else(10)
def _9():
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sp()+sp());
return 8
def _10():
sp();
print(sp(),end="",flush=True)
return 14
def _11():
sa(sr());
return 6
def _12():
sa(sp()+1);
sa(sr());
sa(sr()*59049)
sa(0)
v0=sp()
v1=sp()
sa(v0)
sa(v1)
sa(sr());
return 2
def _13():
sa(td(sp(),10))
return 1
m=[_0,_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13]
c=0
while c<14:
c=m[c]()
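# Note on the structure (added comment): the transpiler emits each Befunge
# basic block as a function _0.._13 returning the index of the next block,
# and the loop above is a trampoline that dispatches on that index until a
# block returns 14 or more (the halt state; _10 prints the result and
# returns 14). A minimal sketch of the same pattern:
#   def step_a(): return 1      # go to step_b
#   def step_b(): return 2      # index falls off the table -> halt
#   table = [step_a, step_b]
#   state = 0
#   while state < 2:
#       state = table[state]()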
# ---- /mods/mcpython/entity/__init__.py (vashistaarav1611/mcpython-a-minecraft-clone-in-python, MIT) ----
from .entity import *
from .player import *
from .boxmodel import *
# ---- /pythongame/scenes/scene_creating_world/scene_creating_world.py (JonathanMurray/python-2d-game) ----
from typing import Optional
from typing import Tuple
from pythongame.core.common import ConsumableType, Sprite, ItemId
from pythongame.core.common import Millis, HeroId, AbstractScene, SceneTransition
from pythongame.core.entity_creation import create_player_state_as_initial, create_hero_world_entity
from pythongame.core.game_state import GameState
from pythongame.core.global_path_finder import init_global_path_finder
from pythongame.core.hero_upgrades import pick_talent
from pythongame.core.npc_behaviors import get_quest
from pythongame.core.quests import QuestId
from pythongame.core.world_behavior import ChallengeBehavior, StoryBehavior
from pythongame.map_file import load_map_from_json_file
from pythongame.player_file import SavedPlayerState
from pythongame.scenes.scene_factory import AbstractSceneFactory
from pythongame.scenes.scenes_game.game_engine import GameEngine
from pythongame.scenes.scenes_game.game_ui_view import GameUiView
from pythongame.world_init_util import register_game_engine_observers, \
register_game_state_observers
class InitFlags:
def __init__(self,
map_file_path: Optional[str],
picked_hero: Optional[HeroId],
saved_player_state: Optional[SavedPlayerState],
hero_start_level: Optional[int],
start_money: Optional[int],
character_file: Optional[str]):
self.map_file_path = map_file_path
self.picked_hero = picked_hero
self.saved_player_state: SavedPlayerState = saved_player_state
self.hero_start_level = hero_start_level
self.start_money = start_money
self.character_file: str = character_file
    def __repr__(self):
        return "(" + str(self.map_file_path) + ", " + str(self.picked_hero) + ", " + \
               str(self.saved_player_state) + ", " + str(self.hero_start_level) + ", " + str(self.start_money) + ")"
class CreatingWorldScene(AbstractScene):
def __init__(
self,
scene_factory: AbstractSceneFactory,
camera_size: Tuple[int, int], ui_view: GameUiView,
flags: InitFlags):
self.scene_factory = scene_factory
self.camera_size = camera_size
self.ui_view = ui_view
        # Flags carrying the map file, picked hero, saved state, start level and start money
self.flags: InitFlags = flags
def run_one_frame(self, _time_passed: Millis) -> Optional[SceneTransition]:
saved_player_state = self.flags.saved_player_state
hero_start_level = self.flags.hero_start_level
start_money = self.flags.start_money
picked_hero_id = self.flags.picked_hero
map_file_path = self.flags.map_file_path
character_file = self.flags.character_file
if saved_player_state:
hero_from_saved_state = HeroId[saved_player_state.hero_id]
if picked_hero_id is not None and picked_hero_id != hero_from_saved_state:
raise Exception("Mismatch! Hero from saved state: " + str(hero_from_saved_state) + ", but picked hero: "
+ str(picked_hero_id))
picked_hero_id = hero_from_saved_state
total_time_played_on_character = saved_player_state.total_time_played_on_character if saved_player_state else 0
# NPC's share a "global path finder" that needs to be initialized before we start creating NPCs.
# TODO This is very messy
path_finder = init_global_path_finder()
game_state = self._load_map_and_setup_game_state(map_file_path, picked_hero_id)
path_finder.set_grid(game_state.pathfinder_wall_grid)
# Must center camera before notifying player position as it affects which walls are shown on the minimap
game_state.center_camera_on_player()
self.ui_view.on_world_area_updated(game_state.game_world.entire_world_area)
self.ui_view.update_hero(game_state.player_state.hero_id)
game_engine = GameEngine(game_state, self.ui_view.info_message)
register_game_engine_observers(game_engine, self.ui_view)
register_game_state_observers(game_state, self.ui_view, include_player_state=True)
if saved_player_state:
game_engine.gain_levels(saved_player_state.level - 1)
game_state.player_state.gain_exp(saved_player_state.exp)
item_ids_in_inventory = [ItemId.from_stats_string(item_stats_and_name[0], item_stats_and_name[1])
if item_stats_and_name
else None
for item_stats_and_name in saved_player_state.items]
game_engine.fill_item_inventory(item_ids_in_inventory)
consumable_slots = {int(slot_number): [ConsumableType[c] for c in consumables]
for (slot_number, consumables) in saved_player_state.consumables_in_slots.items()}
game_state.player_state.consumable_inventory.set_slots(consumable_slots)
game_state.player_state.modify_money(saved_player_state.money)
for portal in game_state.game_world.portals:
if portal.portal_id.name in saved_player_state.enabled_portals:
sprite = saved_player_state.enabled_portals[portal.portal_id.name]
game_engine.activate_portal(portal, Sprite[sprite])
for tier_index, option_index in enumerate(saved_player_state.talent_tier_choices):
if option_index is not None:
pick_talent(game_state, tier_index, option_index)
for completed_quest in saved_player_state.completed_quests:
quest = get_quest(QuestId[completed_quest])
game_state.player_state.start_quest(quest)
game_state.player_state.complete_quest(quest)
for active_quest in saved_player_state.active_quests:
quest = get_quest(QuestId[active_quest])
game_state.player_state.start_quest(quest)
else:
if hero_start_level > 1:
game_engine.gain_levels(hero_start_level - 1)
if start_money > 0:
game_state.player_state.modify_money(start_money)
# When loading from a savefile a bunch of messages are generated (levelup, learning talents, etc), but they
# are irrelevant, since we're loading an exiting character
self.ui_view.info_message.clear_messages()
# Talent toggle is highlighted when new talents are unlocked, but we don't want it to be highlighted on startup
# when loading from a savefile
self.ui_view.remove_highlight_from_talent_toggle()
# We need to handle the initial state (allocating input keys, updating UI, etc)
game_engine.on_abilities_updated()
if map_file_path == 'resources/maps/challenge.json':
world_behavior = ChallengeBehavior(
self.scene_factory, game_state, self.ui_view.info_message, game_engine, self.flags)
else:
world_behavior = StoryBehavior(self.scene_factory, game_engine, game_state, self.ui_view)
new_hero_was_created = saved_player_state is None
playing_scene = self.scene_factory.playing_scene(
game_state, game_engine, world_behavior, self.ui_view, new_hero_was_created, character_file,
total_time_played_on_character)
return SceneTransition(playing_scene)
def _load_map_and_setup_game_state(self, map_file_path: str, picked_hero_id: HeroId) -> GameState:
map_data = load_map_from_json_file(map_file_path)
game_world = map_data.game_world
game_world.player_entity = create_hero_world_entity(picked_hero_id, map_data.player_position)
enabled_portals = {portal.portal_id: portal.world_entity.sprite for portal in game_world.portals
if portal.is_enabled}
return GameState(game_world=game_world,
camera_size=self.camera_size,
player_state=create_player_state_as_initial(picked_hero_id, enabled_portals),
is_dungeon=False,
player_spawn_position=map_data.player_position)
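# Usage sketch (added note; the names marked below are assumptions -- the real
# entry point lives elsewhere in the repo). A fresh game would be started with
# something like:
#   flags = InitFlags(map_file_path="resources/maps/map1.json",  # hypothetical map file
#                     picked_hero=HeroId.WARRIOR,                # hypothetical enum member
#                     saved_player_state=None, hero_start_level=1,
#                     start_money=0, character_file=None)
#   scene = CreatingWorldScene(scene_factory, camera_size=(800, 600),
#                              ui_view=ui_view, flags=flags)
#   transition = scene.run_one_frame(Millis(0))  # returns a SceneTransition to the playing scene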
# ---- /pysnmp/BANYAN-NW-MIB.py (agustinhenze/mibs.snmplabs.com, Apache-2.0) ----
#
# PySNMP MIB module BANYAN-NW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BANYAN-NW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:17:24 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, IpAddress, MibIdentifier, NotificationType, Bits, iso, enterprises, Integer32, Counter64, ObjectIdentity, Counter32, Unsigned32, ModuleIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "IpAddress", "MibIdentifier", "NotificationType", "Bits", "iso", "enterprises", "Integer32", "Counter64", "ObjectIdentity", "Counter32", "Unsigned32", "ModuleIdentity", "Gauge32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
banyan = MibIdentifier((1, 3, 6, 1, 4, 1, 130))
others = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2))
netware = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1))
nwmib1 = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1, 1))
nwfsinfo = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1))
nwperipherals = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2))
nwinterfaces = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3))
nwprotocols = MibIdentifier((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4))
nwName = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwName.setStatus('mandatory')
nwCompany = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwCompany.setStatus('mandatory')
nwRev = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwRev.setStatus('mandatory')
nwRevDate = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwRevDate.setStatus('mandatory')
nwCopyRight = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwCopyRight.setStatus('mandatory')
nwConnsSupp = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwConnsSupp.setStatus('mandatory')
nwConnsInUse = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwConnsInUse.setStatus('mandatory')
nwPeakConnectionsUsed = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwPeakConnectionsUsed.setStatus('mandatory')
nwMaxVolsSupp = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwMaxVolsSupp.setStatus('mandatory')
nwRevArray = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwRevArray.setStatus('mandatory')
nwVolNumber = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolNumber.setStatus('mandatory')
nwVolTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2), )
if mibBuilder.loadTexts: nwVolTable.setStatus('mandatory')
nwVolEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwVolIndex"))
if mibBuilder.loadTexts: nwVolEntry.setStatus('mandatory')
nwVolIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolIndex.setStatus('mandatory')
nwVolName = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolName.setStatus('mandatory')
nwVolDrive = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolDrive.setStatus('mandatory')
nwVolSectorsPerBlk = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolSectorsPerBlk.setStatus('mandatory')
nwVolStartBlk = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolStartBlk.setStatus('mandatory')
nwVolTotalBlks = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolTotalBlks.setStatus('mandatory')
nwVolAvailBlks = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolAvailBlks.setStatus('mandatory')
nwVolTotalDirSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolTotalDirSlots.setStatus('mandatory')
nwVolAvailDirSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolAvailDirSlots.setStatus('mandatory')
nwVolMaxDirSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolMaxDirSlots.setStatus('mandatory')
nwVolHashing = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolHashing.setStatus('mandatory')
nwVolRemovable = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolRemovable.setStatus('mandatory')
nwVolMounted = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolMounted.setStatus('mandatory')
nwVolPurgeBlks = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolPurgeBlks.setStatus('mandatory')
nwVolNotPurgeBlks = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 2, 2, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwVolNotPurgeBlks.setStatus('mandatory')
nwIfNumber = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfNumber.setStatus('mandatory')
nwIfCfgTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2), )
if mibBuilder.loadTexts: nwIfCfgTable.setStatus('mandatory')
nwIfCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwIfCfgBoardNo"))
if mibBuilder.loadTexts: nwIfCfgEntry.setStatus('mandatory')
nwIfCfgSignature = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgSignature.setStatus('mandatory')
nwIfCfgMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMajVer.setStatus('mandatory')
nwIfCfgMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMinVer.setStatus('mandatory')
nwIfCfgNodeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgNodeAddress.setStatus('mandatory')
nwIfCfgModeFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgModeFlags.setStatus('mandatory')
nwIfCfgBoardNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgBoardNo.setStatus('mandatory')
nwIfCfgBoardInst = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgBoardInst.setStatus('mandatory')
nwIfCfgMaxDataSz = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMaxDataSz.setStatus('mandatory')
nwIfCfgMaxRcvSz = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMaxRcvSz.setStatus('mandatory')
nwIfCfgRcvSz = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgRcvSz.setStatus('mandatory')
nwIfCfgCardName = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 11), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgCardName.setStatus('mandatory')
nwIfCfgShortName = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 12), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgShortName.setStatus('mandatory')
nwIfCfgMediaType = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 13), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMediaType.setStatus('mandatory')
nwIfCfgCardId = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgCardId.setStatus('mandatory')
nwIfCfgMediaId = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMediaId.setStatus('mandatory')
nwIfCfgTransportTM = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgTransportTM.setStatus('mandatory')
nwIfCfgMlidMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMlidMajVer.setStatus('mandatory')
nwIfCfgMlidMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMlidMinVer.setStatus('mandatory')
nwIfCfgFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgFlags.setStatus('mandatory')
nwIfCfgSendRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgSendRetries.setStatus('mandatory')
nwIfCfgShareFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgShareFlags.setStatus('mandatory')
nwIfCfgSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgSlot.setStatus('mandatory')
nwIfCfgIoAddr1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgIoAddr1.setStatus('mandatory')
nwIfCfgIoRange1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgIoRange1.setStatus('mandatory')
nwIfCfgIoAddr2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgIoAddr2.setStatus('mandatory')
nwIfCfgIoRange2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 26), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgIoRange2.setStatus('mandatory')
nwIfCfgMemAddr1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 27), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMemAddr1.setStatus('mandatory')
nwIfCfgMemSize1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMemSize1.setStatus('mandatory')
nwIfCfgMemAddr2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMemAddr2.setStatus('mandatory')
nwIfCfgMemSize2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 30), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgMemSize2.setStatus('mandatory')
nwIfCfgInt1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 31), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgInt1.setStatus('mandatory')
nwIfCfgInt2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 32), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgInt2.setStatus('mandatory')
nwIfCfgDma1 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgDma1.setStatus('mandatory')
nwIfCfgDma2 = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 2, 1, 34), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCfgDma2.setStatus('mandatory')
nwIfStatsTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3), )
if mibBuilder.loadTexts: nwIfStatsTable.setStatus('mandatory')
nwIfStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwIfStatsBoardNo"))
if mibBuilder.loadTexts: nwIfStatsEntry.setStatus('mandatory')
nwIfStatsMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsMajVer.setStatus('mandatory')
nwIfStatsMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsMinVer.setStatus('mandatory')
nwIfStatsValidMask = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsValidMask.setStatus('mandatory')
nwIfStatsTotalTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTotalTxPkts.setStatus('mandatory')
nwIfStatsTotalRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTotalRxPkts.setStatus('mandatory')
nwIfStatsNoAvailEcbs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsNoAvailEcbs.setStatus('mandatory')
nwIfStatsTxTooBigs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTxTooBigs.setStatus('mandatory')
nwIfStatsTxTooSmalls = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTxTooSmalls.setStatus('mandatory')
nwIfStatsRxOverFlows = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxOverFlows.setStatus('mandatory')
nwIfStatsRxTooBigs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxTooBigs.setStatus('mandatory')
nwIfStatsRxTooSmalls = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxTooSmalls.setStatus('mandatory')
nwIfStatsTxMiscErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTxMiscErrs.setStatus('mandatory')
nwIfStatsRxMiscErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxMiscErrs.setStatus('mandatory')
nwIfStatsTxRetrys = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsTxRetrys.setStatus('mandatory')
nwIfStatsRxChkSumErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxChkSumErrs.setStatus('mandatory')
nwIfStatsRxMismatchs = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsRxMismatchs.setStatus('mandatory')
nwIfStatsBoardNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsBoardNo.setStatus('mandatory')
nwIfStatsCustom = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 3, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfStatsCustom.setStatus('mandatory')
nwIfCustomStatsTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4), )
if mibBuilder.loadTexts: nwIfCustomStatsTable.setStatus('mandatory')
nwIfCustomStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwIfCustomStatsBoardNo"), (0, "BANYAN-NW-MIB", "nwIfCustomStatsIndex"))
if mibBuilder.loadTexts: nwIfCustomStatsEntry.setStatus('mandatory')
nwIfCustomStatsBoardNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCustomStatsBoardNo.setStatus('mandatory')
nwIfCustomStatsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCustomStatsIndex.setStatus('mandatory')
nwIfCustomStatsDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCustomStatsDescr.setStatus('mandatory')
nwIfCustomStatsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 3, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwIfCustomStatsValue.setStatus('mandatory')
nwProtNumber = MibScalar((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtNumber.setStatus('mandatory')
nwProtCfgTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2), )
if mibBuilder.loadTexts: nwProtCfgTable.setStatus('mandatory')
nwProtCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwProtCfgProtNo"))
if mibBuilder.loadTexts: nwProtCfgEntry.setStatus('mandatory')
nwProtCfgProtNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgProtNo.setStatus('mandatory')
nwProtCfgMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgMajVer.setStatus('mandatory')
nwProtCfgMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgMinVer.setStatus('mandatory')
nwProtCfgName = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgName.setStatus('mandatory')
nwProtCfgRegName = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgRegName.setStatus('mandatory')
nwProtCfgStkMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgStkMajVer.setStatus('mandatory')
nwProtCfgStkMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCfgStkMinVer.setStatus('mandatory')
nwProtStatsTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3), )
if mibBuilder.loadTexts: nwProtStatsTable.setStatus('mandatory')
nwProtStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwProtStatsProtNo"))
if mibBuilder.loadTexts: nwProtStatsEntry.setStatus('mandatory')
nwProtStatsProtNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsProtNo.setStatus('mandatory')
nwProtStatsMajVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsMajVer.setStatus('mandatory')
nwProtStatsMinVer = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsMinVer.setStatus('mandatory')
nwProtStatsValidMask = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsValidMask.setStatus('mandatory')
nwProtStatsTotalTxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsTotalTxPkts.setStatus('mandatory')
nwProtStatsTotalRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsTotalRxPkts.setStatus('mandatory')
nwProtStatsIgnoredRxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsIgnoredRxPkts.setStatus('mandatory')
nwProtStatsCustom = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtStatsCustom.setStatus('mandatory')
nwProtCustomStatsTable = MibTable((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4), )
if mibBuilder.loadTexts: nwProtCustomStatsTable.setStatus('mandatory')
nwProtCustomStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4, 1), ).setIndexNames((0, "BANYAN-NW-MIB", "nwProtCustomStatsProtNo"), (0, "BANYAN-NW-MIB", "nwProtCustomStatsIndex"))
if mibBuilder.loadTexts: nwProtCustomStatsEntry.setStatus('mandatory')
nwProtCustomStatsProtNo = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCustomStatsProtNo.setStatus('mandatory')
nwProtCustomStatsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCustomStatsIndex.setStatus('mandatory')
nwProtCustomStatsDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCustomStatsDescr.setStatus('mandatory')
nwProtCustomStatsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 130, 2, 1, 1, 4, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: nwProtCustomStatsValue.setStatus('mandatory')
mibBuilder.exportSymbols("BANYAN-NW-MIB", nwProtCfgMajVer=nwProtCfgMajVer, nwProtCfgStkMinVer=nwProtCfgStkMinVer, nwIfCfgSignature=nwIfCfgSignature, nwVolNumber=nwVolNumber, nwVolDrive=nwVolDrive, nwIfCfgMemSize1=nwIfCfgMemSize1, others=others, nwIfStatsTxMiscErrs=nwIfStatsTxMiscErrs, nwVolNotPurgeBlks=nwVolNotPurgeBlks, nwVolEntry=nwVolEntry, nwVolIndex=nwVolIndex, nwIfStatsRxChkSumErrs=nwIfStatsRxChkSumErrs, banyan=banyan, nwIfCfgDma1=nwIfCfgDma1, nwIfCfgTransportTM=nwIfCfgTransportTM, nwIfCfgMaxDataSz=nwIfCfgMaxDataSz, nwIfCfgMajVer=nwIfCfgMajVer, nwProtCfgName=nwProtCfgName, nwIfStatsMajVer=nwIfStatsMajVer, nwIfCfgTable=nwIfCfgTable, nwPeakConnectionsUsed=nwPeakConnectionsUsed, nwIfCfgMaxRcvSz=nwIfCfgMaxRcvSz, nwRevDate=nwRevDate, nwProtCfgProtNo=nwProtCfgProtNo, nwVolStartBlk=nwVolStartBlk, nwIfCfgModeFlags=nwIfCfgModeFlags, nwIfCfgInt2=nwIfCfgInt2, nwIfCustomStatsIndex=nwIfCustomStatsIndex, nwVolPurgeBlks=nwVolPurgeBlks, nwIfCfgInt1=nwIfCfgInt1, nwProtStatsMinVer=nwProtStatsMinVer, nwIfStatsRxOverFlows=nwIfStatsRxOverFlows, nwIfCfgDma2=nwIfCfgDma2, nwIfStatsValidMask=nwIfStatsValidMask, nwVolAvailBlks=nwVolAvailBlks, nwIfCfgIoAddr2=nwIfCfgIoAddr2, nwIfStatsTotalRxPkts=nwIfStatsTotalRxPkts, nwIfCustomStatsEntry=nwIfCustomStatsEntry, nwCompany=nwCompany, nwCopyRight=nwCopyRight, nwIfStatsRxTooSmalls=nwIfStatsRxTooSmalls, nwProtCustomStatsProtNo=nwProtCustomStatsProtNo, nwProtCustomStatsEntry=nwProtCustomStatsEntry, nwIfCfgBoardNo=nwIfCfgBoardNo, nwProtStatsValidMask=nwProtStatsValidMask, nwIfCfgMlidMajVer=nwIfCfgMlidMajVer, nwVolMaxDirSlots=nwVolMaxDirSlots, nwProtNumber=nwProtNumber, nwIfStatsTotalTxPkts=nwIfStatsTotalTxPkts, nwIfCfgBoardInst=nwIfCfgBoardInst, nwIfStatsNoAvailEcbs=nwIfStatsNoAvailEcbs, nwIfCfgShareFlags=nwIfCfgShareFlags, nwVolRemovable=nwVolRemovable, nwIfCfgNodeAddress=nwIfCfgNodeAddress, nwConnsSupp=nwConnsSupp, nwRev=nwRev, nwVolAvailDirSlots=nwVolAvailDirSlots, nwProtStatsMajVer=nwProtStatsMajVer, nwConnsInUse=nwConnsInUse, nwIfStatsRxMismatchs=nwIfStatsRxMismatchs, nwIfCfgRcvSz=nwIfCfgRcvSz, nwProtStatsCustom=nwProtStatsCustom, nwIfStatsTxTooSmalls=nwIfStatsTxTooSmalls, nwProtCfgEntry=nwProtCfgEntry, nwIfStatsTable=nwIfStatsTable, nwProtCustomStatsDescr=nwProtCustomStatsDescr, nwName=nwName, nwIfStatsTxRetrys=nwIfStatsTxRetrys, nwIfNumber=nwIfNumber, nwIfCustomStatsBoardNo=nwIfCustomStatsBoardNo, nwProtStatsTotalTxPkts=nwProtStatsTotalTxPkts, nwVolTotalDirSlots=nwVolTotalDirSlots, nwIfCustomStatsValue=nwIfCustomStatsValue, nwIfCfgMemSize2=nwIfCfgMemSize2, nwIfCfgFlags=nwIfCfgFlags, nwProtCfgTable=nwProtCfgTable, nwProtCfgRegName=nwProtCfgRegName, nwProtStatsProtNo=nwProtStatsProtNo, nwIfCfgShortName=nwIfCfgShortName, nwProtCfgStkMajVer=nwProtCfgStkMajVer, nwVolTable=nwVolTable, nwIfCfgCardName=nwIfCfgCardName, nwinterfaces=nwinterfaces, nwProtStatsTable=nwProtStatsTable, nwProtStatsEntry=nwProtStatsEntry, nwProtCustomStatsValue=nwProtCustomStatsValue, nwIfCfgMediaId=nwIfCfgMediaId, nwIfStatsRxTooBigs=nwIfStatsRxTooBigs, nwIfCfgMinVer=nwIfCfgMinVer, nwIfCfgMemAddr1=nwIfCfgMemAddr1, nwMaxVolsSupp=nwMaxVolsSupp, nwIfStatsEntry=nwIfStatsEntry, nwIfCfgMediaType=nwIfCfgMediaType, netware=netware, nwIfCfgMemAddr2=nwIfCfgMemAddr2, nwIfStatsRxMiscErrs=nwIfStatsRxMiscErrs, nwProtStatsIgnoredRxPkts=nwProtStatsIgnoredRxPkts, nwfsinfo=nwfsinfo, nwRevArray=nwRevArray, nwIfCfgMlidMinVer=nwIfCfgMlidMinVer, nwVolMounted=nwVolMounted, nwProtCustomStatsIndex=nwProtCustomStatsIndex, nwProtCustomStatsTable=nwProtCustomStatsTable, nwIfCfgSendRetries=nwIfCfgSendRetries, 
nwIfCfgSlot=nwIfCfgSlot, nwVolSectorsPerBlk=nwVolSectorsPerBlk, nwVolHashing=nwVolHashing, nwIfCfgIoRange2=nwIfCfgIoRange2, nwProtStatsTotalRxPkts=nwProtStatsTotalRxPkts, nwIfCustomStatsDescr=nwIfCustomStatsDescr, nwmib1=nwmib1, nwIfStatsMinVer=nwIfStatsMinVer, nwProtCfgMinVer=nwProtCfgMinVer, nwIfCfgCardId=nwIfCfgCardId, nwVolName=nwVolName, nwIfCustomStatsTable=nwIfCustomStatsTable, nwIfStatsBoardNo=nwIfStatsBoardNo, nwIfStatsCustom=nwIfStatsCustom, nwprotocols=nwprotocols, nwVolTotalBlks=nwVolTotalBlks, nwIfCfgIoAddr1=nwIfCfgIoAddr1, nwIfCfgIoRange1=nwIfCfgIoRange1, nwperipherals=nwperipherals, nwIfCfgEntry=nwIfCfgEntry, nwIfStatsTxTooBigs=nwIfStatsTxTooBigs)
| [
"[email protected]"
] | |
d237833b720ab49c6906e5708002d2834372ac3d | 9ddfd30620c39fb73ac57e79eae0a001c45db45f | /addons/prisme_contact_enhancement/models/__init__.py | 56357c83a049980c861e401c12364ef855b55a5d | [] | no_license | zamzamintl/silver | a89bacc1ba6a7a59de1a92e3f7c149df0468e185 | 8628e4419c4ee77928c04c1591311707acd2465e | refs/heads/master | 2023-01-06T20:29:25.372314 | 2020-10-29T21:02:41 | 2020-10-29T21:02:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | # -*- coding: utf-8 -*-
###########################################################################
#
# Prisme Solutions Informatique SA
# Copyright (c) 2016 Prisme Solutions Informatique SA <http://prisme.ch>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Project ID: OERP-001-01
#
# Modifications:
#
##########################################################################
from . import res_partner_model | [
"[email protected]"
] | |
d6830d4ff16376893003b80808781e3aec0c3bb2 | c104dbd09a853725cb4f4b17df7c5dd59d47e04e | /test/test_modify_alert_policy.py | 83402c1d6893da6593578261001770b0aa9a0dea | [
"Apache-2.0"
] | permissive | bm-lab/opsgenie-python-sdk | 5a64e2c24f1b9168ecadf482ba8084ba27a659fc | 244c4c40ddcc25e70df5ba4425ab8d7c8da59c18 | refs/heads/master | 2021-10-09T03:18:48.101672 | 2018-12-15T01:03:36 | 2018-12-20T15:13:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 902 | py | # coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import opsgenie_swagger
from opsgenie_swagger.models.modify_alert_policy import ModifyAlertPolicy # noqa: E501
from opsgenie_swagger.rest import ApiException
class TestModifyAlertPolicy(unittest.TestCase):
"""ModifyAlertPolicy unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testModifyAlertPolicy(self):
"""Test ModifyAlertPolicy"""
# FIXME: construct object with mandatory attributes with example values
# model = opsgenie_swagger.models.modify_alert_policy.ModifyAlertPolicy() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
402ccdca5869a573340f6de12ef5e4dbbe9e588a | e6f0b705a768229c160a2603393709b4b8a683be | /.history/books/api/views_20210424164436.py | c4d52011855368b33b5770873ed269be0a6587e8 | [] | no_license | walaaElbasha/bookstore_django_basics | 0741e16e3fe69d4460c095bb8afc964f52d96f1d | 2037a434d7bdb0ca1954ca3de9f56655b77ec64e | refs/heads/main | 2023-04-19T07:01:05.416800 | 2021-04-28T15:04:15 | 2021-04-28T15:04:15 | 361,507,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,712 | py | from django .shortcuts import render,redirect
from rest_framework.response import Response
from rest_framework import status
from books.models import Book,Isbn
from .serializers import IsbnSerializer
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated,BasePermission
from django.http.response import JsonResponse
from rest_framework.parsers import JSONParser
#from django.contrib.auth.decorators import api_view , permission_classes
#from rest_framework import
# class IsViewer(BasePermission):
# def has_permission(self,request,view):
# return request.user.groups.filter(name="viewers").exists()
@api_view(["POST"])
def api_signup(request):
serializer = UserSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(data={
"success": True,
"message": "User has been registered successfully"
}, status=status.HTTP_201_CREATED)
return Response(data={
"success": False,
"errors": serializer.errors
}, status=status.HTTP_400_BAD_REQUEST)
@api_view(["GET"])
def index(request):
#books = Book.objects.all()
#serializer=BookSerializer(instance=books,many=True)
isbns=Isbn.objects.all()
serializer=IsbnSerializer(instance=isbns,many=True)
return Response(data=serializer.data,status=status.HTTP_200_OK)
@api_view(["POST"])
def create(request):
serializer=IsbnSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(data={
"success":True,
"message":"Book has been created successfully",
},
status=status.HTTP_200_OK,
)
return Response(data={
"success":False,
"errors":serializer.errors
    },status=status.HTTP_400_BAD_REQUEST,
)
@api_view(["DELETE"])
def destroy(request, id):
try:
isbn=Isbn.objects.get(pk=id)
isbn.delete()
return Response(data={
"success":True,
"message":"Book has been deleted successfully",
},
status=status.HTTP_200_OK,
)
except Isbn.DoesNotExist:
return JsonResponse({'message': 'The book does not exist'}, status=status.HTTP_404_NOT_FOUND)
@api_view(["PUT"])
def update(request, id):
isbn=Isbn.objects.get(pk=id)
isbn_data = JSONParser().parse(request)
isbn_serializer = IsbnSerializer(isbn, data=isbn_data)
if isbn_serializer.is_valid():
isbn_serializer.save()
return JsonResponse(isbn_serializer.data)
return JsonResponse(isbn_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
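# Illustrative wiring only (not part of the original file): a urls.py for the
# views above might look like the sketch below. The route names are
# assumptions, and UserSerializer is assumed to live in .serializers.
#
#     from django.urls import path
#     from . import views
#
#     urlpatterns = [
#         path('signup/', views.api_signup),
#         path('isbns/', views.index),
#         path('isbns/create/', views.create),
#         path('isbns/<int:id>/update/', views.update),
#         path('isbns/<int:id>/delete/', views.destroy),
#     ]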
| [
"[email protected]"
] | |
c7d568aa9eace0957fa12ebcbc59a6088b37bf31 | 2572a35d4861b1f25add1bf746474636d30134af | /Regex/matching_character_ranges.py | cd108c0f4e072daa738c993a7d4c6a4164099282 | [] | no_license | gargi13832/Hackerrank-Problems | b1e78f42f3627609ddf08d546da7a84d139eb6f2 | 84c6a03ccfaaf5fef91938d3f566af92cc913a12 | refs/heads/master | 2023-01-04T22:22:40.830760 | 2020-10-29T17:31:17 | 2020-10-29T17:31:17 | 289,316,477 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | Regex_Pattern = r'^[a-z][1-9][^a-z][^A-Z][A-Z]' # Do not delete 'r'.
| [
"[email protected]"
] | |
5b3a4cf867926248aefb635b5b895a9b7d33a3f8 | 6a087c6fb00ba91f815f997450306a3fac020a8b | /Test_case/CLGL/test自定义车辆.py | 6a0ed72cd83c09c38f031f3ee7b5841b4c947e2f | [] | no_license | anghu3/xizangbianfang | cd2037af5e06cc558bf3ef9ff145e0c33495139b | f4f35195963017c22bd4875853ef31f280f4b6a8 | refs/heads/master | 2020-03-28T22:31:06.591507 | 2018-12-14T07:36:47 | 2018-12-14T07:36:47 | 149,238,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,930 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 21 14:11:17 2018
@author: PCCC
"""
import unittest
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import time
import os
import re
from public_package.pubilc_package import url,login_name,login_name_test,login_password,login_password_test
from public_package.pubilc_package import sheet_setting, search, reset, currMenupath, page_title, goback, saveBtn , sheet_menu,sheet_prompt_message,work_space
from public_package.pubilc_package import TESTCASE
import HTMLTestRunner
import xlrd
'''
Test case name:
Test case ID:
Test case scenario:
Test case author:
'''
xlsfile=work_space+r'\\'+sheet_menu.col_values(6,33,34)[0]
excel = xlrd.open_workbook(xlsfile)
global sheet
sheet = excel.sheet_by_name('自定义车辆')
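# Note (added for clarity): xlrd's sheet.col_values(colx, start_rowx, end_rowx)
# returns column `colx` values for rows [start_rowx, end_rowx), so the trailing
# [0] used throughout this test reads a single cell -- e.g.
# sheet.col_values(1, 0, 1)[0] is the cell at row 0, column 1.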
class TESTCAST_ZIDINGYICHELIANG(TESTCASE):
def setUp(self):
self.dr = webdriver.Chrome()
self.dr.maximize_window()
def tearDown(self):
# print("脚本执行完成")
self.dr.quit()
def login(self, username, password):
self.dr.get(url)
self.dr.find_element_by_id('vv').send_keys(username)
self.dr.find_element_by_xpath('//*[@id="login_ff"]/div[2]/input').send_keys(password)
self.dr.find_element_by_xpath('//*[@id="login_ff"]/a').click()
def zidingyicheliang_search(self):
self.login(login_name, login_password)
self.dr.find_element_by_xpath(sheet_menu.col_values(1,33,34)[0]).click()
time.sleep(2)
        self.assertEqual('车辆管理',self.dr.find_element_by_xpath(currMenupath).text,'verify the Vehicle Management menu')
self.dr.find_element_by_xpath(sheet_menu.col_values(3,33,34)[0]).click()
self.dr.find_element_by_xpath(sheet_menu.col_values(5,33,34)[0]).click()
self.dr.switch_to.frame('iframeb')
time.sleep(3)
        self.assertEqual('自定义车辆列表',self.dr.find_element_by_xpath(page_title).text,'verify the Custom Vehicles list page')
def test01_zidingyicheliang_add(self):
self.zidingyicheliang_search()
add_value_cphm=sheet.col_values(1,0,1)[0]
self.dr.find_element_by_xpath('/html/body/div[3]/div[1]/div[2]/a[2]').click()
self.dr.find_element_by_xpath('//*[@id="vehicleNo"]').send_keys(add_value_cphm)
self.dr.find_element_by_xpath('//*[@id="veForm"]/div[1]/div[2]/a/span').click()
time.sleep(2)
self.dr.find_element_by_xpath('//*[@id="monitorReason"]').send_keys(sheet.col_values(1,2,3)[0])
self.dr.find_element_by_xpath('//*[@id="modifyBy"]').send_keys(sheet.col_values(1,3,4)[0])
self.dr.find_element_by_xpath('//*[@id="monitorUnit"]').click()
time.sleep(1)
self.dr.find_element_by_xpath('//*[@id="treeSelect_45_switch"]').click()
time.sleep(1)
self.dr.find_element_by_xpath('//*[@id="treeSelect_46_switch"]').click()
time.sleep(1)
self.dr.find_element_by_xpath('//*[@id="treeSelect_48_span"]').click()
self.dr.find_element_by_xpath(saveBtn).click()
self.dr.switch_to.default_content()
self.dr.switch_to.frame('iframeb')
time.sleep(1)
        self.assertEqual(sheet_prompt_message.col_values(1, 1, 2)[0],
                         self.dr.find_element_by_xpath('//*[@id="gritter-item-1"]/div[2]/div[2]/p').text, 'verify the add-success prompt message')
        print('Vehicle Management - Custom Vehicles: add function works')
def test02_zidingyicheliang_search_cphm(self):
self.zidingyicheliang_search()
search_value_cphm=sheet.col_values(1,0,1)[0]
cphm_path=sheet.col_values(1,1,2)[0]
self.dr.find_element_by_xpath(cphm_path).send_keys(search_value_cphm)
self.dr.find_element_by_xpath(search).click()
self.dr.switch_to.default_content()
time.sleep(2)
self.dr.switch_to.frame('iframeb')
paginal_number=self.dr.find_element_by_xpath(sheet_setting.col_values(4,1,2)[0]).text
column=3
self.pagination_num(paginal_number,search_value_cphm,column)
self.dr.find_element_by_xpath(reset).click()
time.sleep(2)
self.dr.find_element_by_xpath(search).click()
        self.assertEqual('',self.dr.find_element_by_xpath(cphm_path).get_attribute('value'),'plate number field - reset function failed')
        print('Vehicle Management - Custom Vehicles: plate-number search works')
def test03_zidingyicheliang_details(self):
self.zidingyicheliang_search()
search_value_cphm = sheet.col_values(1, 0, 1)[0]
cphm_path = sheet.col_values(1, 1, 2)[0]
self.dr.find_element_by_xpath(cphm_path).send_keys(search_value_cphm)
self.dr.find_element_by_xpath(search).click()
self.dr.switch_to.default_content()
time.sleep(2)
self.dr.switch_to.frame('iframeb')
paginal_number = self.dr.find_element_by_xpath(sheet_setting.col_values(4, 1, 2)[0]).text
column = 3
self.pagination_num(paginal_number, search_value_cphm, column)
self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[11]/a').click()
        self.assertEqual(sheet.col_values(1,0,1)[0],self.dr.find_element_by_xpath('//*[@id="vehicleNo"]').get_attribute('value'),'details page plate number check failed')
        print('Vehicle Management - Custom Vehicles: details function works')
def test04_zidingyicheliang_edit(self):
self.zidingyicheliang_search()
search_value_cphm = sheet.col_values(1, 0, 1)[0]
cphm_path = sheet.col_values(1, 1, 2)[0]
self.dr.find_element_by_xpath(cphm_path).send_keys(search_value_cphm)
self.dr.find_element_by_xpath(search).click()
self.dr.switch_to.default_content()
time.sleep(2)
self.dr.switch_to.frame('iframeb')
paginal_number = self.dr.find_element_by_xpath(sheet_setting.col_values(4, 1, 2)[0]).text
column = 3
self.pagination_num(paginal_number, search_value_cphm, column)
self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr[1]/td[11]/a').click()
self.dr.find_element_by_xpath('//*[@id="monitorReason"]').send_keys(sheet.col_values(2, 2, 3)[0])
self.dr.find_element_by_xpath('//*[@id="modifyBy"]').send_keys(sheet.col_values(2, 3, 4)[0])
self.dr.find_element_by_xpath(saveBtn).click()
self.dr.switch_to.default_content()
self.dr.switch_to.frame('iframeb')
time.sleep(1)
        self.assertEqual(sheet_prompt_message.col_values(1, 1, 2)[0],
                         self.dr.find_element_by_xpath('//*[@id="gritter-item-1"]/div[2]/div[2]/p').text, 'verify the save-success prompt message')
        print('Vehicle Management - Custom Vehicles: edit function works')
def test05_zidingyicheliang_delete(self):
self.zidingyicheliang_search()
search_value_cphm = '藏DK0700'
self.dr.find_element_by_xpath('//*[@id="form"]/div[1]/div/input').send_keys(search_value_cphm)
self.dr.find_element_by_xpath('//*[@id="search"]').click()
self.dr.switch_to.default_content()
time.sleep(2)
self.dr.switch_to.frame('iframeb')
paginal_number = self.dr.find_element_by_xpath('/html/body/div[3]/div[2]/div/div[4]/div[1]/span[1]').text
column = 3
self.pagination_num(paginal_number, search_value_cphm, column)
self.dr.find_element_by_xpath('//*[@id="list"]/thead/tr/th[1]/div[1]/input').click()
self.dr.find_element_by_xpath('/html/body/div[3]/div[1]/div[2]/a[1]').click()
self.dr.switch_to.default_content()
time.sleep(3)
self.dr.find_element_by_xpath('/html/body/div[3]/div[3]/div/button[2]/span').click()
time.sleep(5)
self.dr.switch_to.frame('iframeb')
        self.assertEqual('没有找到匹配的记录',self.dr.find_element_by_xpath('//*[@id="list"]/tbody/tr/td').text,'verify the delete function works')
        print('Vehicle Management - Custom Vehicles: delete function works')
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
da110465a674ab0792226d4ac273b86fc9ed28ac | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03090/s812303792.py | 0fb18fdf310d674ae4f216404ba354a1c71ffbf6 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 905 | py | #!/usr/bin/env python3
#AGC32 B
import sys
import math
from bisect import bisect_right as br
from bisect import bisect_left as bl
sys.setrecursionlimit(1000000000)
from heapq import heappush, heappop,heappushpop
from collections import defaultdict
from itertools import accumulate
from collections import Counter
from collections import deque
from operator import itemgetter
from itertools import permutations
mod = 10**9 + 7
inf = float('inf')
def I(): return int(sys.stdin.readline())
def LI(): return list(map(int,sys.stdin.readline().split()))
n = I()
m = n*(n-1)//2
ans = []
for i in range(n):
for j in range(i+1,n):
ans.append((i+1,j+1))
lst = []
if n % 2:
for i in range(n//2):
lst.append((i+1,n-i-1))
m -= 1
else:
for i in range(n//2):
lst.append((i+1,n-i))
m -= 1
for i,j in lst:
ans.remove((i,j))
print(m)
for i,j in ans:
print(i,j)
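
# Why this works (explanatory note, not in the original solution): in the
# complete graph K_n the neighbour-label sum of vertex v is S - v, where S is
# the sum of all labels. Removing a matching whose pairs all sum to the same
# constant c gives matched vertices the sum S - v - (c - v) = S - c; for odd n
# the unmatched vertex is n = c, whose sum S - n = S - c already agrees. Hence
# every vertex has an equal neighbour sum, which is what AGC032-B asks for.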
| [
"[email protected]"
] | |
c8a81c58a5ba30e56493fc3f9f751e59848f81d9 | fab14fae2b494068aa793901d76464afb965df7e | /benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/13-sender_receiver_34.py | 716b7a6f40a03d13a219212d8ffce30ce7cdb74a | [
"MIT"
] | permissive | teodorov/F3 | 673f6f9ccc25acdfdecbfc180f439253474ba250 | c863215c318d7d5f258eb9be38c6962cf6863b52 | refs/heads/master | 2023-08-04T17:37:38.771863 | 2021-09-16T07:38:28 | 2021-09-16T07:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,103 | py | from typing import FrozenSet
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
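# Illustrative note (not part of the original benchmark): make_enum encodes an
# enum over ceil(log2(enum_size)) boolean constants. For enum_size = 3 it
# declares bits b0, b1 and builds the value predicates
#     0 -> !b1 & !b0,    1 -> !b1 & b0,    2 -> b1 & !b0,
# with x_vals holding the same predicates over the primed (next-state) symbols.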
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (c' = 0 & out_c' = out_c &
# (wait_ack' <-> (in_c != msg_id & c > timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
s2r = mgr.Symbol("r2s", types.INT)
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, mgr.Equals(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, i0))
hint = Hint("h_s2r0", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_l)
loc0.set_progress(0, x_s_l)
hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_evt)
loc0.set_progress(0, x_s_evt)
hint = Hint("h_s_evt0", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_timeout, r0))
loc0.set_progress(0, mgr.Equals(x_s_timeout, r0))
hint = Hint("h_s_timeout0", env, frozenset([s_timeout]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, r0))
hint = Hint("h_s_c0", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(0, x_r_l)
hint = Hint("h_r_l0", env, frozenset([r_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, i1))
hint = Hint("h_s2r1", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(r2s, i0))
loc0.set_progress(0, mgr.Equals(x_r2s, i1))
hint = Hint("h_r2s1", env, frozenset([r2s]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_l)
loc0.set_progress(1, mgr.Not(x_s_l))
loc1 = Location(env, mgr.Not(s_l))
loc1.set_progress(0, x_s_l)
hint = Hint("h_s_l1", env, frozenset([s_l]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, s_evt)
loc0.set_progress(1, mgr.Not(x_s_evt))
loc1 = Location(env, mgr.Not(s_evt))
loc1.set_progress(0, x_s_evt)
hint = Hint("h_s_evt1", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, mgr.GE(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, mgr.Plus(s_c, r1)))
hint = Hint("h_s_c1", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, mgr.Plus(delta, r1)))
hint = Hint("h_delta2", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, mgr.Plus(s2r, i1)))
hint = Hint("h_s2r2", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
return frozenset(res)
| [
"[email protected]"
] | |
79a3a476ca30b1e4a997e58c1852b7ece681f724 | afbfb4479c031c4515d623507c3529d019b2506a | /link/json/collection.py | 070cfd0e3a2b277dd4bd2c73129a02fd1bc2979a | [] | no_license | linkdd/link.json | 22e95664635986208a8ce51d6d7d410bb5012f68 | 2169a4252d3393def0e37d3a1aa167dd0b77c730 | refs/heads/master | 2020-04-15T13:38:33.317573 | 2016-09-16T13:50:41 | 2016-09-16T13:50:41 | 58,967,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,275 | py | # -*- coding: utf-8 -*-
from b3j0f.conf import Configurable, category, Parameter
from link.json.schema import JsonSchema
from link.json import CONF_BASE_PATH
DEFAULT_SCHEMA = 'http://hyperschema.org/mediatypes/collection-json.json'
@Configurable(
paths='{0}/collection.conf'.format(CONF_BASE_PATH),
conf=category(
'JSONCOLLECTION',
Parameter(name='version', value='1.0'),
Parameter(name='schema', value=DEFAULT_SCHEMA)
)
)
class CollectionJSONResponse(object):
"""
Helper class used to generate valid Collection+JSON objects.
"""
ITEM_ID = 'id'
def __init__(
self,
href,
links=None,
items=None,
queries=None,
template=None,
error=None,
*args, **kwargs
):
"""
:param href: Base URL
:type href: str
:param links: Optional list of links
:type links: list
:param items: Optional list of items
:type items: list
:param queries: Optional list of queries
:type queries: list
:param template: Optional item template
:type template: dict
:param error: Optional error
:type error: dict
"""
super(CollectionJSONResponse, self).__init__(*args, **kwargs)
self.href = href
self.links = links
self.items = items
self.queries = queries
self.template = template
self.error = error
self.validator = JsonSchema()
def json(self):
"""
Generate JSON object.
:return: Collection+JSON object
:rtype: dict
"""
base = {
'collection': {
'version': self.version,
'href': self.href
}
}
if self.links is not None:
base['collection']['links'] = self.links
if self.items is not None:
base['collection']['items'] = self.items
if self.queries is not None:
base['collection']['queries'] = self.queries
if self.template is not None:
base['collection']['template'] = self.template
if self.error is not None:
base['collection']['error'] = self.error
self.validator.validate(self.schema, base)
return base
@staticmethod
def template_from_schema(schema):
tmpl = {
'template': {
'data': []
}
}
if 'properties' in schema:
for propname in schema['properties']:
prop = schema['properties'][propname]
data = {
'name': propname
}
if 'default' in prop:
data['value'] = prop['default']
if 'title' in prop:
data['prompt'] = prop['title']
elif 'description' in prop:
data['prompt'] = prop['description']
tmpl['template']['data'].append(data)
return tmpl
@classmethod
def make_item(cls, href, document, schema=None):
item = {
'href': '{0}/{1}'.format(href, document.get(cls.ITEM_ID, '')),
'data': []
}
if schema is not None and 'links' in schema:
item['links'] = []
for link in schema['links']:
itemlink = {
'href': link['href'].format(**document),
'rel': link['rel']
}
if 'title' in link:
itemlink['name'] = link['title']
if 'description' in link:
itemlink['prompt'] = link['description']
item['links'].append(itemlink)
for key in document:
data = {
'name': key,
'value': document[key]
}
if schema is not None and key in schema.get('properties', {}):
prop = schema['properties'][key]
if 'title' in prop:
data['prompt'] = prop['title']
elif 'description' in prop:
data['prompt'] = prop['description']
item['data'].append(data)
return item
def generate_collection_response(
href,
links=None,
items=None,
queries=None,
schema=None,
error=None
):
"""
Helper instantiating a ``CollectionJSONResponse`` class using the default
schema.
:param href: Base URL
:type href: str
:param links: Optional list of links
:type links: list
:param items: Optional list of items
:type items: list
:param queries: Optional list of queries
:type queries: list
:param schema: Optional item schema
:type schema: dict
:param error: Optional error
:type error: dict
:return: Collection+JSON object
:rtype: dict
"""
resp = CollectionJSONResponse(
href,
links=links,
items=[
CollectionJSONResponse.make_item(href, item, schema=schema)
for item in items
],
queries=queries,
template=CollectionJSONResponse.template_from_schema(schema),
error=error
)
return resp.json()
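
# A minimal usage sketch (illustrative only: the URL, items and schema below
# are made up, and the b3j0f Configurable machinery must be set up as usual):
#
#     schema = {'properties': {'id': {'title': 'Identifier'},
#                              'name': {'title': 'Name'}}}
#     body = generate_collection_response(
#         'http://api.example.com/items',
#         items=[{'id': 1, 'name': 'foo'}],
#         schema=schema)
#     # body['collection']['items'][0]['href'] -> 'http://api.example.com/items/1'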
| [
"[email protected]"
] | |
df99f66d6a4ea213d5897a886fa15861e00d5972 | dcd772f567ef8a8a1173a9f437cd68f211fb9362 | /tests/framework/Samplers/EnsembledSampler/EnsembleForwardTestFunction/funct2.py | 02dee0def8bbab059befbaac1ff1f937f1f2f1a2 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | idaholab/raven | 39cdce98ad916c638399232cdc01a9be00e200a2 | 2b16e7aa3325fe84cab2477947a951414c635381 | refs/heads/devel | 2023-08-31T08:40:16.653099 | 2023-08-29T16:21:51 | 2023-08-29T16:21:51 | 85,989,537 | 201 | 126 | Apache-2.0 | 2023-09-13T21:55:43 | 2017-03-23T19:29:27 | C++ | UTF-8 | Python | false | false | 57 | py | def evaluate(self):
value = 2.0*self.x2
return value
| [
"[email protected]"
] | |
bd5ba5c7452d780e35d39750ff9e32f378d30a5f | 0760fb4901a75766921a205b55686d6d6f049b30 | /python/ray/_private/runtime_env/py_modules.py | 97fccf60addb8d4131a5f5171d938b8c5bdf6255 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | ray-project/ray | a4bb6940b08b59a61ef0b8e755a52d8563a2f867 | edba68c3e7cf255d1d6479329f305adb7fa4c3ed | refs/heads/master | 2023-08-31T03:36:48.164405 | 2023-08-31T03:20:38 | 2023-08-31T03:20:38 | 71,932,349 | 29,482 | 5,669 | Apache-2.0 | 2023-09-14T21:48:14 | 2016-10-25T19:38:30 | Python | UTF-8 | Python | false | false | 8,762 | py | import logging
import os
from pathlib import Path
from types import ModuleType
from typing import Any, Dict, List, Optional
from ray._private.runtime_env.conda_utils import exec_cmd_stream_to_logger
from ray._private.runtime_env.context import RuntimeEnvContext
from ray._private.runtime_env.packaging import (
Protocol,
delete_package,
download_and_unpack_package,
get_local_dir_from_uri,
get_uri_for_directory,
get_uri_for_package,
is_whl_uri,
package_exists,
parse_uri,
upload_package_if_needed,
upload_package_to_gcs,
)
from ray._private.runtime_env.plugin import RuntimeEnvPlugin
from ray._private.runtime_env.working_dir import set_pythonpath_in_context
from ray._private.utils import get_directory_size_bytes, try_to_create_directory
from ray.exceptions import RuntimeEnvSetupError
default_logger = logging.getLogger(__name__)
def _check_is_uri(s: str) -> bool:
try:
protocol, path = parse_uri(s)
except ValueError:
protocol, path = None, None
if protocol in Protocol.remote_protocols() and not path.endswith(".zip"):
raise ValueError("Only .zip files supported for remote URIs.")
return protocol is not None
def upload_py_modules_if_needed(
runtime_env: Dict[str, Any],
scratch_dir: Optional[str] = os.getcwd(),
logger: Optional[logging.Logger] = default_logger,
upload_fn=None,
) -> Dict[str, Any]:
"""Uploads the entries in py_modules and replaces them with a list of URIs.
For each entry that is already a URI, this is a no-op.
"""
py_modules = runtime_env.get("py_modules")
if py_modules is None:
return runtime_env
if not isinstance(py_modules, list):
raise TypeError(
"py_modules must be a List of local paths, imported modules, or "
f"URIs, got {type(py_modules)}."
)
py_modules_uris = []
for module in py_modules:
if isinstance(module, str):
# module_path is a local path or a URI.
module_path = module
elif isinstance(module, Path):
module_path = str(module)
elif isinstance(module, ModuleType):
# NOTE(edoakes): Python allows some installed Python packages to
# be split into multiple directories. We could probably handle
# this, but it seems tricky & uncommon. If it's a problem for
# users, we can add this support on demand.
if len(module.__path__) > 1:
raise ValueError(
"py_modules only supports modules whose __path__ has length 1."
)
[module_path] = module.__path__
else:
raise TypeError(
"py_modules must be a list of file paths, URIs, "
f"or imported modules, got {type(module)}."
)
if _check_is_uri(module_path):
module_uri = module_path
else:
# module_path is a local path.
if Path(module_path).is_dir():
excludes = runtime_env.get("excludes", None)
module_uri = get_uri_for_directory(module_path, excludes=excludes)
if upload_fn is None:
try:
upload_package_if_needed(
module_uri,
scratch_dir,
module_path,
excludes=excludes,
include_parent_dir=True,
logger=logger,
)
except Exception as e:
raise RuntimeEnvSetupError(
f"Failed to upload module {module_path} to the Ray "
f"cluster: {e}"
) from e
else:
upload_fn(module_path, excludes=excludes)
elif Path(module_path).suffix == ".whl":
module_uri = get_uri_for_package(Path(module_path))
if upload_fn is None:
if not package_exists(module_uri):
try:
upload_package_to_gcs(
module_uri, Path(module_path).read_bytes()
)
except Exception as e:
raise RuntimeEnvSetupError(
f"Failed to upload {module_path} to the Ray "
f"cluster: {e}"
) from e
else:
upload_fn(module_path, excludes=None, is_file=True)
else:
raise ValueError(
"py_modules entry must be a directory or a .whl file; "
f"got {module_path}"
)
py_modules_uris.append(module_uri)
# TODO(architkulkarni): Expose a single URI for py_modules. This plugin
# should internally handle the "sub-URIs", the individual modules.
runtime_env["py_modules"] = py_modules_uris
return runtime_env
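# Illustrative example (the paths and bucket below are hypothetical): given
#     {"py_modules": ["./my_pkg", "s3://bucket/dep.zip"]}
# the s3:// entry is kept as-is, while the local directory is uploaded and
# replaced by a content-addressed URI, leaving something like
#     {"py_modules": ["gcs://_ray_pkg_<hash>.zip", "s3://bucket/dep.zip"]}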
class PyModulesPlugin(RuntimeEnvPlugin):
name = "py_modules"
def __init__(
self, resources_dir: str, gcs_aio_client: "GcsAioClient" # noqa: F821
):
self._resources_dir = os.path.join(resources_dir, "py_modules_files")
self._gcs_aio_client = gcs_aio_client
try_to_create_directory(self._resources_dir)
def _get_local_dir_from_uri(self, uri: str):
return get_local_dir_from_uri(uri, self._resources_dir)
def delete_uri(
self, uri: str, logger: Optional[logging.Logger] = default_logger
) -> int:
"""Delete URI and return the number of bytes deleted."""
local_dir = get_local_dir_from_uri(uri, self._resources_dir)
local_dir_size = get_directory_size_bytes(local_dir)
deleted = delete_package(uri, self._resources_dir)
if not deleted:
logger.warning(f"Tried to delete nonexistent URI: {uri}.")
return 0
return local_dir_size
def get_uris(self, runtime_env: dict) -> List[str]:
return runtime_env.py_modules()
async def _download_and_install_wheel(
self, uri: str, logger: Optional[logging.Logger] = default_logger
):
"""Download and install a wheel URI, and then delete the local wheel file."""
wheel_file = await download_and_unpack_package(
uri, self._resources_dir, self._gcs_aio_client, logger=logger
)
module_dir = self._get_local_dir_from_uri(uri)
pip_install_cmd = [
"pip",
"install",
wheel_file,
f"--target={module_dir}",
]
logger.info(
"Running py_modules wheel install command: %s", str(pip_install_cmd)
)
try:
# TODO(architkulkarni): Use `await check_output_cmd` or similar.
exit_code, output = exec_cmd_stream_to_logger(pip_install_cmd, logger)
finally:
if Path(wheel_file).exists():
Path(wheel_file).unlink()
if exit_code != 0:
if Path(module_dir).exists():
Path(module_dir).unlink()
raise RuntimeError(
f"Failed to install py_modules wheel {wheel_file}"
f"to {module_dir}:\n{output}"
)
return module_dir
async def create(
self,
uri: str,
runtime_env: "RuntimeEnv", # noqa: F821
context: RuntimeEnvContext,
logger: Optional[logging.Logger] = default_logger,
) -> int:
if is_whl_uri(uri):
module_dir = await self._download_and_install_wheel(uri=uri, logger=logger)
else:
module_dir = await download_and_unpack_package(
uri, self._resources_dir, self._gcs_aio_client, logger=logger
)
return get_directory_size_bytes(module_dir)
def modify_context(
self,
uris: List[str],
runtime_env_dict: Dict,
context: RuntimeEnvContext,
logger: Optional[logging.Logger] = default_logger,
):
module_dirs = []
for uri in uris:
module_dir = self._get_local_dir_from_uri(uri)
if not module_dir.exists():
raise ValueError(
f"Local directory {module_dir} for URI {uri} does "
"not exist on the cluster. Something may have gone wrong while "
"downloading, unpacking or installing the py_modules files."
)
module_dirs.append(str(module_dir))
set_pythonpath_in_context(os.pathsep.join(module_dirs), context)
| [
"[email protected]"
] | |
1f7723168e6a49bcdc616eed2a91fc7a05d1b3d3 | f3057f554643bd68ccf5fb34966f8233c57c600f | /djmodels/blog/migrations/0007_auto_20171213_1357.py | 0f340f6c17444af0f3f8de61a33fd8e29ffcc8c9 | [] | no_license | travishen/djcore | e917eeefc0d8a985fa083bbaf6b426ad4af31dcb | e611d82797abd04f5909b809e6c4debb46eece11 | refs/heads/master | 2021-09-03T02:52:38.336089 | 2018-01-05T02:24:16 | 2018-01-05T02:24:16 | 113,840,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-12-13 13:57
from __future__ import unicode_literals
import blog.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0006_auto_20171213_1225'),
]
operations = [
migrations.AddField(
model_name='postmodel',
name='slug',
field=models.SlugField(blank=True, null=True),
),
migrations.AlterField(
model_name='postmodel',
name='author_email',
field=models.CharField(blank=True, max_length=240, null=True, validators=[blog.validators.validate_author_email]),
),
]
| [
"[email protected]"
] | |
d2da3d3a0bb26f5782b9c140d847572bc7cd8ec0 | fc0a6e0f9ffa90a2473fec77bc52ea02e9b21f55 | /venv/lib/python3.7/site-packages/akshare/qhkc/funcs.py | ef5311e4c8f7909a2359e139fe2ab25f1364655b | [] | no_license | YixuanSeanZhou/COVID19_Scraping | 3903e697caf406c7d357afd8cc43811d62896244 | b84890c4a5ddef589cd76d1ed8fd4a1976f4e3c4 | refs/heads/master | 2022-09-08T16:14:33.292096 | 2020-05-23T04:26:02 | 2020-05-23T04:26:02 | 266,261,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,527 | py | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Author: Albert King
date: 2020/2/13 23:11
contact: [email protected]
desc: available function library --> client.py --> DataApi
"""
class QhkcFunctions:
@staticmethod
def variety_positions(fields="shorts", code="rb1810", date="2018-08-08"):
"""
        Qhkc (qhkc.cn) - commodity - positions data API
        :param fields: fields to return, shorts or longs
        :type fields: str
        :param code: contract code
        :type code: str
        :param date: query date
        :type date: str
        :return: commodity positions data
        :rtype: pandas.DataFrame
        broker      string 	seat (futures broker)
        long        int	long position volume held by this seat
        long_chge   int	change in this seat's long position
        short       int	short position volume held by this seat
        short_chge  int	change in this seat's short position
"""
pass
@staticmethod
def variety_net_positions(fields="", symbol="RB", broker="永安期货", date="2018-08-08"):
"""
        Qhkc (qhkc.cn) - commodity - net positions data API
        :param fields: fields to return
        :type fields: str
        :param symbol: code of the variety to query
        :type symbol: str
        :param broker: seat (futures broker)
        :type broker: str
        :param date: query date
        :type date: str
        :return: commodity net positions data
        :rtype: dict
        trans_date      date 	query date
        net_position    int	net position value
"""
pass
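
# Note (added for clarity): both methods above are documentation stubs whose
# bodies are `pass`; the callable implementations are presumably attached by
# the generated client (client.py --> DataApi), used as, e.g.
#     api.variety_positions(fields="shorts", code="rb1810", date="2018-08-08")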
| [
"[email protected]"
] | |
2f5013ac6fd73da64e153628614dfa276fd04d3b | 48a36fddd9e7c584a9792533c11601f0e4619885 | /torchvision/edgeailite/xvision/losses/flow_loss.py | d3d238fa87d39813f0da26d516e3e186af56441d | [
"MIT",
"BSD-2-Clause-Views",
"BSD-3-Clause"
] | permissive | supermy00/edgeai-torchvision | 8c152e796590ae5f6ae4f6948cbfb132409506a0 | 29b02c32b26ea8c0c319a376ab8a9a1b9ada25b5 | refs/heads/master | 2023-09-02T02:36:47.068701 | 2021-11-17T10:08:17 | 2021-11-17T10:08:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,504 | py | #################################################################################
# Copyright (c) 2018-2021, Texas Instruments Incorporated - http://www.ti.com
# All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#################################################################################
import torch
from .basic_loss import *
from .loss_utils import *
__all__ = [
'end_point_error', 'end_point_loss', 'outlier_fraction', 'outlier_precentage'
]
############################################################################
class EPError(BasicNormLossModule):
def __init__(self, sparse=False, error_fn=l2_norm, error_name='EPError'):
super().__init__(sparse=sparse, error_fn=error_fn, error_name=error_name)
end_point_error = EPError
end_point_loss = EPError
############################################################################
def outlier_check(prediction, target, absolute_thr=3.0, relative_thr=0.05):
norm_dist = l2_norm(prediction, target)
norm_pred = l2_norm(prediction)
norm_target = l2_norm(target)
eps_arr = (norm_target == 0).float() * (1e-6) # To avoid division by zero.
rel_dist = norm_pred / (norm_target + eps_arr)
is_outlier = ((norm_dist > absolute_thr) & (rel_dist > relative_thr)).float()
return is_outlier
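
# Note (added for clarity): this resembles the KITTI-style flow outlier
# criterion (end-point error > 3 px AND > 5% of the ground-truth magnitude),
# except that rel_dist above is |prediction| / |target| rather than
# EPE / |target|.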
def outlier_check_x100(opt, target, absolute_thr=3.0, relative_thr=0.05):
return outlier_check(opt, target, absolute_thr, relative_thr) * 100.0
class OutlierFraction(BasicNormLossModule):
def __init__(self, sparse=False, error_fn=outlier_check, error_name='OutlierFraction'):
super().__init__(sparse=sparse, error_fn=error_fn, error_name=error_name)
#
outlier_fraction = OutlierFraction
class OutlierPercentage(BasicNormLossModule):
def __init__(self, sparse=False, error_fn=outlier_check_x100, error_name='OutlierPercentage'):
super().__init__(sparse=sparse, error_fn=error_fn, error_name=error_name)
#
outlier_precentage = OutlierPercentage
############################################################################
| [
"a0393608@udtensorlab5"
] | a0393608@udtensorlab5 |
5a1cfbe5e8072892a669b1003898b436401022a5 | a807ce0fa3e3e9c3b558b2e977c05e60c3a667b1 | /nemo_text_processing/inverse_text_normalization/en/verbalizers/cardinal.py | bd053bebf7645a4fbb554dfb082d991b1be9c614 | [
"Apache-2.0"
] | permissive | blisc/NeMo | 630376e7555c0face994da2f6f9af5d8d31243c3 | fadeb45c84d6b323d78e30475538455a88b7c151 | refs/heads/rework_reqs | 2023-08-17T00:03:39.248669 | 2021-08-12T15:15:06 | 2021-08-12T15:15:06 | 208,142,160 | 2 | 0 | Apache-2.0 | 2022-02-03T16:30:33 | 2019-09-12T20:37:24 | Jupyter Notebook | UTF-8 | Python | false | false | 1,812 | py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright 2015 and onwards Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nemo_text_processing.text_normalization.en.graph_utils import NEMO_NOT_QUOTE, GraphFst, delete_space
try:
import pynini
from pynini.lib import pynutil
PYNINI_AVAILABLE = True
except (ModuleNotFoundError, ImportError):
PYNINI_AVAILABLE = False
class CardinalFst(GraphFst):
"""
Finite state transducer for verbalizing cardinal
e.g. cardinal { integer: "23" negative: "-" } -> -23
"""
def __init__(self):
super().__init__(name="cardinal", kind="verbalize")
optional_sign = pynini.closure(
pynutil.delete("negative:")
+ delete_space
+ pynutil.delete("\"")
+ NEMO_NOT_QUOTE
+ pynutil.delete("\"")
+ delete_space,
0,
1,
)
graph = (
pynutil.delete("integer:")
+ delete_space
+ pynutil.delete("\"")
+ pynini.closure(NEMO_NOT_QUOTE, 1)
+ pynutil.delete("\"")
)
self.numbers = graph
graph = optional_sign + graph
delete_tokens = self.delete_tokens(graph)
self.fst = delete_tokens.optimize()
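
# Illustrative note (not part of the original file): applied through the NeMo
# verbalizer pipeline, this FST rewrites serialized tokens such as
#     cardinal { negative: "-" integer: "23" }
# into "-23" (e.g. via pynini.lib.rewrite over CardinalFst().fst); the exact
# invocation depends on the surrounding grammar composition.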
| [
"[email protected]"
] | |
ca75f05cd80acb75911622ecaa2e8e98c607b2fa | 1213bcf770a94a89b39be8dc7b99a3f7e35fd369 | /src/alloy/backend/x64/optimizer/passes/sources.gypi | 6d80257ca4d2836c37d094b89bcaa73a046bbcae | [] | no_license | wtfaremyinitials/xenia | c86e4625a1dd084d97d44c3242e2faf208bca2b8 | 16b3ecd5897051f82bc236ad9a4d0ab5cab22e87 | refs/heads/master | 2020-12-31T02:43:53.168712 | 2014-01-14T22:06:05 | 2014-01-14T22:06:05 | 15,918,955 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | gypi | # Copyright 2013 Ben Vanik. All Rights Reserved.
{
'sources': [
'reachability_pass.cc',
'reachability_pass.h',
'redundant_mov_pass.cc',
'redundant_mov_pass.h',
'register_allocation_pass.cc',
'register_allocation_pass.h',
],
}
| [
"[email protected]"
] | |
fc52d8ec81fe4b307261bd7e11d50deb5d97ee67 | 9ba00eb872099e6fe69c4e6b3561b0578cc09ca4 | /exoplanets.py | cc3e8a16376dfb30a79845c999770f1eb685ffd8 | [] | no_license | RuthAngus/exoplanet_travel | 4ab56ee3adfaaeca009e6997706f42091d634c01 | b7248030e8713811f65239d5745fbd9493dcfd58 | refs/heads/master | 2020-12-24T13:28:35.234907 | 2014-07-01T16:14:24 | 2014-07-01T16:14:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,548 | py | from exoplanet_pictograms import plot_exoplanet, plot_name, remove_space
from flask import Flask, url_for
from flask import render_template
import numpy as np
import kplr
import csv
app = Flask(__name__)
@app.route('/')
def index(name=None, text=None):
name = nm()
print name
text1 = p_text(name)
text2 = d_text(name)
text3 = price(name)
return render_template('index.html', name=name, p_text=text1, d_text=text2, cut_name=remove_space(name), \
dist = text3)
@app.route('/image/<name>')
def image(name):
return plot_name(name)
def nm():
data = np.genfromtxt("/users/angusr/python/exoplanet_travel/transit_data.txt", \
dtype=str, delimiter=',', skip_header=2).t
# select Kepler planets only
r = np.random.randint(0, len(data[0][0]))
name = data[0]
while str(name)[0] != 'K':
r = np.random.randint(0, len(data[0][0]))
name = data[0]
return name[r]
def p_text(name):
data = np.genfromtxt("/Users/angusr/Python/exoplanet_travel/transit_data.txt", \
dtype=str, delimiter=',', skip_header=2).T
n = data[0]
l = n == name
periods = data[3]
a = float(data[2][l][0])#/1.48e-11
p = float(periods[l][0])
print p
if p < 20:
tstar = 5000.
albedo = .5
rstar = 1.
teq = int(600.)
# teq = tstar*(1-albedo)**.25 * (rstar/(2*a))**.5
if p < 20:
        r = np.random.randint(0,3)  # three message variants below
if r == 0:
return "It's time to turn up the heat! With surface temperatures in excess of %s C, \
this planet is a scorcher"%teq
elif r == 1:
return "Love sunbathing? On this planet its so hot that even a moment of exposure will incinerate \
you. High factor Sun scream required."
elif r == 2:
return "Enjoy long summer evenings and 1000 degree days? You'll need to spend them all in a \
protective lead case, but they'll probably still be enjoyable."
else:
        r = np.random.randint(0,2)  # two message variants below
if r == 0:
return "This is a cool place to be - too cool in fact. At -100 degrees you'll need to \
take that extra layer for sure."
else:
return "Might want to pack an extra jumper."
def d_text(name):
data = np.genfromtxt("/Users/angusr/Python/exoplanet_travel/transit_data.txt", \
dtype=str, delimiter=',', skip_header=2).T
n = data[0]
l = n == name
name = data[0][l][0]
mass = float(data[1][l][0])
radius = 1.
try:
radius = float(data[10][l][0])
except:
pass
print mass, radius
d = mass/(4*radius)**3
    r = np.random.randint(0,2)  # pick between the two variants in each branch
# low g
if d > .5:
if r == 0:
return "If things are getting a little 'heavy' back home, you'll feel lighter \
than air on this low-g planet"
else:
return "One of the big pulls for %s is its gravitational field. At... Gs \
you'll come back feeling like Superman" %name
# high g
if d < .5:
if r == 0:
return "There are many attractions on this planet, but gravity isn't one of them. Its \
gravitational field is a mere 50 percent of the Earth's so you'll feel \
like you're floating the whole time"
else:
return "This is the perfect place to lose a few pounds. In fact you'll only weigh 0.1 Kg due to its \
low gravity"
def nm():  # note: this second definition shadows the Kepler-only nm() above
data = np.genfromtxt("/Users/angusr/Python/exoplanet_travel/transit_data.txt", \
dtype=str, delimiter=',', skip_header=2).T
    r = np.random.randint(0,len(data[0]))
name = data[0]
return name[r]
def price(name):
distances = (500, 300, 100, 600, 1000, 10)
# 12 litres per km
# 2.82 dollars per gallon
# 4.54 litres per gallon
# .62 dollars per litre
# 7.44 dollars per km
# 4.3896 GBP per km
# 1.317e+17 gbp per parsec
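    # Worked check of the chain above (illustrative): $2.82/gal / 4.54 L/gal
    # ~= $0.62/L; 12 L/km * $0.62/L ~= $7.44/km; at the assumed USD->GBP rate
    # that is ~4.39 GBP/km, which scaled by km per parsec gives the
    # GBP-per-parsec figure used below.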
r = np.random.randint(0, len(distances))
cost = 1.317e+17 * distances[r]
# stringy = str(cost)
# z = str[-2:]
    r = np.random.randint(0,4)  # four offer variants below
if r == 0:
return "Only %s GBP!*" %cost
elif r == 1:
return "Special offer! %s GBP!*" %cost
elif r == 2:
return "%s GBP TODAY ONLY*" %cost
elif r == 3:
return "Only 2 seats left at %s GBP*" %cost
if __name__ == '__main__':
# name = nm()
# d_text(name)
# raw_input('enter')
app.run(debug=True)
| [
"[email protected]"
] | |
00061ee28548b2c6b4aefaa5471d754a09c8788d | 653a3d9d66f3d359083cb588fc7c9ece8bb48417 | /src/graph_transpiler/webdnn/backend/fallback/kernels/cosh.py | ceb89aa2948de8ed8a485af2566d587d6dfce987 | [
"Zlib",
"MIT"
] | permissive | leonskim/webdnn | fec510254b15f3dec00f5bed8f498737b372e470 | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | refs/heads/master | 2020-04-15T18:42:43.632244 | 2019-01-10T10:07:18 | 2019-01-10T10:07:18 | 164,921,764 | 0 | 0 | NOASSERTION | 2019-01-09T19:07:35 | 2019-01-09T19:07:30 | Python | UTF-8 | Python | false | false | 186 | py | from webdnn.backend.fallback.kernels.elementwise import register_elementwise_kernel
from webdnn.graph.operators.cosh import Cosh
register_elementwise_kernel(Cosh, "y = Math.cosh(x0);")
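
# Note (added for clarity): the string argument is a JavaScript snippet spliced
# into the generated fallback (pure-JS) kernel; by the registration convention,
# x0 denotes an element of the first input tensor and y the corresponding
# output element.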
| [
"[email protected]"
] | |
9edbd7dd5d1dc89004fd5abd46398875aab00e34 | 1afa6c852dfc922d1a26a384d965976f31a87692 | /Imaging/Core/Testing/Python/TestSeparableFilter.py | da6b137d7329c12a4b1484262e2fe32f6bd2f967 | [
"BSD-3-Clause"
] | permissive | dgobbi/VTK | 631d037aacc7258861e70f77c586b01cd4ebff3f | 17f232ee440025c26bc78a897edef78e9fc78510 | refs/heads/master | 2021-01-04T22:27:46.611907 | 2013-03-01T19:44:02 | 2013-03-01T19:44:02 | 938,377 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,282 | py | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
from math import exp, sqrt
VTK_DATA_ROOT = vtkGetDataRoot()
# Image pipeline
reader = vtk.vtkPNGReader()
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/fullhead15.png")
# Take the gradient in X, and smooth in Y
# Create a simple gradient filter
kernel = vtk.vtkFloatArray()
kernel.SetNumberOfTuples(3)
kernel.InsertValue(0,-1)
kernel.InsertValue(1,0)
kernel.InsertValue(2,1)
# Create a gaussian for Y
sigma = 1.5
sigma2 = expr.expr(globals(), locals(),["sigma","*","sigma"])
gaussian = vtk.vtkFloatArray()
gaussian.SetNumberOfTuples(31)
i = 0
while i < 31:
x = expr.expr(globals(), locals(),["i","-","15"])
g = expr.expr(globals(), locals(),["exp","(","-","(","x","*","x",")","/","(","2.0","*","sigma2",")",")","/","(","sqrt","(","2.0","*","3.1415",")","*","sigma",")"])
gaussian.InsertValue(i,g)
i = i + 1
convolve = vtk.vtkImageSeparableConvolution()
convolve.SetInputConnection(reader.GetOutputPort())
convolve.SetDimensionality(2)
convolve.SetXKernel(kernel)
convolve.SetYKernel(gaussian)
viewer = vtk.vtkImageViewer()
#viewer DebugOn
viewer.SetInputConnection(convolve.GetOutputPort())
viewer.SetColorWindow(500)
viewer.SetColorLevel(100)
viewer.Render()
# --- end of script --
| [
"[email protected]"
] | |
a2face64492994c1d89d87de17f1735956e91868 | 1f214956382f62e876de3d00c40c5a8684a3b5df | /preprocessing/tests/test_mysql.py | ef73d63c507114b639af108327ad9518ec0fd887 | [
"MIT"
] | permissive | ennima/omas | e1f00e0d3445f995d36b221c43ab47113750aeee | c8507b95c8c07a311c29c70acc0a7d3504d28f78 | refs/heads/master | 2021-01-18T18:11:49.027451 | 2016-10-14T19:28:14 | 2016-10-14T19:28:14 | 56,949,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,041 | py | import sys
import unittest
sys.path.append('../')
sys.path.append('../db')
from MysqlProcessing import *
from pprint import pprint
class MysqlProcessingTest(unittest.TestCase):
# def test_it_should_be_able_to_construct(self):
# mysql = MysqlProcessing()
# self.assertIsInstance(mysql,MysqlProcessing,"Es instancia")
# def test_it_should_be_able_to_load(self):
# mysql = MysqlProcessing()
# self.assertTrue(mysql.load("../db/db.json"))
# #pprint(mysql.data)
def test_making_a_table(self):
        # The global path is used to find the compiler files.
mysql = MysqlProcessing("../db/")
mysql.publish_path = "C:\\Users\\enrique.nieto\\Documents\\develops\\omas\\build_test\\db\\"
mysql.prettyfy = False
mysql.publish_single_file = True
mysql.publsh_to_file = True
mysql.db_mysql_engine = "INNODB"
# mysql.db_charset_latin = False
#mysql.process_create = False
        # This is a specific path, not a global one
mysql.load("../db/db.json")
mysql.process()
print(mysql.tables)
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
3435d0e6edaab0672cac56775147c66023890e1f | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_75/288.py | 5d27fe84e3a6db4dda7c09565446712b904e8b3e | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,467 | py | # coding:utf-8
import sys
argvs = sys.argv
argc = len(argvs)
#print argvs
#print argc
f = open(argvs[1])
T = f.readline()
for i in range(int(T)):
comb_rules = []
oppo_rules = []
line = f.readline()
tc = line.split(' ')
    C = int(tc[0]) # number of combination rules
for j in range(C):
temp = tc[j+1]
t_l = [[temp[0], temp[1]], temp[2]]
comb_rules.append(t_l)
t_l = [[temp[1], temp[0]], temp[2]]
comb_rules.append(t_l)
    D = int(tc[C+1]) # number of opposing rules
for j in range(D):
temp = tc[j + C + 2]
t_l = [temp[0], temp[1]]
oppo_rules.append(t_l)
t_l = [temp[1], temp[0]]
oppo_rules.append(t_l)
    N = tc[C+D+2] # number of characters
    S = tc[C+D+3] # the string
ls = []
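    # scan S left to right: combine the current char with the last kept char via
    # comb_rules, and wipe the whole stack when a pair from oppo_rules co-occurs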
for j in range(int(N)):
temp = S[j]
for l in comb_rules:
if l[0][0] == temp and len(ls) > 0 and l[0][1] == ls[len(ls)-1]:
ls.pop()
temp = l[1]
chk = 0
for l in oppo_rules:
lss = set(ls)
if l[0] == temp and l[1] in lss:
ls = []
chk = 1
if chk == 0:
ls.append(temp)
ans_s = str(ls)
sys.stdout.write("Case #")
sys.stdout.write(str(i+1))
sys.stdout.write(": ")
sys.stdout.write(ans_s.replace("'",""))
sys.stdout.write("\n")
# print ls
f.close
| [
"[email protected]"
] | |
0f1efd38304a5382f434bb66d79a51c0c7e7975c | b521802cca8e4ee4ff5a5ffe59175a34f2f6d763 | /maya/maya-utils/Scripts/Animation/2019-2-15 Tim Cam_Route_Manager/.history/Cam_Main/Cam_Main/Cam_Attribute_Panel_20190119093039.py | 087baf065ed52ddf41a02765c0e1351af2098e5e | [] | no_license | all-in-one-of/I-Do-library | 2edf68b29558728ce53fe17168694ad0353a076e | 8972ebdcf1430ccc207028d8482210092acf02ce | refs/heads/master | 2021-01-04T06:58:57.871216 | 2019-12-16T04:52:20 | 2019-12-16T04:52:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,514 | py | # -*- coding:utf-8 -*-
# Require Header
import os
import json
from functools import partial
# Sys Header
import sys
import traceback
import subprocess
import plugin.Qt as Qt
from Qt.QtCore import *
from Qt.QtGui import *
from Qt.QtWidgets import *
from maya import cmds
def loadUiType(uiFile):
import plugin.Qt as Qt
if Qt.__binding__.startswith('PyQt'):
from Qt import _uic as uic
return uic.loadUiType(uiFile)
elif Qt.__binding__ == 'PySide':
import pysideuic as uic
else:
import pyside2uic as uic
import xml.etree.ElementTree as xml
from cStringIO import StringIO
parsed = xml.parse(uiFile)
widget_class = parsed.find('widget').get('class')
form_class = parsed.find('class').text
with open(uiFile, 'r') as f:
o = StringIO()
frame = {}
uic.compileUi(f, o, indent=0)
pyc = compile(o.getvalue(), '<string>', 'exec')
exec pyc in frame
# Fetch the base_class and form class based on their type
# in the xml from designer
form_class = frame['Ui_%s'%form_class]
base_class = eval('%s'%widget_class)
return form_class, base_class
from Qt.QtCompat import wrapInstance
DIR = os.path.dirname(__file__)
UI_PATH = os.path.join(DIR,"ui","Cam_Attrubte_Panel.ui")
GUI_STATE_PATH = os.path.join(DIR, "json" ,'GUI_STATE.json')
form_class , base_class = loadUiType(UI_PATH)
class Cam_Attribute_Panel(form_class,base_class):
def __init__(self,MainWindow):
super(Cam_Attribute_Panel,self).__init__()
self.setupUi(self)
self.MainWindow = MainWindow
self.Current_Item = None
        # Note - action functions
self.Position_BTN.clicked.connect(self.Position_Fn)
self.Keyframe_BTN.clicked.connect(self.Keyframe_Fn)
        # Note - animated toggle effect
self.Cam_Input_Toggle_Anim = QPropertyAnimation(self.Cam_Input_Layout, b"maximumHeight")
self.Cam_Input_Toggle_Anim.setDuration(300)
self.Cam_Input_Toggle_Anim.setStartValue(0)
self.Cam_Input_Toggle_Anim.setEndValue(self.Cam_Input_Layout.sizeHint().height())
self.Cam_Input_Toggle_Check = False
self.Cam_Input_Toggle.clicked.connect(self.Cam_Input_Toggle_Fn)
self.Cam_Output_Toggle_Anim = QPropertyAnimation(self.Cam_Output_Layout, b"maximumHeight")
self.Cam_Output_Toggle_Anim.setDuration(300)
self.Cam_Output_Toggle_Anim.setStartValue(0)
self.Cam_Output_Toggle_Anim.setEndValue(self.Cam_Output_Layout.sizeHint().height())
self.Cam_Output_Toggle_Check = False
self.Cam_Output_Toggle.clicked.connect(self.Cam_Output_Toggle_Fn)
        # Note - pick functions
self.Add_CamGrp_Get.setVisible(False)
self.Add_CamGrp_Pick.clicked.connect(self.Add_CamGrp_Pick_Fun)
self.Add_Crv_Get.setVisible(False)
self.Add_Crv_Pick.clicked.connect(self.Add_Crv_Pick_Fun)
self.Add_Loc_Get.setVisible(False)
self.Add_Loc_Pick.clicked.connect(self.Add_Loc_Pick_Fun)
self.Add_Motion_Path_Get.setVisible(False)
self.Add_Motion_Path_Pick.clicked.connect(self.Add_Motion_Path_Pick_Fun)
# Note - SpinBox
self.Strat_Time_SB.valueChanged.connect(self.Strat_Time_SB_Fn)
self.End_Time_SB.valueChanged.connect(self.End_Time_SB_Fn)
def Strat_Time_SB_Fn(self):
self.Current_Item.Attr["Strat_Time_SB"] = self.Strat_Time_SB.value()
self.MainWindow.Save_Json_Fun()
def End_Time_SB_Fn(self):
self.Current_Item.Attr["End_Time_SB"] = self.End_Time_SB.value()
self.MainWindow.Save_Json_Fun()
def Check_Selection(self):
"""
Check_Selection
        # Note: check whether everything required has been picked
"""
Check_List=[
self.Add_Crv_LE.text() != "",
self.Add_Loc_LE.text() != "",
self.Add_Motion_Path_LE.text() != "",
self.MainWindow.Cam_Item_Widget.Attr["Add_Crv_LE"] != "",
# End_Time_Cehck = self.End_Time_SB.value() != 0,
# Start_Time_Cehck = self.Strat_Time_SB.value() != 0,
]
Exist_Check_list = [
cmds.objExists(self.Add_Crv_LE.text()),
cmds.objExists(self.Add_Loc_LE.text()),
cmds.objExists(self.Add_Motion_Path_LE.text()),
cmds.objExists(self.MainWindow.Cam_Item_Widget.Attr["Add_Crv_LE"]),
]
if False in Check_List:
return True
else:
return False
def Position_Fn(self):
if self.Check_Selection():
Base_Curve = self.MainWindow.Cam_Item_Widget.Attr["Add_Crv_LE"]
CamGrp = self.Add_CamGrp_LE.text()
cmds.xform( CamGrp,cp=1 )
cmds.delete(cmds.parentConstraint( Base_Curve,CamGrp ))
Target_Curve = self.Add_Crv_LE.text()
            # Note: unlock the curve's transform channels
cmds.setAttr("%s.tx" % Target_Curve,lock=False)
cmds.setAttr("%s.ty" % Target_Curve,lock=False)
cmds.setAttr("%s.tz" % Target_Curve,lock=False)
cmds.setAttr("%s.rx" % Target_Curve,lock=False)
cmds.setAttr("%s.ry" % Target_Curve,lock=False)
cmds.setAttr("%s.rz" % Target_Curve,lock=False)
cmds.delete(cmds.parentConstraint( Base_Curve,Target_Curve ))
cmds.headsUpMessage(u"位置匹配完成")
else:
cmds.warning(u"请完成镜头组的选择")
cmds.headsUpMessage(u"请完成镜头组的选择")
def Keyframe_Fn(self):
if self.Check_Selection():
Path = self.Add_Motion_Path_LE.text()
offset = cmds.keyframe(Path,q=1)[0]
cmds.keyframe("%s_uValue"% Path,e=1,iub=1,r=1,o="over",tc=-offset)
else:
cmds.warning(u"请完成镜头组的选择")
cmds.headsUpMessage(u"请完成镜头组的选择")
def Add_CamGrp_Pick_Fun(self):
if len(cmds.ls(sl=True)) > 0:
Selection = cmds.ls(sl=True,l=1)[0]
# SelectionShape = cmds.listRelatives(Selection)[0]
SelectionType = cmds.nodeType( Selection )
if SelectionType == "transform":
self.Add_CamGrp_LE.setText(Selection)
self.Current_Item.Cam_LE.setText(Selection)
self.Cam_Name_Label.setText(u"<center> - %s - </center>" % Selection)
try :
self.Add_CamGrp_Get.clicked.disconnect()
except:
pass
self.Add_CamGrp_Get.clicked.connect(partial(self.Select_OBJ_Fun,Selection))
                # Note: automatically fetch the related objects
TypeList =[
"motionPath",
"nurbsCurve",
"locator",
]
FnList = [
self.Add_Motion_Path_Pick_Fun,
self.Add_Crv_Pick_Fun,
self.Add_Loc_Pick_Fun,
]
SelectionList = cmds.listRelatives(Selection,f=1)
for sel in SelectionList:
SelectionShape = cmds.listRelatives(sel,f=1)
SelectionType = cmds.nodeType( SelectionShape[0] )
for i,Type in enumerate(TypeList):
if SelectionType == Type:
cmds.select(sel)
FnList[i]()
break
self.Current_Item.Attr["Add_CamGrp_LE"] = self.Add_CamGrp_LE.text()
else:
cmds.warning(u"请选择组进行获取")
cmds.headsUpMessage(u"请选择组进行获取")
else :
self.Add_CamGrp_LE.setText("")
if self.Add_CamGrp_LE.text() != "":
self.Current_Item.Attr["Add_CamGrp_LE"] = self.Add_CamGrp_LE.text()
self.Add_CamGrp_Label.setVisible(False)
self.Add_CamGrp_Get.setVisible(True)
else:
self.Add_CamGrp_Label.setVisible(True)
self.Add_CamGrp_Get.setVisible(False)
self.MainWindow.Save_Json_Fun()
def Add_Motion_Path_Pick_Fun(self):
if len(cmds.ls(sl=True)) > 0:
Selection = cmds.ls(sl=True,l=1)[0]
# SelectionShape = cmds.listRelatives(Selection)[0]
SelectionType = cmds.nodeType( Selection )
if SelectionType == "motionPath":
self.Add_Motion_Path_LE.setText(Selection)
try :
self.Add_Motion_Path_Get.clicked.disconnect()
except:
pass
self.Add_Motion_Path_Get.clicked.connect(partial(self.Select_OBJ_Fun,Selection))
else:
cmds.warning(u"请选择motionPath进行获取")
cmds.headsUpMessage(u"请选择motionPath进行获取")
else :
self.Add_Motion_Path_LE.setText("")
if self.Add_Motion_Path_LE.text() != "":
self.Current_Item.Attr["Add_Motion_Path_LE"] = self.Add_Motion_Path_LE.text()
self.Add_Motion_Path_Label.setVisible(False)
self.Add_Motion_Path_Get.setVisible(True)
else:
self.Add_Motion_Path_Label.setVisible(True)
self.Add_Motion_Path_Get.setVisible(False)
self.MainWindow.Save_Json_Fun()
def Add_Crv_Pick_Fun(self):
if len(cmds.ls(sl=True)) > 0:
Selection = cmds.ls(sl=True,l=1)[0]
SelectionShape = cmds.listRelatives(Selection,pa=1)[0]
SelectionType = cmds.nodeType( SelectionShape )
if SelectionType == "nurbsCurve":
self.Add_Crv_LE.setText(Selection)
try :
self.Add_Crv_Get.clicked.disconnect()
except:
pass
self.Add_Crv_Get.clicked.connect(partial(self.Select_OBJ_Fun,Selection))
                # Note: automatically detect the MotionPath
MotionPath = cmds.listConnections(SelectionShape,type="motionPath")[0]
cmds.select(MotionPath)
self.Add_Motion_Path_Pick_Fun()
else:
cmds.warning(u"请选择NurbsCurve进行获取")
cmds.headsUpMessage(u"请选择NurbsCurve进行获取")
else :
self.Add_Crv_LE.setText("")
if self.Add_Crv_LE.text() != "":
self.Current_Item.Attr["Add_Crv_LE"] = self.Add_Crv_LE.text()
self.Add_Crv_Label.setVisible(False)
self.Add_Crv_Get.setVisible(True)
else:
self.Add_Crv_Label.setVisible(True)
self.Add_Crv_Get.setVisible(False)
self.MainWindow.Save_Json_Fun()
def Add_Loc_Pick_Fun(self):
if len(cmds.ls(sl=True)) > 0 :
Selection = cmds.ls(sl=True,l=1)[0]
SelectionShape = cmds.listRelatives(Selection,pa=1)[0]
SelectionType = cmds.nodeType( SelectionShape )
if SelectionType == "locator":
self.Add_Loc_LE.setText(Selection)
try :
self.Add_Loc_Get.clicked.disconnect()
except:
pass
self.Add_Loc_Get.clicked.connect(partial(self.Select_OBJ_Fun,Selection))
else:
cmds.warning(u"请选择Locator进行获取")
cmds.headsUpMessage(u"请选择Locator进行获取")
else :
self.Add_Loc_LE.setText("")
if self.Add_Loc_LE.text() != "":
self.Current_Item.Attr["Add_Loc_LE"] = self.Add_Loc_LE.text()
self.Add_Loc_Label.setVisible(False)
self.Add_Loc_Get.setVisible(True)
else:
self.Add_Loc_Label.setVisible(True)
self.Add_Loc_Get.setVisible(False)
self.MainWindow.Save_Json_Fun()
def Cam_Input_Toggle_Fn(self):
if self.Cam_Input_Toggle_Check:
self.Cam_Input_Toggle_Check = False
self.Cam_Input_Toggle_Anim.setDirection(QAbstractAnimation.Forward)
self.Cam_Input_Toggle_Anim.start()
self.Cam_Input_Toggle.setText(u"▼输入设置")
self.Cam_Input_Toggle.setStyleSheet('font:normal')
else:
self.Cam_Input_Toggle_Check = True
self.Cam_Input_Toggle_Anim.setDirection(QAbstractAnimation.Backward)
self.Cam_Input_Toggle_Anim.start()
self.Cam_Input_Toggle.setText(u"■输入设置")
self.Cam_Input_Toggle.setStyleSheet('font:bold')
self.MainWindow.Save_Json_Fun()
def Cam_Output_Toggle_Fn(self):
if self.Cam_Output_Toggle_Check:
self.Cam_Output_Toggle_Check = False
self.Cam_Output_Toggle_Anim.setDirection(QAbstractAnimation.Forward)
self.Cam_Output_Toggle_Anim.start()
self.Cam_Output_Toggle.setText(u"▼输出设置")
self.Cam_Output_Toggle.setStyleSheet('font:normal')
else:
self.Cam_Output_Toggle_Check = True
self.Cam_Output_Toggle_Anim.setDirection(QAbstractAnimation.Backward)
self.Cam_Output_Toggle_Anim.start()
self.Cam_Output_Toggle.setText(u"■输出设置")
self.Cam_Output_Toggle.setStyleSheet('font:bold')
self.MainWindow.Save_Json_Fun()
def Select_OBJ_Fun(self,selectTarget):
if selectTarget != "":
cmds.select(selectTarget) | [
"[email protected]"
] | |
1fdcdc99bcf503501c3716c003b8e28528b0ce68 | 0857913ae5cde7481c5bca63ed5081e11f217581 | /p1804/p10While/whileTest.py | 5d346d2f002ca0cee68216cb7d6c969ca233f173 | [] | no_license | nmww/p1804_ceshi | 48c2d93849018d8601f0732c5005395c81490ef1 | 72bf25cc7767371594b41f8919454e46fe178023 | refs/heads/master | 2020-03-15T23:25:43.345763 | 2018-06-27T03:19:55 | 2018-06-27T03:19:55 | 132,393,611 | 53 | 7 | null | null | null | null | UTF-8 | Python | false | false | 565 | py |
'''
while 循环
# while语句作用的范围是
# 下方相同缩进的 所有语句
while 循环条件:
pass
pass
循环条件:
1. 是否成立: True / False
2. 条件判断控制 a 比较 b
'''
#-------------------------------
qiandao = 0 # 没有签到
xinqing = 'kaixin'
while qiandao < 10:
print ("您未签到,请尽快去签到,否则要跪键盘 %d "% qiandao)
qiandao = qiandao + 1
#----------------------------------
# True 始终成立
# Fasle 始终不成立
while False:
print ("你应该签到了!")
| [
"[email protected]"
] | |
daab02c365ce921ab5905425f021e1307979d25e | 2293c76c3d18e2fcd44ded90bd40113d26285663 | /pyeccodes/defs/mars/stream_table.py | e671c33b78355b98ed7a672fe0156befb48febba | [
"Apache-2.0"
] | permissive | ecmwf/pyeccodes | b1f121dbddf68d176a03805ed5144ba0b37ac211 | dce2c72d3adcc0cb801731366be53327ce13a00b | refs/heads/master | 2022-04-23T10:37:40.524078 | 2020-04-18T06:30:29 | 2020-04-18T06:30:29 | 255,554,540 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 9,953 | py | def load(h):
return ({'abbr': 0, 'code': 0, 'title': 'Unknown'},
{'abbr': 'fsob',
'code': 1022,
'title': 'Forecast sensitivity to observations'},
{'abbr': 'fsow',
'code': 1023,
'title': 'Forecast sensitivity to observations wave'},
{'abbr': 'dahc', 'code': 1024, 'title': 'Daily archive hindcast'},
{'abbr': 'oper', 'code': 1025, 'title': 'Atmospheric model'},
{'abbr': 'scda',
'code': 1026,
'title': 'Atmospheric model',
'units': 'short cutoff'},
{'abbr': 'scwv', 'code': 1027, 'title': 'Wave model', 'units': 'short cutoff'},
{'abbr': 'dcda',
'code': 1028,
'title': 'Atmospheric model',
'units': 'delayed cutoff'},
{'abbr': 'dcwv',
'code': 1029,
'title': 'Wave model',
'units': 'delayed cutoff'},
{'abbr': 'enda', 'code': 1030, 'title': 'Ensemble data assimilation'},
{'abbr': 'efho', 'code': 1032, 'title': 'Ensemble forecast hindcast overlap'},
{'abbr': 'enfh', 'code': 1033, 'title': 'Ensemble forecast hindcasts'},
{'abbr': 'efov', 'code': 1034, 'title': 'Ensemble forecast overlap'},
{'abbr': 'enfo', 'code': 1035, 'title': 'Ensemble prediction system'},
{'abbr': 'sens', 'code': 1036, 'title': 'Sensitivity forecast'},
{'abbr': 'maed', 'code': 1037, 'title': 'Multianalysis ensemble data'},
{'abbr': 'amap', 'code': 1038, 'title': 'Analysis for multianalysis project'},
{'abbr': 'efhc',
'code': 1039,
'title': 'Ensemble forecast hindcasts',
'units': 'obsolete'},
{'abbr': 'efhs',
'code': 1040,
'title': 'Ensemble forecast hindcast statistics'},
{'abbr': 'toga', 'code': 1041, 'title': 'TOGA'},
{'abbr': 'cher', 'code': 1042, 'title': 'Chernobyl'},
{'abbr': 'mnth', 'code': 1043, 'title': 'Monthly means'},
{'abbr': 'supd', 'code': 1044, 'title': 'Deterministic supplementary data'},
{'abbr': 'wave', 'code': 1045, 'title': 'Wave model'},
{'abbr': 'ocea', 'code': 1046, 'title': 'Ocean'},
{'abbr': 'fgge', 'code': 1047, 'title': 'FGGE'},
{'abbr': 'egrr', 'code': 1050, 'title': 'Bracknell'},
{'abbr': 'kwbc', 'code': 1051, 'title': 'Washington'},
{'abbr': 'edzw', 'code': 1052, 'title': 'Offenbach'},
{'abbr': 'lfpw', 'code': 1053, 'title': 'Toulouse'},
{'abbr': 'rjtd', 'code': 1054, 'title': 'Tokyo'},
{'abbr': 'cwao', 'code': 1055, 'title': 'Montreal'},
{'abbr': 'ammc', 'code': 1056, 'title': 'Melbourne'},
{'abbr': 'efas',
'code': 1057,
'title': 'European Flood Awareness System',
'units': 'EFAS'},
{'abbr': 'efse',
'code': 1058,
'title': 'European Flood Awareness System (EFAS) seasonal forecasts'},
{'abbr': 'efcl',
'code': 1059,
'title': 'European Flood Awareness System (EFAS) climatology'},
{'abbr': 'wfas',
'code': 1060,
'title': 'Global flood awareness system',
'units': 'GLOFAS'},
{'abbr': 'wfcl',
'code': 1061,
'title': 'Global flood awareness system (GLOFAS) climatology'},
{'abbr': 'msdc',
'code': 1070,
'title': 'Monthly standard deviation and covariance'},
{'abbr': 'moda', 'code': 1071, 'title': 'Monthly means of daily means'},
{'abbr': 'monr',
'code': 1072,
'title': "Monthly means using G. Boer's step function"},
{'abbr': 'mnvr',
'code': 1073,
'title': "Monthly variance and covariance data using G. Boer's step "
'function'},
{'abbr': 'msda',
'code': 1074,
'title': 'Monthly standard deviation and covariance of daily means'},
{'abbr': 'mdfa',
'code': 1075,
'title': 'Monthly means of daily forecast accumulations'},
{'abbr': 'dacl', 'code': 1076, 'title': 'Daily climatology'},
{'abbr': 'wehs',
'code': 1077,
'title': 'Wave ensemble forecast hindcast statistics'},
{'abbr': 'ewho',
'code': 1078,
'title': 'Ensemble forecast wave hindcast overlap'},
{'abbr': 'enwh', 'code': 1079, 'title': 'Ensemble forecast wave hindcasts'},
{'abbr': 'wamo', 'code': 1080, 'title': 'Wave monthly means'},
{'abbr': 'waef', 'code': 1081, 'title': 'Wave ensemble forecast'},
{'abbr': 'wasf', 'code': 1082, 'title': 'Wave seasonal forecast'},
{'abbr': 'mawv', 'code': 1083, 'title': 'Multianalysis wave data'},
{'abbr': 'ewhc',
'code': 1084,
'title': 'Wave ensemble forecast hindcast',
'units': 'obsolete'},
{'abbr': 'wvhc', 'code': 1085, 'title': 'Wave hindcast'},
{'abbr': 'weov', 'code': 1086, 'title': 'Wave ensemble forecast overlap'},
{'abbr': 'wavm', 'code': 1087, 'title': 'Wave model', 'units': 'standalone'},
{'abbr': 'ewda', 'code': 1088, 'title': 'Ensemble wave data assimilation'},
{'abbr': 'dacw', 'code': 1089, 'title': 'Daily climatology wave'},
{'abbr': 'seas', 'code': 1090, 'title': 'Seasonal forecast'},
{'abbr': 'sfmm',
'code': 1091,
'title': 'Seasonal forecast atmospheric monthly means'},
{'abbr': 'swmm',
'code': 1092,
'title': 'Seasonal forecast wave monthly means'},
{'abbr': 'mofc', 'code': 1093, 'title': 'Monthly forecast'},
{'abbr': 'mofm', 'code': 1094, 'title': 'Monthly forecast means'},
{'abbr': 'wamf', 'code': 1095, 'title': 'Wave monthly forecast'},
{'abbr': 'wmfm', 'code': 1096, 'title': 'Wave monthly forecast means'},
{'abbr': 'smma', 'code': 1097, 'title': 'Seasonal monthly means anomalies'},
{'abbr': 'seap', 'code': 1110, 'title': 'Sensitive area prediction'},
{'abbr': 'mnfc', 'code': 1200, 'title': 'Real-time'},
{'abbr': 'mnfh', 'code': 1201, 'title': 'Hindcasts'},
{'abbr': 'mnfa', 'code': 1202, 'title': 'Anomalies'},
{'abbr': 'mnfw', 'code': 1203, 'title': 'Wave real-time'},
{'abbr': 'mfhw', 'code': 1204, 'title': 'Monthly forecast hindcasts wave'},
{'abbr': 'mfaw', 'code': 1205, 'title': 'Wave anomalies'},
{'abbr': 'mnfm', 'code': 1206, 'title': 'Real-time means'},
{'abbr': 'mfhm', 'code': 1207, 'title': 'Hindcast means'},
{'abbr': 'mfam', 'code': 1208, 'title': 'Anomaly means'},
{'abbr': 'mfwm', 'code': 1209, 'title': 'Wave real-time means'},
{'abbr': 'mhwm', 'code': 1210, 'title': 'Wave hindcast means'},
{'abbr': 'mawm', 'code': 1211, 'title': 'Wave anomaly means'},
{'abbr': 'mmsf', 'code': 1220, 'title': 'Multi-model seasonal forecast'},
{'abbr': 'msmm',
'code': 1221,
'title': 'Multi-model seasonal forecast atmospheric monthly means'},
{'abbr': 'wams', 'code': 1222, 'title': 'Multi-model seasonal forecast wave'},
{'abbr': 'mswm',
'code': 1223,
'title': 'Multi-model seasonal forecast wave monthly means'},
{'abbr': 'mmsa',
'code': 1224,
'title': 'Multi-model seasonal forecast monthly anomalies'},
{'abbr': 'mmaf', 'code': 1230, 'title': 'Multi-model multi-annual forecast'},
{'abbr': 'mmam',
'code': 1231,
'title': 'Multi-model multi-annual forecast means'},
{'abbr': 'mmaw',
'code': 1232,
'title': 'Multi-model multi-annual forecast wave'},
{'abbr': 'mmwm',
'code': 1233,
'title': 'Multi-model multi-annual forecast wave means'},
{'abbr': 'esmm', 'code': 1240, 'title': 'Combined multi-model monthly means'},
{'abbr': 'ehmm',
'code': 1241,
'title': 'Combined multi-model hindcast monthly means'},
{'abbr': 'edmm',
'code': 1242,
'title': 'Ensemble data assimilation monthly means'},
{'abbr': 'edmo',
'code': 1243,
'title': 'Ensemble data assimilation monthly means of daily means'},
{'abbr': 'ewmo',
'code': 1244,
'title': 'Ensemble wave data assimilation monthly means of daily means'},
{'abbr': 'ewmm',
'code': 1245,
'title': 'Ensemble wave data assimilation monthly means'},
{'abbr': 'espd', 'code': 1246, 'title': 'Ensemble supplementary data'},
{'abbr': 'lwda', 'code': 1247, 'title': 'Long window daily archive'},
{'abbr': 'lwwv', 'code': 1248, 'title': 'Long window wave'},
{'abbr': 'elda',
'code': 1249,
'title': 'Ensemble Long window Data Assimilation'},
{'abbr': 'ewla',
'code': 1250,
'title': 'Ensemble Wave Long window data Assimilation'},
{'abbr': 'wamd', 'code': 1251, 'title': 'Wave monthly means of daily means'},
{'abbr': 'gfas', 'code': 1252, 'title': 'Global fire assimilation system'},
{'abbr': 'cnrm', 'code': 2231, 'title': 'Meteo France climate centre'},
{'abbr': 'mpic', 'code': 2232, 'title': 'Max Plank Institute'},
{'abbr': 'ukmo', 'code': 2233, 'title': 'UKMO climate centre'})
| [
"[email protected]"
] | |
c5de7953254fdb1308494e7667c8a26fe17950d4 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/ModemManager/ModemOmaClass.py | aa6224a47cc7b630f23469b8985f3eb96ae06ecb | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 4,603 | py | # encoding: utf-8
# module gi.repository.ModemManager
# from /usr/lib64/girepository-1.0/ModemManager-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.Gio as __gi_overrides_Gio
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Gio as __gi_repository_Gio
import gobject as __gobject
class ModemOmaClass(__gi.Struct):
"""
:Constructors:
::
ModemOmaClass()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
parent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(ModemOmaClass), '__module__': 'gi.repository.ModemManager', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'ModemOmaClass' objects>, '__weakref__': <attribute '__weakref__' of 'ModemOmaClass' objects>, '__doc__': None, 'parent': <property object at 0x7f69438c7950>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(ModemOmaClass)
| [
"[email protected]"
] | |
4dc98360b6b09c2585684d65e841dc4239d20e34 | 401ea01ffb848f1eabd8aa17690ec1ff5dc8e6bd | /test/test_action_event.py | 62370e432b086e11d277be6874c16774885dc2d8 | [] | no_license | bbrangeo/python-api-client | 735acda3627d7a0ddd78ecb1e9617bb4082c9001 | c2481e0cd012a41aeceefdce289d48509540b909 | refs/heads/master | 2020-03-14T18:24:20.888631 | 2018-04-30T14:47:47 | 2018-04-30T14:47:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | # coding: utf-8
"""
BIMData API
BIMData API documentation # noqa: E501
OpenAPI spec version: v1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import bimdata_api_client
from bimdata_api_client.models.action_event import ActionEvent # noqa: E501
from bimdata_api_client.rest import ApiException
class TestActionEvent(unittest.TestCase):
"""ActionEvent unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testActionEvent(self):
"""Test ActionEvent"""
# FIXME: construct object with mandatory attributes with example values
# model = bimdata_api_client.models.action_event.ActionEvent() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
9d4a06013d0517080446f5dfd957558c9e77d8d9 | a86fda09a185ebf367e31cf26589161303f9497a | /metrics/topologyVertex.py | 3b6fc44e77e468ade8bbf02eebf09fef94eecc0f | [
"BSD-3-Clause"
] | permissive | kristianeschenburg/metrics | 67ec2cd5b697241eee35da46daf71b2d735cdb64 | 53900f8130cb7dd762ae3e816225fb4f178a5b29 | refs/heads/master | 2020-03-25T10:27:29.623361 | 2019-04-02T21:20:08 | 2019-04-02T21:20:08 | 116,600,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,711 | py | import numpy as np
def labelCounts(label, adjacencyList):
"""
For each vertex, count the number of vertices with other labels that are
adjacent to it.
Parameters:
- - - - -
label : int, array
label vector
adjacencyList : SurfaceAdjacency
adjacency list for surface mesh
"""
values = list(set(np.unique(label)).difference({-1,0 }))
aggregateCounts = {k: {h: 0 for h in values} for k in values}
aggregateMatrix = np.zeros((len(values), len(values)))
# loop over each unique label value
for j, v in enumerate(values):
# get indices of label
idxv = np.where(label == v)[0]
# loop over vertices with this label and count number of neighboring vertices
# with different label values
for ind in idxv:
n = adjacencyList[ind]
nCounts = neighborhoodCounts(n, label, values)
for n in nCounts:
aggregateCounts[v][n] += nCounts[n]
counts = aggregateCounts[v].values()
aggregateMatrix[j, :] = counts
rowSums = aggregateMatrix.sum(axis=1)
rowNormed = aggregateMatrix / rowSums[:, None]
return [aggregateMatrix, rowNormed]
def neighborhoodCounts(subscripts, label, values):
"""
Compute the number of neighbors of each label directly adjacent to a
vertex.
Parameters:
- - - - -
subscripts : list
indices of directly-adjacent vertices / voxels
    label : int, array
label vector
values : accepted label values
"""
neighbors = list(label[subscripts])
counts = {}.fromkeys(values)
for v in values:
counts[v] = neighbors.count(v)
return counts
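
# Usage sketch (hypothetical inputs):
#   label = np.array([1, 1, 2, 2]) and adj = {vertex: [neighbor indices], ...}
#   agg, normed = labelCounts(label, adj)  # raw label-adjacency counts and the row-normalized matrix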
| [
"[email protected]"
] | |
d8c33a598956091bab533eda8716d63e6ce852b5 | 2986a62a5221885b2f070060aadb9c9ab1e84035 | Aula10 - Condições em Python/ex033 - Maior e menor valor.py | 1084395b85db8996bebe2ec6aea68e8a3f430e7f | [] | no_license | igorkoury/cev-phyton-exercicios-parte-1 | dcb39772be48ba7333a391af4e9fda025654472f | b187565ca996402f4862ad3000d18bfb461e269e | refs/heads/main | 2023-08-28T02:38:32.311162 | 2021-10-06T21:38:39 | 2021-10-06T21:38:39 | 414,375,879 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 627 | py | # Write a program that reads 3 numbers and says which is the largest and which is the smallest.
print("Enter 3 numbers to find out which is the smallest and the largest.")
n1 = int(input("Enter a number: "))
n2 = int(input("Enter another: "))
n3 = int(input("Enter one more: "))
print("You entered {}, {} and {} respectively.".format(n1, n2, n3))
menor = n1
if n1 > n2 and n3 > n2:
    menor = n2
if n2 > n3 and n1 > n3:
    menor = n3
maior = n1
# fixed: n2 is the largest only when it beats both n1 and n3 (the original test compared n1 to both)
if n1 < n2 and n3 < n2:
    maior = n2
if n2 < n3 and n1 < n3:
    maior = n3
print("The smallest value entered was {}.".format(menor))
print("The largest value entered was {}.".format(maior))
| [
"[email protected]"
] | |
df9181e897d1c79756259eeadcb4711eca0fff67 | 04975a41eb459f1528dcbdcb1143a3cb535aa620 | /Tree_easy/leetcode_590.py | ef470b8651a6a2b4c709b9cdf7f36ce6651cff88 | [] | no_license | RickLee910/Leetcode_easy | 2a50d632379826979a985e1b9950d4cf6bbd8b18 | c2687daf334f96a908737067bb915b8b072d0d56 | refs/heads/master | 2023-01-29T11:09:26.701243 | 2020-12-02T04:36:14 | 2020-12-02T04:36:14 | 294,952,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | class Solution:
def postorder(self, root: 'Node') -> List[int]:
ans = []
def dfs(root,ans):
            if root is None:
return
for i in root.children:
dfs(i,ans)
ans.append(root.val)
return ans
dfs(root, ans)
return ans
| [
"[email protected]"
] | |
455ab705f17ca676bf7042b5beb8912bd9ac74c8 | 6ab31b5f3a5f26d4d534abc4b197fe469a68e8e5 | /tests/kyu_7_tests/test_patterncraft_visitor.py | 8824069b8298d54921b0b5d2084e335ee8fe30a0 | [
"MIT"
] | permissive | mveselov/CodeWars | e4259194bfa018299906f42cd02b8ef4e5ab6caa | 1eafd1247d60955a5dfb63e4882e8ce86019f43a | refs/heads/master | 2021-06-09T04:17:10.053324 | 2017-01-08T06:36:17 | 2017-01-08T06:36:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py | import unittest
from katas.kyu_7.patterncraft_visitor import Marauder, Marine, TankBullet
class PatterncraftVisitorTestCase(unittest.TestCase):
def setUp(self):
self.bullet = TankBullet()
self.light = Marine()
self.bullet2 = TankBullet()
self.armored = Marauder()
def test_equals(self):
self.light.accept(self.bullet)
self.assertEqual(self.light.health, 100 - 21)
def test_equals_2(self):
self.armored.accept(self.bullet2)
self.assertEqual(self.armored.health, 125 - 32)
| [
"[email protected]"
] | |
31de42d8a93f76985b06a65ff8e74ce14733af1e | bec8f33002130d8395f4ac4f0c74b785aa22cac5 | /appium/options/common/language_option.py | f82de63d08680cc5a2a131403286ba603d4c8827 | [
"Apache-2.0"
] | permissive | appium/python-client | 1c974fdf1ac64ce4ac37f3fc8c0a3e30c186d3ca | 2e49569ed45751df4c6953466f9769336698c033 | refs/heads/master | 2023-09-01T22:14:03.166402 | 2023-09-01T11:52:27 | 2023-09-01T11:52:27 | 18,525,395 | 1,588 | 606 | Apache-2.0 | 2023-09-10T02:00:09 | 2014-04-07T17:01:35 | Python | UTF-8 | Python | false | false | 1,317 | py | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional
from .supports_capabilities import SupportsCapabilities
LANGUAGE = 'language'
class LanguageOption(SupportsCapabilities):
@property
def language(self) -> Optional[str]:
"""
Language abbreviation to use in a test session.
"""
return self.get_capability(LANGUAGE)
@language.setter
def language(self, value: str) -> None:
"""
Set language abbreviation to use in a test session.
"""
self.set_capability(LANGUAGE, value)
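
# Usage sketch (assuming an options class that mixes in LanguageOption):
#   options.language = "en"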
| [
"[email protected]"
] | |
8f16b2d94dc4d8a4ebc5c2b779b1049670c0faa5 | d8829cbc2d2863f68cb1b447a878dce0ac20a878 | /scraper/positions.py | 40a42e62b99118840ea7c9eef7ef2123b8b36208 | [] | no_license | XiaoxiaoWang87/InsightPrj | 170a9757dfdf4669ee2c52322f2f5e5d766ce2a1 | 767a3719fad93ddb9711817f543b5e7b1822f680 | refs/heads/master | 2021-01-19T00:41:44.195663 | 2014-10-06T22:28:24 | 2014-10-06T22:28:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | import pandas as pd
df = pd.read_csv('position_draft.csv',sep='\t')
for index, row in df.iterrows():
#if row["DRAFT"] == '[]':
if row["POSITION"] == '[]':
print row["PLAYERCODE"]
| [
"="
] | = |
8ba17eac98795a2e90941bb8a59a7c5f271b75eb | a9937139b1af85180cea706a52d447abce2430f4 | /e/Pliki/otwieranie_pliku.py | 1f748da7717c491ec9513b293b31648e29137cf4 | [] | no_license | MirekPz/Altkom | 8f16014d43adb10e87804ae2b5d23151924cb226 | 0a49e75e681593b41d07cbff63dea0723a11756b | refs/heads/master | 2020-09-29T00:12:37.990353 | 2019-12-13T15:52:30 | 2019-12-13T15:52:30 | 226,899,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | plik = open('example_txt_file.dmo', 'r')
zawartosc = plik.read()
print(zawartosc)
plik.close()
input('second:')
# with open
with open('example_txt_file.dmo') as file:
print(file.read())
| [
"[email protected]"
] | |
a3d44c517ab38a95c83ab8b1483260b48e15327b | 4328c71ddbb3dd564c04c599f8e95afef16e0d7c | /dg/4-sysProcess/06-全局变量在多个进程中不共享.py | 773b57b49e89c4bc420af8e6576049b0201d5761 | [] | no_license | amtfbky/git_py | 470fb7bc8f2c5017d47c31e1af8bb042bfe377b4 | 6159049b088036ebd133f5dab917d3e540a304c8 | refs/heads/master | 2020-03-09T04:16:12.991206 | 2018-06-01T01:16:05 | 2018-06-01T01:16:05 | 128,583,362 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | import os
import time
g_num = 100
ret = os.fork()
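# fork() returns 0 in the child and the child's pid in the parent;
# each process then modifies its own copy of g_num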
if ret == 0:
print("------process-1--------")
g_num += 1
print("------pricess-1---g_num=%d--"%g_num)
else:
time.sleep(1)
print("------process-2--------")
print("------pricess-2---g_num=%d--"%g_num)
| [
"[email protected]"
] | |
2901a64d6a75331ae821fc785d834c03fc4d5a31 | 616c3c02be31b9ae4d06bd7c5a8d4a2e7c446aa1 | /1370.上升下降字符串.py | 36aae8d92dbacc45541253677dc27fdce052d717 | [] | no_license | L1nwatch/leetcode-python | 8b7c47c04ee9400d50d8b0764a544a0463df8f06 | 0484cbc3273ada25992c72105658cd67411c5d39 | refs/heads/master | 2023-01-11T14:53:15.339276 | 2023-01-11T05:24:43 | 2023-01-11T05:24:43 | 194,516,548 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 935 | py | #
# @lc app=leetcode.cn id=1370 lang=python3
#
# [1370] 上升下降字符串 (Increasing Decreasing String)
#
# @lc code=start
class Solution:
def sortString(self, s: str) -> str:
s = list(s)
s.sort()
answer = list()
while len(s) > 0:
# step1
char = s.pop(0)
answer.append(char)
# step2 + step3
index = 0
while index < len(s):
if s[index] != char:
char = s.pop(index)
answer.append(char)
else:
index += 1
# step4
if len(s) > 0:
char = s.pop()
answer.append(char)
# step5 + step6
for index in range(len(s)-1, -1, -1):
if s[index] != char:
char = s.pop(index)
answer.append(char)
return "".join(answer)
# @lc code=end
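# Example (from the problem statement):
#   Solution().sortString("aaaabbbbcccc") -> "abccbaabccba"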
| [
"[email protected]"
] | |
be8e5e49757da2d235076e9890d0f790ea19a91d | 1273fc880f09305fd1ed9ddc9b273df4253f15e7 | /muffin_babel/__init__.py | 3c8cca5765181a5b60e915fac27beabaa6161269 | [
"MIT"
] | permissive | klen/muffin-babel | ac63b997bb7ecb589b72d6e8a0ce3092fb87193f | c627a4da4263e883bec04793c6637d3f8aa1f9bc | refs/heads/develop | 2023-06-01T04:38:45.708882 | 2023-05-22T12:04:37 | 2023-05-22T12:04:37 | 39,961,646 | 3 | 0 | MIT | 2021-05-28T10:02:34 | 2015-07-30T16:51:18 | Python | UTF-8 | Python | false | false | 10,443 | py | """Muffin-Babel -- I18n engine for Muffin framework."""
import csv
import logging
import sys
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Optional, Tuple, TypeVar
from asgi_babel import current_locale, select_locale_by_request
from babel import Locale, UnknownLocaleError, support
from babel.messages.catalog import Catalog
from babel.messages.extract import extract_from_dir
from babel.messages.mofile import write_mo
from babel.messages.pofile import read_po, write_po
from muffin import Application, Request
from muffin.plugins import BasePlugin
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
if TYPE_CHECKING:
from asgi_tools.types import TASGIReceive, TASGISend
TLocaleSelector = Callable[[Request], Awaitable[Optional[str]]]
TVLocaleSelector = TypeVar("TVLocaleSelector", bound=TLocaleSelector)
TRANSLATIONS: Dict[Tuple[str, str], support.Translations] = {}
class Plugin(BasePlugin):
"""The class is used to control the babel integration to Muffin application."""
name = "babel"
defaults = {
"auto_detect_locale": True,
"configure_jinja2": True, # install i18n support in muffin-jinja2
"default_locale": "en", # default locale
"domain": "messages", # default domain
"locale_folders": ["locales"], # where compiled locales are leaving
"sources_map": [
("**.py", "python"),
("**.html", "jinja2"),
],
"options_map": {"**.html": {"encoding": "utf-8"}},
}
def setup(self, app: Application, **options): # noqa: C901,PLR0915
"""Setup the plugin's commands."""
super(Plugin, self).setup(app, **options)
self.domain = self.cfg.domain
self.__locale_selector: Callable[
[Request],
Awaitable[Optional[str]],
] = select_locale_by_request
# Install a middleware for autodetection
if self.cfg.auto_detect_locale:
app.middleware(self.__middleware__, insert_first=True)
@app.manage(lifespan=False)
def babel_extract_messages(
*dirnames: str,
project: str = app.cfg.name,
domain: str = self.domain,
locations: bool = True,
charset: str = "utf-8",
locale: str = self.cfg.default_locale,
):
"""Extract messages from source code.
:param charset: charset to use in the output
:param domain: set domain name for locales
:param project: set project name in output
:param version: set project version in output
:param locations: add message locations
"""
paths = [Path(d) for d in dirnames]
dirs = [path for path in paths if path.is_dir()]
catalog = Catalog(locale=locale, project=project, charset=charset)
for dpath in dirs:
for filename, lineno, message, comments, context in extract_from_dir(
dpath,
method_map=self.cfg.sources_map,
options_map=self.cfg.options_map,
):
lines = []
if locations:
filepath = dpath.absolute() / filename
lines = [(filepath.as_posix(), lineno)]
catalog.add(
message,
None,
lines,
auto_comments=comments,
context=context,
)
locales_dir = Path(self.cfg.locale_folders[0])
output = locales_dir / locale / "LC_MESSAGES" / f"{domain}.po"
if output.exists():
with output.open("rb") as f:
template = read_po(f, locale=locale, charset=charset)
template.update(catalog)
catalog = template
if not output.parent.exists():
output.parent.mkdir(parents=True)
logger.info("writing PO template file to %s", output)
with output.open("wb") as f:
write_po(f, catalog, sort_output=not locations, sort_by_file=locations)
@app.manage(lifespan=False)
def babel_compile_messages(*, use_fuzzy=False, domain=self.domain):
"""Compile messages for locales.
:param domain: set domain name for locales
"""
for locales_dir in self.cfg.locale_folders:
source = Path(locales_dir)
for locale in source.iterdir():
po_file = locale / "LC_MESSAGES" / f"{domain}.po"
if not po_file.exists():
continue
with po_file.open("rb") as po:
catalog = read_po(po, locale.name)
mo_file = po_file.with_suffix(".mo")
with mo_file.open("wb") as mo:
logger.info("writing MO template file to %s", mo_file)
write_mo(mo, catalog, use_fuzzy=use_fuzzy)
@app.manage(lifespan=False)
def babel_export_csv(*, domain: str = self.domain, locale: str = self.cfg.default_locale):
"""Export messages from a PO files as CSV."""
writer = csv.writer(sys.stdout)
writer.writerow(["id", "string", "context", "comment"])
for locales_dir in self.cfg.locale_folders:
po_file = Path(locales_dir) / locale / "LC_MESSAGES" / f"{domain}.po"
if not po_file.exists():
continue
with po_file.open("rb") as po:
catalog = read_po(po, locale)
for message in catalog:
writer.writerow(
[
message.id,
message.string,
message.context,
"\n".join(message.auto_comments),
]
)
async def __middleware__(
self, handler: Callable, request: Request, receive: "TASGIReceive", send: "TASGISend"
) -> Any:
"""Auto detect a locale by the given request."""
lang = await self.__locale_selector(request)
self.current_locale = lang or self.cfg.default_locale # type: ignore[assignment]
return await handler(request, receive, send)
async def startup(self):
"""Tune Jinja2 if the plugin is installed."""
if self.cfg.configure_jinja2 and "jinja2" in self.app.plugins:
jinja2 = self.app.plugins["jinja2"]
env = jinja2.env # type: ignore[]
env.add_extension("jinja2.ext.i18n")
env.install_gettext_callables(
lambda x: self.get_translations().ugettext(x),
lambda s, p, n: self.get_translations().ungettext(s, p, n),
newstyle=True,
)
def locale_selector(self, fn: TVLocaleSelector) -> TVLocaleSelector:
"""Update self locale selector."""
self.__locale_selector = fn
return fn
@property
def current_locale(self) -> Locale:
"""Get current locale."""
locale = current_locale.get()
if locale is None:
locale = Locale.parse(self.cfg.default_locale, sep="-")
current_locale.set(locale)
return locale
@current_locale.setter
def current_locale(self, lang: str):
"""Set current locale."""
try:
locale = Locale.parse(lang, sep="-")
return current_locale.set(locale)
except (UnknownLocaleError, ValueError):
return
@contextmanager
def locale_ctx(self, lang: str):
"""Update current locale as context manager."""
old_locale = current_locale.get()
self.current_locale = lang # type: ignore[assignment]
yield self
current_locale.set(old_locale)
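
    # Usage sketch (assuming `babel` is an initialized Plugin instance):
    #   with babel.locale_ctx("fr"):
    #       babel.gettext("Hello")  # looks up the French catalog if one is loaded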
def get_translations(
self, domain: Optional[str] = None, locale: Optional[Locale] = None
) -> support.Translations:
"""Load and cache translations."""
locale = locale or self.current_locale
domain = domain or self.domain
if (domain, locale.language) not in TRANSLATIONS:
translations = None
for path in reversed(self.cfg.locale_folders):
trans = support.Translations.load(path, locales=locale.language, domain=domain)
if translations:
translations._catalog.update(trans._catalog)
else:
translations = trans
TRANSLATIONS[(domain, locale.language)] = translations # type: ignore[assignment]
return TRANSLATIONS[(domain, locale.language)]
def gettext(self, string: str, domain: Optional[str] = None, **variables) -> str:
"""Translate a string with the current locale."""
t = self.get_translations(domain)
return render(t.ugettext(string), variables)
def ngettext(
self, singular: str, plural: str, num: int, domain: Optional[str] = None, **variables
) -> str:
"""Translate a string wity the current locale.
The `num` parameter is used to dispatch between singular and various plural forms of the
message.
"""
variables.setdefault("num", num)
t = self.get_translations(domain)
return t.ungettext(singular, plural, num) % variables
def pgettext(self, context: str, string: str, domain: Optional[str] = None, **variables) -> str:
"""Like :meth:`gettext` but with a context."""
t = self.get_translations(domain)
return render(t.upgettext(context, string), variables)
def npgettext(
self,
context: str,
singular: str,
plural: str,
num: int,
domain: Optional[str] = None,
**variables,
) -> str:
"""Like :meth:`ngettext` but with a context."""
variables.setdefault("num", num)
t = self.get_translations(domain)
return t.unpgettext(context, singular, plural, num) % variables
def render(value: str, variables) -> str:
"""Render a string with variables."""
if variables:
return value % variables
return value
# ruff: noqa: PLR0913
| [
"[email protected]"
] | |
f4ba49f50c1a6829dee16f64d136e2f0406c7115 | d992f98d3c5d009c567e9dac83f38770d8d72f77 | /postCMeval/annoate_summary_with_pscore.py | 942920673d5212449ff7547f0710420b4b39e90f | [] | no_license | rlleras/quasiClique | c65643dcc35e2426e4c519ee1b3400895e9610cc | d57be41a213d2e57778d2feb9c103594ebbbf705 | refs/heads/master | 2021-01-21T20:53:05.841720 | 2012-02-28T22:49:17 | 2012-02-28T22:49:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,335 | py | import os
import sys
import glob
import csv
def annotate_rank_summary_with_pscore(filename, delimiter=','):
"""
Given a rank_cmfinder.pl-output summary file X,
create a new file X.pscore_added that has the motifs' pscores appended
"""
motif_dir = os.path.dirname(filename)
in_csv = csv.DictReader(open(filename), delimiter=delimiter) # cmfinder rank summaries are comma-separated
with open(filename + '.pscore_added', 'w') as out_f:
if in_csv.fieldnames is None:
print >> sys.stderr, "file {0} is odd. IGNORE now".format(filename)
return
new_fieldnames = in_csv.fieldnames + ['pscore']
out_csv = csv.DictWriter(out_f, new_fieldnames, delimiter=delimiter)
# need to write out the field names
#out_csv.writeheader()# lol this function only in 2.7 and i have 2.6 Orz
out_f.write(delimiter.join(new_fieldnames) + '\n')
for obj in in_csv:
motif_full_path = os.path.join(motif_dir, obj['motif'])
pscore = os.popen("grep \"Total pair posterior\" {0}.pscoreout".format(motif_full_path)).read().strip()
obj['pscore'] = float( pscore[len('Total pair posterior '):] )
out_csv.writerow(obj)
if __name__ == "__main__":
for filename in glob.iglob('motifs/*/*.fna.summary'):
print >> sys.stderr, "annotating pscore to {0}.pscore_added....".format(filename)
annotate_rank_summary_with_pscore(filename)
| [
"[email protected]"
] | |
4532e075eae5a1ddc4b9609e44839947d035ec9f | a68f37fcbf069c0656d4838af7386d6a9919fe59 | /company_account/decorators.py | 1454352528e707241395d14220ced8df0d57a92c | [] | no_license | mitchellpottratz/JobMatch | 7dd2cbd979ca55cf651bcee4356a97e061145b90 | 797a9b1c3dfad57c05db52384d87d5f82be632f5 | refs/heads/master | 2022-11-26T08:44:27.535809 | 2019-12-13T20:05:11 | 2019-12-13T20:05:11 | 224,901,935 | 0 | 0 | null | 2022-11-22T04:53:46 | 2019-11-29T18:08:55 | JavaScript | UTF-8 | Python | false | false | 457 | py | from functools import wraps
from django.http import HttpResponseRedirect
# this decorators checks if the current user is a company_user, and redirects
# them to the login user if they are not a company_user
def company_account_required(function):
@wraps(function)
def wrap(request, *args, **kwargs):
if not request.user.company_user:
return HttpResponseRedirect('/users/login/')
else:
return function(request, *args, **kwargs)
return wrap
| [
"[email protected]"
] | |
6add13271b6ebf08177a20b058dc78e738d70a03 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/5320734/snippet.py | 5d6a295d577a6aa909c64302aa627f27dc8e67b7 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 1,522 | py | import os
import matplotlib.pyplot as plt
def save(path, ext='png', close=True, verbose=True):
"""Save a figure from pyplot.
Parameters
----------
path : string
The path (and filename, without the extension) to save the
figure to.
ext : string (default='png')
The file extension. This must be supported by the active
matplotlib backend (see matplotlib.backends module). Most
backends support 'png', 'pdf', 'ps', 'eps', and 'svg'.
close : boolean (default=True)
Whether to close the figure after saving. If you want to save
the figure multiple times (e.g., to multiple formats), you
should NOT close it in between saves or you will have to
re-plot it.
verbose : boolean (default=True)
Whether to print information about when and where the image
has been saved.
"""
# Extract the directory and filename from the given path
directory = os.path.split(path)[0]
filename = "%s.%s" % (os.path.split(path)[1], ext)
if directory == '':
directory = '.'
# If the directory does not exist, create it
if not os.path.exists(directory):
os.makedirs(directory)
# The final path to save to
savepath = os.path.join(directory, filename)
if verbose:
print("Saving figure to '%s'..." % savepath),
# Actually save the figure
plt.savefig(savepath)
# Close it
if close:
plt.close()
if verbose:
print("Done") | [
"[email protected]"
] | |
64c32586630e0c01a48bb1e4907ab4ac66192273 | 98d8b7dd018b95dbf2c81431561546fd9619e391 | /model/ItemLimitModel.py | 4c9e5c484728ea86929619b001baca2ad8e6fea9 | [] | no_license | c1xfr2e/kkyadmin | 1642c42659724a676000768bab559684581f3b07 | e5cef8b8ed63d3db8c1ff22af0edf44c36c4df63 | refs/heads/master | 2021-09-08T08:55:17.448640 | 2018-03-09T00:07:46 | 2018-03-09T00:07:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,436 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
""""""
from third_party.orm.field import StringField
from third_party.orm.field import IntegerField
from third_party.orm.field import ObjectIdField
from third_party.orm.field import ListField
from third_party.orm import Document
from tornado import gen
import settings
import time
class ItemLimitModel(Document):
meta = {
'db': settings.mongodb.SHARK_DB,
'collection': 'item_limit'
}
item_id = ObjectIdField(required=True)
user_id = ObjectIdField(required=True)
counter = IntegerField(required=True, default=int(0))
updated_time = ListField(IntegerField(), required=True)
@classmethod
@gen.coroutine
def UpdateUserItemCounter(cls, user_id, item_id, counter_inc):
condition = {
"user_id": user_id,
"item_id": item_id
}
setter = {
"$inc": {
"counter": counter_inc
},
"$push": {
"updated_time": int(time.time() * 1000)
}
}
result = yield cls.update(condition, setter, upsert=True)
raise gen.Return(result)
@classmethod
@gen.coroutine
def GetUserItemLimit(cls, user_id, item_id):
condition = {
"user_id": user_id,
"item_id": item_id
}
result = yield cls.find_one(condition)
raise gen.Return(result)
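
# Usage sketch (inside a tornado coroutine; the ids are hypothetical ObjectIds):
#   yield ItemLimitModel.UpdateUserItemCounter(user_id, item_id, counter_inc=1)
#   limit_doc = yield ItemLimitModel.GetUserItemLimit(user_id, item_id)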
| [
"[email protected]"
] | |
c47b81eae5619450ed32f1b322237587c0217bf4 | 33d490698f2958f2d53a6436043959bac5c9f63d | /l10n_es_aeat_mod340/__openerp__.py | 5bc97b620692f733dc328a58c8b83803b21a8568 | [] | no_license | ideosoft/odoo-modules | cf1a4bf0a1f0f25bfa44a83f8c10a2c73baed67e | 3183a533ec9b89a57fd2b4c09cca0111afc86730 | refs/heads/master | 2021-03-30T18:13:42.873503 | 2016-07-14T13:46:01 | 2016-07-14T13:46:01 | 49,328,128 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,276 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Ting (http://www.ting.es) All Rights Reserved.
# Copyright (c) 2011-2013 Acysos S.L. (http://acysos.com)
# Ignacio Ibeas Izquierdo <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Spanish tax form 340 (modelo 340) file generation and VAT book',
'version': '2.0',
'author': "Acysos S.L., "
"Francisco Pascual (Ting), "
"Nan-tic, "
"Odoo Community Association (OCA)",
'website': 'www.acysos.com, www.ting.es, www.nan-tic.com',
'category': 'Localisation/Accounting',
'description': '''
Module for filing Spanish tax form 340 (modelo 340). Export to the AEAT
format. VAT book.
The taxes included in this form are flagged on the tax base code. By default
it updates all the base codes that must be included.
If the chart of accounts is installed, remember to use account_chart_update to
update the codes: Accounting & Finance -> Configuration -> Financial
Accounting -> Update chart of accounts from a chart template.
Search of issued and received invoices.
Export of issued and received invoices to the AEAT format.
Export of invoices with several tax rates. Operation key C.
Intra-community invoices, except the operations referred to in article 66 of
the RIVA, which have a special treatment.
Refund invoices.
Ticket summary invoices.
Allows printing the VAT book, based on the same legislation.
---- PENDING ITEMS (TODO LIST) ------------------------------------------------
Investment goods invoices.
Intra-community invoices. Operations referred to in article 66 of the RIVA.
Ticket summary journal entries.
Export of invoice summary journal entries.
''',
'license': 'AGPL-3',
'depends': [
'account',
'base_vat',
'l10n_es',
'l10n_es_aeat',
'account_refund_original',
'account_chart_update',
],
'data': [
'report/report_view.xml',
'wizard/export_mod340_to_boe.xml',
'mod340_view.xml',
'mod340_workflow.xml',
'security/ir.model.access.csv',
'res_partner_view.xml',
'mod340_sequence.xml',
'account_invoice_view.xml',
'account_view.xml',
'taxes_data.xml',
],
'installable': True,
}
| [
"[email protected]"
] | |
6e475fa396e9a6dcef96eed3caf04907181bd82f | 03f9b8bdea312636afb4df3737b55cb0cc4b21ff | /RLEIterator.py | abaa4b9c89f81c3310c20d22aec12ca5c65ea68a | [] | no_license | ellinx/LC-python | f29dd17bbe15407ba0d06ad68386efdc9a343b56 | 9190d3d178f1733aa226973757ee7e045b7bab00 | refs/heads/master | 2021-06-01T15:21:24.379811 | 2020-10-29T04:37:07 | 2020-10-29T04:37:07 | 132,704,788 | 1 | 1 | null | 2019-05-15T03:26:11 | 2018-05-09T05:13:26 | Python | UTF-8 | Python | false | false | 2,435 | py | """
Write an iterator that iterates through a run-length encoded sequence.
The iterator is initialized by RLEIterator(int[] A), where A is a run-length encoding of some sequence.
More specifically, for all even i, A[i] tells us the number of times that
the non-negative integer value A[i+1] is repeated in the sequence.
The iterator supports one function: next(int n),
which exhausts the next n elements (n >= 1) and returns the last element exhausted in this way.
If there is no element left to exhaust, next returns -1 instead.
For example, we start with A = [3,8,0,9,2,5], which is a run-length encoding of the sequence [8,8,8,5,5].
This is because the sequence can be read as "three eights, zero nines, two fives".
Example 1:
Input: ["RLEIterator","next","next","next","next"], [[[3,8,0,9,2,5]],[2],[1],[1],[2]]
Output: [null,8,8,5,-1]
Explanation:
RLEIterator is initialized with RLEIterator([3,8,0,9,2,5]).
This maps to the sequence [8,8,8,5,5].
RLEIterator.next is then called 4 times:
.next(2) exhausts 2 terms of the sequence, returning 8. The remaining sequence is now [8, 5, 5].
.next(1) exhausts 1 term of the sequence, returning 8. The remaining sequence is now [5, 5].
.next(1) exhausts 1 term of the sequence, returning 5. The remaining sequence is now [5].
.next(2) exhausts 2 terms, returning -1. This is because the first term exhausted was 5,
but the second term did not exist. Since the last term exhausted does not exist, we return -1.
Note:
1. 0 <= A.length <= 1000
2. A.length is an even integer.
3. 0 <= A[i] <= 10^9
4. There are at most 1000 calls to RLEIterator.next(int n) per test case.
5. Each call to RLEIterator.next(int n) will have 1 <= n <= 10^9.
"""
class RLEIterator:
def __init__(self, A):
"""
:type A: List[int]
"""
self.A = A
self.idx = 0
    def next(self, n):
        """
        :type n: int
        :rtype: int
        """
        # Walk the encoding two entries at a time: A[idx] is a run length and
        # A[idx + 1] is the repeated value. Zero-length runs are skipped
        # naturally because n >= 1, and exhausting a run exactly still
        # returns that run's value.
        while self.idx < len(self.A):
            if n > self.A[self.idx]:
                n -= self.A[self.idx]
                self.idx += 2
            else:
                self.A[self.idx] -= n
                return self.A[self.idx + 1]
        return -1
# Your RLEIterator object will be instantiated and called as such:
# obj = RLEIterator(A)
# param_1 = obj.next(n)
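# Minimal self-check (added sketch) replaying Example 1 from the prompt above.
if __name__ == "__main__":
    it = RLEIterator([3, 8, 0, 9, 2, 5])
    assert [it.next(2), it.next(1), it.next(1), it.next(2)] == [8, 8, 5, -1]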
| [
"[email protected]"
] | |
f890543311bf3991003bb75799e29477022c6245 | 00ce0f4d0c380d60cb336484200153636b249120 | /hearthbreaker/effects/minion.py | 18d49d2a99c6775f7c8aca516afe987f51750f98 | [
"MIT"
] | permissive | tezheng/hearthbreaker | 21784aeba11f557703e22a23af54886c496d3fec | 169ad0d00e62300054e7cbaf5562d750f28730a8 | refs/heads/master | 2021-01-15T14:30:05.542012 | 2014-09-24T20:03:12 | 2014-09-24T20:03:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,857 | py | import copy
import json
import abc
from hearthbreaker.constants import MINION_TYPE
import hearthbreaker.game_objects
class MinionEffect (metaclass=abc.ABCMeta):
def __init__(self):
self.target = None
def set_target(self, target):
self.target = target
@abc.abstractmethod
def apply(self):
pass
@abc.abstractmethod
def unapply(self):
pass
def __str__(self):
return json.dumps(self.__to_json__())
@abc.abstractmethod
def __to_json__(self):
pass
@staticmethod
def from_json(game, action, *args, **kwargs):
__class_mappings = {
"immune": Immune,
"charge_aura": ChargeAura,
"stats_aura": StatsAura,
"increase_battlecry": IncreaseBattlecryMinionCost,
"double_deathrattle": DoubleDeathrattle,
"heal_as_damage": HealAsDamage,
"mana_filter": ManaFilter,
"buff": Buff,
"kill": Kill,
"freeze": Freeze,
"heal": Heal,
"damage": Damage,
"add_card": AddCard,
"draw": Draw,
"summon": Summon,
"resurrect_friendly": ResurrectFriendlyMinionsAtEndOfTurn,
"original_deathrattle": OriginalDeathrattle,
"charge": Charge,
"taunt": Taunt,
"stealth": Stealth,
"no_spell_target": NoSpellTarget,
"change_attack": ChangeAttack,
"change_health": ChangeHealth,
}
if action in __class_mappings:
clazz = __class_mappings[action]
obj = clazz(*args, **kwargs)
return obj
else:
return None
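# Illustrative round-trip (added sketch): effects serialize themselves via
# __to_json__ and are rebuilt through the dispatch table above, e.g.
#   eff = MinionEffect.from_json(game, **{"action": "charge"})
# returns a Charge instance; unknown action strings yield None.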
class Immune(MinionEffect):
"""
Gives a character immunity. This immunity will last until the end of the player' turn
"""
def apply(self):
self.target.immune = True
self.target.game.current_player.bind("turn_ended", self.remove_immunity)
def unapply(self):
self.remove_immunity()
self.target.game.current_player.unbind("turn_ended", self.remove_immunity)
def remove_immunity(self):
self.target.immune = False
def __to_json__(self):
return {
"action": "immune",
}
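# Example (added sketch): a one-turn immunity could be granted with
#   minion.add_effect(Immune())
# where add_effect is assumed to be the hook that calls set_target()/apply().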
class TransientEffect(MinionEffect):
"""
TransientEffects are used only for serialization and de-serialization
What they do is tracked by the game engine itself, and doesn't need an effect in the list of effects.
As such, these effects are generated at the time a minion is serialized, and removed when it is deserialized
"""
def unapply(self):
pass
class Charge(TransientEffect):
"""
Gives a minion charge.
"""
def apply(self):
self.target.charge = True
def __to_json__(self):
return {
"action": "charge"
}
class Taunt(TransientEffect):
    """
    Gives a minion taunt.
    """
def apply(self):
self.target.taunt = True
def __to_json__(self):
return {
"action": "taunt"
}
class Stealth(TransientEffect):
"""
Gives a minion stealth
"""
def apply(self):
self.target.stealth = True
def __to_json__(self):
return {
"action": "stealth"
}
class ChangeAttack(TransientEffect):
"""
Changes the attack of a minion
"""
def __init__(self, amount):
super().__init__()
self.amount = amount
def apply(self):
self.target.attack_delta = self.amount
def __to_json__(self):
return {
"action": "change_attack",
"amount": self.amount,
}
class ChangeHealth(TransientEffect):
"""
Changes the max health of a minion
"""
def __init__(self, amount):
super().__init__()
self.amount = amount
def apply(self):
self.target.health_delta = self.amount
def __to_json__(self):
return {
"action": "change_health",
"amount": self.amount,
}
class NoSpellTarget(TransientEffect):
"""
Keeps a minion from being targeted by spells (can still be targeted by battlecries)
"""
def apply(self):
self.target.can_be_targeted_by_spells = False
def __to_json__(self):
return {
"action": "no_spell_target"
}
class Aura():
def __init__(self, apply_func, unapply_func, filter_func):
self.apply = apply_func
self.unapply = unapply_func
self.filter = filter_func
class AuraEffect(MinionEffect):
    def __init__(self, apply_aura, unapply_aura, minion_filter="minion", players="friendly", include_self=False):
        super().__init__()
        self.apply_aura = apply_aura
self.unapply_aura = unapply_aura
self.minion_filter = minion_filter
self.players = players
self.aura = None
self.include_self = include_self
def apply(self):
if self.minion_filter == "minion":
if self.include_self:
filter_func = lambda m: True
else:
filter_func = lambda m: m is not self.target
elif self.minion_filter == "adjacent":
filter_func = lambda m: m.index == self.target.index + 1 or m.index == self.target.index - 1
else:
type_id = MINION_TYPE.from_str(self.minion_filter)
if self.include_self:
filter_func = lambda m: m.card.minion_type == type_id
else:
filter_func = lambda m: m is not self.target and m.card.minion_type == type_id
if self.players == "friendly":
players = [self.target.player]
elif self.players == "enemy":
players = [self.target.player.opponent]
elif self.players == "both":
players = [self.target.player, self.target.player.opponent]
self.aura = Aura(self.apply_aura, self.unapply_aura, filter_func)
for player in players:
player.auras.append(self.aura)
def unapply(self):
if self.players == "friendly":
players = [self.target.player]
elif self.players == "enemy":
players = [self.target.player.opponent]
elif self.players == "both":
players = [self.target.player, self.target.player.opponent]
for player in players:
player.auras.remove(self.aura)
def __str__(self):
return json.dumps(self.__to_json__())
def __to_json__(self):
return {
"filter": self.minion_filter,
"players": self.players,
"include_self": self.include_self
}
class ChargeAura(AuraEffect):
"""
A Charge Aura gives affected minions charge. Whether the minions are friendly or not as well as what
type of minions are affected can be customized
"""
def __init__(self, players="friendly", minion_filter="minion", include_self=False):
"""
Create a new ChargeAura
:param string players: Whose minions should be given charge. Possible values are "friendly", "enemy" and "both"
:param string minion_filter: A string representing either a minion type ("Beast", "Dragon", etc.) or "minion"
for any type of minion
:param boolean include_self: Whether or not this aura should also affect the minion that created it.
"""
super().__init__(self.give_charge, self.take_charge, minion_filter, players, include_self)
self.affected_minions = set()
def give_charge(self, minion):
if not minion.charge:
minion.charge = True
self.affected_minions.add(minion)
def take_charge(self, minion):
if minion in self.affected_minions:
minion.charge = False
self.affected_minions.remove(minion)
    def __to_json__(self):
        # dict.update() returns None, so build the dict before returning it.
        s_json = super().__to_json__()
        s_json.update({
            "type": "charge"
        })
        return s_json
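# Example (added sketch): give every other friendly Beast charge:
#   minion.add_effect(ChargeAura(players="friendly", minion_filter="Beast"))
# (add_effect is assumed to be the hook that calls set_target()/apply().)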
class StatsAura(AuraEffect):
"""
A StatsAura increases the health and/or attack of affected minions. Whether the minions are friendly or not as well
as what type of minions are affected can be customized.
"""
def __init__(self, attack=0, health=0, players="friendly", minion_filter="minion"):
"""
Create a new StatsAura
:param int attack: The amount to increase this minion's attack by
:param int health: The amount to increase this minion's health by
        :param string players: Whose minions should be affected. Possible values are "friendly", "enemy" and "both"
:param string minion_filter: A string representing either a minion type ("Beast", "Dragon", etc.) or "minion"
for any type of minion
"""
super().__init__(self.increase_stats, self.decrease_stats, minion_filter, players)
self.attack = attack
self.health = health
def increase_stats(self, minion):
minion.aura_attack += self.attack
minion.aura_health += self.health
minion.health += self.health
def decrease_stats(self, minion):
minion.aura_attack -= self.attack
minion.aura_health -= self.health
if minion.health > minion.calculate_max_health():
minion.health = minion.calculate_max_health()
    def __to_json__(self):
        # dict.update() returns None, so build the dict before returning it.
        s_json = super().__to_json__()
        s_json.update({
            "type": "stats",
            "attack": self.attack,
            "health": self.health,
        })
        return s_json
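# Example (added sketch): a +1/+1 aura over the other friendly Murlocs:
#   minion.add_effect(StatsAura(attack=1, health=1, minion_filter="Murloc"))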
class IncreaseBattlecryMinionCost(MinionEffect):
def __init__(self, amount):
super().__init__()
self.amount = amount
self.mana_filter = None
def apply(self):
amount = self.amount
target = self.target
class Filter:
def __init__(self):
self.amount = -amount
self.filter = lambda c: isinstance(c, hearthbreaker.game_objects.MinionCard) and \
c.create_minion(target.player).battlecry is not None
self.min = 0
self.mana_filter = Filter()
self.target.game.current_player.mana_filters.append(self.mana_filter)
self.target.game.other_player.mana_filters.append(self.mana_filter)
def unapply(self):
self.target.game.current_player.mana_filters.remove(self.mana_filter)
self.target.game.other_player.mana_filters.remove(self.mana_filter)
def __to_json__(self):
return {
"action": "increase_battlecry",
"amount": self.amount,
}
class DoubleDeathrattle(MinionEffect):
def apply(self):
if self.target.player.effect_count[DoubleDeathrattle] == 1:
self.target.player.bind("minion_died", self.trigger_deathrattle)
def unapply(self):
if self.target.player.effect_count[DoubleDeathrattle] == 0:
self.target.player.unbind("minion_died", self.trigger_deathrattle)
def trigger_deathrattle(self, minion, killed_by):
if minion.deathrattle is not None:
minion.deathrattle(minion)
def __to_json__(self):
return {
"action": "double_deathrattle",
}
class HealAsDamage(MinionEffect):
def apply(self):
if self.target.player.effect_count[HealAsDamage] == 1:
self.target.player.heal_does_damage = True
def unapply(self):
if self.target.player.effect_count[HealAsDamage] == 0:
self.target.player.heal_does_damage = False
def __to_json__(self):
return {
"action": "heal_as_damage",
}
class ManaFilter(MinionEffect):
"""
Associates a mana filter with this minion. A mana filter affects a player by making cards of a certain type
cost more or less. The amount to change, player affected, and cards changed can all be customized
"""
def __init__(self, amount, filter_type="card", minimum=0, players="friendly"):
"""
Creates a new mana filter
:param int amount: The amount to reduce mana by (can be negative)
:param string filter_type: A filter to determine which cards can be affected. Should be one of "card",
"spell", "secret" or "minion"
:param int minimum: The least amount that this filter can adjust the card to
"""
super().__init__()
self.amount = amount
self.minimum = minimum
self.filter_type = filter_type
self.filter_object = None
self.players = players
def apply(self):
if self.filter_type == "minion":
my_filter = lambda c: isinstance(c, hearthbreaker.game_objects.MinionCard)
elif self.filter_type == "spell":
my_filter = lambda c: c.is_spell()
elif self.filter_type == "secret":
my_filter = lambda c: isinstance(c, hearthbreaker.game_objects.SecretCard)
else:
my_filter = lambda c: True
class Filter:
def __init__(self, amount, minimum, filter):
self.amount = amount
self.min = minimum
self.filter = filter
self.filter_object = Filter(self.amount, self.minimum, my_filter)
if self.players == "friendly" or self.players == "both":
self.target.player.mana_filters.append(self.filter_object)
if self.players == "enemy" or self.players == "both":
self.target.player.opponent.mana_filters.append(self.filter_object)
def unapply(self):
if self.players == "friendly" or self.players == "both":
self.target.player.mana_filters.remove(self.filter_object)
if self.players == "enemy" or self.players == "both":
self.target.player.opponent.mana_filters.remove(self.filter_object)
def __str__(self):
return "ManaFilter({0}, {1}, {2}, {3})".format(self.amount, self.minimum, self.filter_type, self.players)
def __to_json__(self):
return {
"action": "mana_filter",
"amount": self.amount,
"minimum": self.minimum,
"players": self.players
}
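# Example (added sketch): make the owner's minion cards cost 2 less, with a
# floor of 1. Positive amounts are assumed to act as a discount, mirroring
# the sign convention used by IncreaseBattlecryMinionCost above:
#   minion.add_effect(ManaFilter(amount=2, filter_type="minion", minimum=1))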
class EventEffect(MinionEffect, metaclass=abc.ABCMeta):
def __init__(self, when, minion_filter, target, players, include_self):
super().__init__()
self.when = when
self.minion_filter = minion_filter
self.action_target = target
self.players = players
self.include_self = include_self
self.other = None
def apply(self):
if self.players == "friendly":
players = [self.target.player]
elif self.players == "enemy":
players = [self.target.player.opponent]
elif self.players == "both":
players = [self.target.player, self.target.player.opponent]
else:
raise RuntimeError("Required players to be 'friendly', 'enemy', or 'both', got '{0}".format(self.players))
if self.when == "death":
for player in players:
player.bind("minion_died", self._check_minion_filter)
elif self.when == "damaged":
for player in players:
player.bind("minion_damaged", self._check_minion_filter)
elif self.when == "summoned":
for player in players:
player.bind("minion_summoned", self._check_minion_filter)
elif self.when == "played":
if self.minion_filter == "spell" or self.minion_filter == "secret" or self.minion_filter == "card":
for player in players:
player.bind("card_played", self._check_card_filter)
else:
for player in players:
player.bind("minion_played", self._check_minion_filter)
elif self.when == "placed":
for player in players:
player.bind("minion_placed", self._check_minion_filter)
elif self.when == "after_added":
for player in players:
player.bind("after_minion_added", self._check_minion_filter)
elif self.when == "attack":
self.target.bind("attack", self._check_minion_filter)
elif self.when == "attacked":
self.target.bind("attacked", self._check_minion_filter)
elif self.when == "did_damage":
self.target.bind("did_damage", self._check_minion_filter)
elif self.when == "overloaded":
for player in players:
player.bind("overloaded", self._check_turn_end_filter)
elif self.when == "turn_ended":
for player in players:
player.bind("turn_ended", self._check_turn_end_filter)
elif self.when == "turn_started":
for player in players:
player.bind("turn_started", self._check_turn_end_filter)
def unapply(self):
if self.players == "friendly":
players = [self.target.player]
elif self.players == "enemy":
players = [self.target.player.opponent]
elif self.players == "both":
players = [self.target.player, self.target.player.opponent]
else:
raise RuntimeError("Required players to be 'friendly', 'enemy', or 'both', got '{0}".format(self.players))
if self.when == "death":
for player in players:
player.unbind("minion_died", self._check_minion_filter)
elif self.when == "damaged":
for player in players:
player.unbind("minion_damaged", self._check_minion_filter)
elif self.when == "summoned":
for player in players:
player.unbind("minion_summoned", self._check_minion_filter)
elif self.when == "played":
if self.minion_filter == "spell" or self.minion_filter == "secret" or self.minion_filter == "card":
for player in players:
player.unbind("card_played", self._check_card_filter)
else:
for player in players:
player.unbind("minion_played", self._check_minion_filter)
elif self.when == "placed":
for player in players:
player.unbind("minion_placed", self._check_minion_filter)
elif self.when == "after_added":
for player in players:
player.unbind("after_minion_added", self._check_minion_filter)
elif self.when == "attack":
self.target.unbind("attack", self._check_minion_filter)
elif self.when == "attacked":
self.target.unbind("attacked", self._check_minion_filter)
elif self.when == "did_damage":
self.target.unbind("did_damage", self._check_minion_filter)
elif self.when == "overloaded":
for player in players:
player.unbind("overloaded", self._check_turn_end_filter)
elif self.when == "turn_ended":
for player in players:
player.unbind("turn_ended", self._check_turn_end_filter)
elif self.when == "turn_started":
for player in players:
player.unbind("turn_started", self._check_turn_end_filter)
def _check_minion_filter(self, minion, *args):
self.other = minion
if self.minion_filter == "self":
if minion == self.target:
self._select_target()
elif self.include_self or minion is not self.target:
if self.minion_filter == "minion":
self._select_target()
elif self.minion_filter == "deathrattle" and minion.deathrattle is not None:
self._select_target()
elif self.target is not minion:
try:
type_id = MINION_TYPE.from_str(self.minion_filter)
if minion.card.minion_type == type_id:
self._select_target()
except KeyError:
pass
def _check_card_filter(self, card, index):
if self.minion_filter == "spell" and card.is_spell():
self._select_target()
elif self.minion_filter == "secret" and isinstance(card, hearthbreaker.game_objects.SecretCard):
self._select_target()
elif self.minion_filter == "card":
self._select_target()
def _check_turn_end_filter(self):
if self.minion_filter != "secret" or len(self.target.player.secrets) > 0:
self._select_target()
def _select_target(self):
if self.action_target == "self":
target = self.target
elif self.action_target == "other":
target = self.other
else:
if self.action_target == "random_minion":
targets = copy.copy(self.target.player.minions)
targets.extend(self.target.player.opponent.minions)
targets.remove(self.target)
elif self.action_target == "random_friendly_minion":
targets = copy.copy(self.target.player.minions)
targets.remove(self.target)
elif self.action_target == "random_friendly":
targets = copy.copy(self.target.player.minions)
targets.append(self.target.player.hero)
targets.remove(self.target)
elif self.action_target == "random_enemy_minion":
targets = copy.copy(self.target.player.opponent.minions)
elif self.action_target == "random_enemy":
targets = copy.copy(self.target.player.opponent.minions)
targets.append(self.target.player.opponent.hero)
else:
raise RuntimeError("Expected 'target' to be one of 'self', 'other', 'random', " +
"'random_friendly' or 'random_enemy'. Got '{0}'".format(self.action_target))
target = targets[self.target.game.random(0, len(targets) - 1)]
self._do_action(target)
@abc.abstractmethod
def _do_action(self, target):
pass
def __str__(self):
return json.dumps(self.__to_json__())
def __to_json__(self):
return {
"when": self.when,
"minion_filter": self.minion_filter,
"target": self.action_target,
"players": self.players,
"include_self": self.include_self,
}
class Buff(EventEffect):
def __init__(self, when, minion_filter="self", target="self", attack=0, health=0, players="friendly",
include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
self.attack = attack
self.health = health
def _do_action(self, target):
if self.health > 0:
target.increase_health(self.health)
elif self.health < 0:
target.decrease_health(-self.health)
if self.attack != 0:
target.change_attack(self.attack)
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "buff",
"attack": self.attack,
"health": self.health,
})
return s_json
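# Example (added sketch): "whenever a friendly Beast dies, this minion gains
# +2/+2" could be expressed as:
#   minion.add_effect(Buff("death", minion_filter="Beast", target="self",
#                          attack=2, health=2, players="friendly"))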
class Kill(EventEffect):
def __init__(self, when, minion_filter="self", target="self", players="friendly", include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
def _do_action(self, target):
if isinstance(target, hearthbreaker.game_objects.Minion):
target.die(None)
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "kill",
})
return s_json
class Freeze(EventEffect):
def __init__(self, when, minion_filter="self", target="self", players="friendly", include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
def _do_action(self, target):
if isinstance(target, hearthbreaker.game_objects.Character):
target.freeze()
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "freeze",
})
return s_json
class Heal(EventEffect):
def __init__(self, when, amount, minion_filter="self", target="self", players="friendly", include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
self.amount = amount
def _do_action(self, target):
if isinstance(target, hearthbreaker.game_objects.Character):
target.heal(self.amount, self.target)
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "heal",
"amount": self.amount
})
return s_json
class Damage(EventEffect):
def __init__(self, when, amount, minion_filter="self", target="self", players="friendly"):
super().__init__(when, minion_filter, target, players, False)
self.amount = amount
def _do_action(self, target):
if isinstance(target, hearthbreaker.game_objects.Character):
target.damage(self.amount, self.target)
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "damage",
"amount": self.amount
})
return s_json
class EventEffectPlayer(EventEffect):
def __init__(self, when, minion_filter="self", target="owner", players="friendly", include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
def _select_target(self):
if self.action_target == "owner":
self._do_action(self.target.player)
elif self.action_target == "opponent":
self._do_action(self.target.player.opponent)
elif self.action_target == "p1":
self._do_action(self.target.game.players[0])
elif self.action_target == "p2":
self._do_action(self.target.game.players[1])
class AddCard(EventEffectPlayer):
def __init__(self, when, card, minion_filter="self", target="owner", players="friendly"):
super().__init__(when, minion_filter, target, players)
self.card = card
def _do_action(self, target):
if len(target.hand) < 10:
target.hand.append(self.card())
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "add_card",
"card": self.card.name
})
return s_json
class Draw(EventEffectPlayer):
def __init__(self, when, minion_filter="self", target="owner", players="friendly", probability=1.0):
super().__init__(when, minion_filter, target, players)
self.prob = probability
def _do_action(self, target):
        if self.prob >= 1.0 or target.game.random(0, 100) / 100 < self.prob:
target.draw()
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "draw",
"probability": self.prob
})
return s_json
class Summon(EventEffectPlayer):
def __init__(self, when, card, minion_filter="self", target="owner", players="friendly", include_self=False):
super().__init__(when, minion_filter, target, players, include_self)
self.card = card
def _do_action(self, target):
self.card().summon(target, target.game, len(target.minions))
def __to_json__(self):
s_json = super().__to_json__()
s_json.update({
"action": "summon",
"card": self.card.name
})
return s_json
class ResurrectFriendlyMinionsAtEndOfTurn(MinionEffect):
def __init__(self):
super().__init__()
def apply(self):
self.target.player.bind("turn_ended", self._turn_ended)
self.target.player.opponent.bind("turn_ended", self._turn_ended)
def unapply(self):
self.target.player.unbind("turn_ended", self._turn_ended)
self.target.player.opponent.unbind("turn_ended", self._turn_ended)
def _turn_ended(self):
# Will be called once per Kel'Thuzad on the board
# http://www.hearthhead.com/card=1794/kelthuzad#comments
for minion in sorted(self.target.player.dead_this_turn, key=lambda m: m.born):
minion.card.summon(self.target.player, self.target.game, len(self.target.player.minions))
def __to_json__(self):
return {
"action": "resurrect_friendly"
}
def __str__(self):
return ""
class OriginalDeathrattle(MinionEffect):
def __init__(self):
super().__init__()
def apply(self):
self.target.deathrattle = self.target.card.create_minion(self.target.player).deathrattle
def unapply(self):
self.target.deathrattle = None
def __to_json__(self):
return {
"action": "original_deathrattle"
}
| [
"[email protected]"
] | |
c1ceb404454ce567e24d20d975800d5a86c84ea7 | 4140a1eecd862356d7d41b171d8956a7ab96be7b | /nitro-python-1.0/nssrc/com/citrix/netscaler/nitro/resource/stat/lsn/lsndslite_stats.py | 23db16cfc4e72bdd3a3490761b166565c8a07ce0 | [
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Citrix-TechSpecialist/NS-Init | 1c4311fef80d47d80fb5bfe107df058f5ff93e20 | bd1b695584a6acadec0140457782c7f4e97c266b | refs/heads/master | 2020-12-02T10:00:13.245312 | 2017-07-09T09:30:51 | 2017-07-09T09:30:51 | 96,673,273 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,736 | py | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lsndslite_stats(base_resource) :
def __init__(self) :
self._clearstats = ""
self._lsntotdsliterxpkts = 0
self._lsndsliterxpktsrate = 0
self._lsntotdsliterxbytes = 0
self._lsndsliterxbytesrate = 0
self._lsntotdslitetxpkts = 0
self._lsndslitetxpktsrate = 0
self._lsntotdslitetxbytes = 0
self._lsndslitetxbytesrate = 0
self._lsntotdslitetcprxpkts = 0
self._lsndslitetcprxpktsrate = 0
self._lsntotdslitetcprxbytes = 0
self._lsndslitetcprxbytesrate = 0
self._lsntotdslitetcptxpkts = 0
self._lsndslitetcptxpktsrate = 0
self._lsntotdslitetcptxbytes = 0
self._lsndslitetcptxbytesrate = 0
self._lsntotdslitetcpdrppkts = 0
self._lsndslitetcpdrppktsrate = 0
self._lsncurdslitetcpsessions = 0
self._lsncurdslitetcpsessionsrate = 0
self._lsntotdsliteudprxpkts = 0
self._lsndsliteudprxpktsrate = 0
self._lsntotdsliteudprxbytes = 0
self._lsndsliteudprxbytesrate = 0
self._lsntotdsliteudptxpkts = 0
self._lsndsliteudptxpktsrate = 0
self._lsntotdsliteudptxbytes = 0
self._lsndsliteudptxbytesrate = 0
self._lsntotdsliteudpdrppkts = 0
self._lsndsliteudpdrppktsrate = 0
self._lsncurdsliteudpsessions = 0
self._lsncurdsliteudpsessionsrate = 0
self._lsntotdsliteicmprxpkts = 0
self._lsndsliteicmprxpktsrate = 0
self._lsntotdsliteicmprxbytes = 0
self._lsndsliteicmprxbytesrate = 0
self._lsntotdsliteicmptxpkts = 0
self._lsndsliteicmptxpktsrate = 0
self._lsntotdsliteicmptxbytes = 0
self._lsndsliteicmptxbytesrate = 0
self._lsntotdsliteicmpdrppkts = 0
self._lsndsliteicmpdrppktsrate = 0
self._lsncurdsliteicmpsessions = 0
self._lsncurdsliteicmpsessionsrate = 0
self._lsncurdslitesessions = 0
self._lsncurdslitesessionsrate = 0
self._lsndslitecursubscribers = 0
self._lsndslitecursubscribersrate = 0
@property
def clearstats(self) :
ur"""Clear the statsistics / counters.<br/>Possible values = basic, full.
"""
try :
return self._clearstats
except Exception as e:
raise e
@clearstats.setter
def clearstats(self, clearstats) :
ur"""Clear the statsistics / counters
"""
try :
self._clearstats = clearstats
except Exception as e:
raise e
@property
def lsntotdsliteudpdrppkts(self) :
ur"""Number of LSN DS-Lite UDP Dropped packets.
"""
try :
return self._lsntotdsliteudpdrppkts
except Exception as e:
raise e
@property
def lsndsliteudprxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteudprxpkts.
"""
try :
return self._lsndsliteudprxpktsrate
except Exception as e:
raise e
@property
def lsntotdsliterxpkts(self) :
ur"""Total number of LSN DS-Lite rx pkts.
"""
try :
return self._lsntotdsliterxpkts
except Exception as e:
raise e
@property
def lsntotdslitetxbytes(self) :
ur"""Total number of LSN DS-Lite tx bytes.
"""
try :
return self._lsntotdslitetxbytes
except Exception as e:
raise e
@property
def lsncurdsliteicmpsessionsrate(self) :
ur"""Rate (/s) counter for lsncurdsliteicmpsessions.
"""
try :
return self._lsncurdsliteicmpsessionsrate
except Exception as e:
raise e
@property
def lsntotdsliteudptxbytes(self) :
ur"""Number of LSN DS-Lite UDP Transmitted bytes.
"""
try :
return self._lsntotdsliteudptxbytes
except Exception as e:
raise e
@property
def lsntotdsliteicmptxpkts(self) :
ur"""Number of LSN DS-Lite ICMP Transmitted packets.
"""
try :
return self._lsntotdsliteicmptxpkts
except Exception as e:
raise e
@property
def lsndslitetxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdslitetxpkts.
"""
try :
return self._lsndslitetxpktsrate
except Exception as e:
raise e
@property
def lsntotdsliteicmptxbytes(self) :
ur"""Number of LSN DS-Lite ICMP Transmitted bytes.
"""
try :
return self._lsntotdsliteicmptxbytes
except Exception as e:
raise e
@property
def lsntotdsliterxbytes(self) :
ur"""Total number of LSN DS-Lite rx bytes.
"""
try :
return self._lsntotdsliterxbytes
except Exception as e:
raise e
@property
def lsntotdsliteicmpdrppkts(self) :
ur"""Number of LSN DS-Lite ICMP Dropped packets.
"""
try :
return self._lsntotdsliteicmpdrppkts
except Exception as e:
raise e
@property
def lsncurdslitetcpsessionsrate(self) :
ur"""Rate (/s) counter for lsncurdslitetcpsessions.
"""
try :
return self._lsncurdslitetcpsessionsrate
except Exception as e:
raise e
@property
def lsndsliteudptxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdsliteudptxbytes.
"""
try :
return self._lsndsliteudptxbytesrate
except Exception as e:
raise e
@property
def lsntotdsliteudprxbytes(self) :
ur"""Number of LSN DS-Lite UDP Received bytes.
"""
try :
return self._lsntotdsliteudprxbytes
except Exception as e:
raise e
@property
def lsndsliterxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdsliterxbytes.
"""
try :
return self._lsndsliterxbytesrate
except Exception as e:
raise e
@property
def lsncurdsliteicmpsessions(self) :
ur"""Number of LSN DS-Lite ICMP Current Sessions.
"""
try :
return self._lsncurdsliteicmpsessions
except Exception as e:
raise e
@property
def lsndsliteicmprxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdsliteicmprxbytes.
"""
try :
return self._lsndsliteicmprxbytesrate
except Exception as e:
raise e
@property
def lsndsliteudpdrppktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteudpdrppkts.
"""
try :
return self._lsndsliteudpdrppktsrate
except Exception as e:
raise e
@property
def lsntotdslitetcpdrppkts(self) :
ur"""Number of LSN DS-Lite TCP Dropped packets.
"""
try :
return self._lsntotdslitetcpdrppkts
except Exception as e:
raise e
@property
def lsntotdsliteicmprxbytes(self) :
ur"""Number of LSN DS-Lite ICMP Received bytes.
"""
try :
return self._lsntotdsliteicmprxbytes
except Exception as e:
raise e
@property
def lsncurdslitesessionsrate(self) :
ur"""Rate (/s) counter for lsncurdslitesessions.
"""
try :
return self._lsncurdslitesessionsrate
except Exception as e:
raise e
@property
def lsntotdsliteicmprxpkts(self) :
ur"""Number of LSN DS-Lite ICMP Received packets.
"""
try :
return self._lsntotdsliteicmprxpkts
except Exception as e:
raise e
@property
def lsntotdslitetxpkts(self) :
ur"""Total number of LSN DS-Lite tx pkts.
"""
try :
return self._lsntotdslitetxpkts
except Exception as e:
raise e
@property
def lsncurdsliteudpsessionsrate(self) :
ur"""Rate (/s) counter for lsncurdsliteudpsessions.
"""
try :
return self._lsncurdsliteudpsessionsrate
except Exception as e:
raise e
@property
def lsndslitetcptxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdslitetcptxpkts.
"""
try :
return self._lsndslitetcptxpktsrate
except Exception as e:
raise e
@property
def lsntotdslitetcptxpkts(self) :
ur"""Number of LSN DS-Lite TCP Transmitted packets.
"""
try :
return self._lsntotdslitetcptxpkts
except Exception as e:
raise e
@property
def lsndsliteicmptxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteicmptxpkts.
"""
try :
return self._lsndsliteicmptxpktsrate
except Exception as e:
raise e
@property
def lsndsliteicmpdrppktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteicmpdrppkts.
"""
try :
return self._lsndsliteicmpdrppktsrate
except Exception as e:
raise e
@property
def lsntotdsliteudprxpkts(self) :
ur"""Number of LSN DS-Lite UDP Received packets.
"""
try :
return self._lsntotdsliteudprxpkts
except Exception as e:
raise e
@property
def lsncurdslitetcpsessions(self) :
ur"""Number of LSN DS-Lite TCP Current Sessions.
"""
try :
return self._lsncurdslitetcpsessions
except Exception as e:
raise e
@property
def lsndslitetxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdslitetxbytes.
"""
try :
return self._lsndslitetxbytesrate
except Exception as e:
raise e
@property
def lsntotdslitetcprxbytes(self) :
ur"""Number of LSN DS-Lite TCP Received bytes.
"""
try :
return self._lsntotdslitetcprxbytes
except Exception as e:
raise e
@property
def lsndsliteicmptxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdsliteicmptxbytes.
"""
try :
return self._lsndsliteicmptxbytesrate
except Exception as e:
raise e
@property
def lsndsliteicmprxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteicmprxpkts.
"""
try :
return self._lsndsliteicmprxpktsrate
except Exception as e:
raise e
@property
def lsncurdslitesessions(self) :
ur"""Current number of LSN DS-Lite sessions.
"""
try :
return self._lsncurdslitesessions
except Exception as e:
raise e
@property
def lsndslitetcprxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdslitetcprxbytes.
"""
try :
return self._lsndslitetcprxbytesrate
except Exception as e:
raise e
@property
def lsndslitecursubscribers(self) :
ur"""Current number of LSN DS-Lite subscribers.
"""
try :
return self._lsndslitecursubscribers
except Exception as e:
raise e
@property
def lsntotdslitetcprxpkts(self) :
ur"""Number of LSN DS-Lite TCP Received packets.
"""
try :
return self._lsntotdslitetcprxpkts
except Exception as e:
raise e
@property
def lsndsliterxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliterxpkts.
"""
try :
return self._lsndsliterxpktsrate
except Exception as e:
raise e
@property
def lsncurdsliteudpsessions(self) :
ur"""Number of LSN DS-Lite UDP Current Sessions.
"""
try :
return self._lsncurdsliteudpsessions
except Exception as e:
raise e
@property
def lsndslitetcprxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdslitetcprxpkts.
"""
try :
return self._lsndslitetcprxpktsrate
except Exception as e:
raise e
@property
def lsntotdsliteudptxpkts(self) :
ur"""Number of LSN DS-Lite UDP Transmitted packets.
"""
try :
return self._lsntotdsliteudptxpkts
except Exception as e:
raise e
@property
def lsntotdslitetcptxbytes(self) :
ur"""Number of LSN DS-Lite TCP Transmitted bytes.
"""
try :
return self._lsntotdslitetcptxbytes
except Exception as e:
raise e
@property
def lsndslitetcpdrppktsrate(self) :
ur"""Rate (/s) counter for lsntotdslitetcpdrppkts.
"""
try :
return self._lsndslitetcpdrppktsrate
except Exception as e:
raise e
@property
def lsndslitetcptxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdslitetcptxbytes.
"""
try :
return self._lsndslitetcptxbytesrate
except Exception as e:
raise e
@property
def lsndsliteudprxbytesrate(self) :
ur"""Rate (/s) counter for lsntotdsliteudprxbytes.
"""
try :
return self._lsndsliteudprxbytesrate
except Exception as e:
raise e
@property
def lsndsliteudptxpktsrate(self) :
ur"""Rate (/s) counter for lsntotdsliteudptxpkts.
"""
try :
return self._lsndsliteudptxpktsrate
except Exception as e:
raise e
@property
def lsndslitecursubscribersrate(self) :
ur"""Rate (/s) counter for lsndslitecursubscribers.
"""
try :
return self._lsndslitecursubscribersrate
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(lsndslite_response, response, self.__class__.__name__.replace('_stats',''))
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.lsndslite
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def get(cls, service, name="", option_="") :
ur""" Use this API to fetch the statistics of all lsndslite_stats resources that are configured on netscaler.
"""
try :
obj = lsndslite_stats()
if not name :
response = obj.stat_resources(service, option_)
return response
except Exception as e:
raise e
class Clearstats:
basic = "basic"
full = "full"
class lsndslite_response(base_response) :
def __init__(self, length=1) :
self.lsndslite = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.lsndslite = [lsndslite_stats() for _ in range(length)]
| [
"[email protected]"
] | |
69a1f0b6fd6245b262107582163cc8ddd44ea88e | 38a92e99215d4bd6146374408513e7b8446ec828 | /ocpu/views.py | 59a753c4ddfbdf47b2487cad764f92feee416851 | [] | no_license | andi-nl/ANDI-frontend | b953ca50d1c1f3a4d6d12de8e68b7d81e815f0dc | 8d41bc8b6764444ab7b7a7ac053cdf8c317a568a | refs/heads/master | 2020-04-14T23:12:14.309292 | 2017-06-22T12:08:43 | 2017-06-22T12:08:43 | 41,303,252 | 1 | 12 | null | 2017-01-02T13:17:41 | 2015-08-24T13:00:35 | JavaScript | UTF-8 | Python | false | false | 932 | py |
import json
import logging
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
from django.core.exceptions import SuspiciousOperation
from .utils import do_normcomp, do_calccomposite
logger = logging.getLogger(__name__)
@login_required
@csrf_exempt
def compute(request):
logger.info('Called compute')
try:
parameters = json.loads(request.body.decode('utf-8'))
    except (ValueError, UnicodeDecodeError):
raise SuspiciousOperation('Invalid input for ocpu.')
method = parameters.get('method')
if method == 'normcomp':
return do_normcomp(parameters)
elif method == 'calccomposite':
return do_calccomposite(parameters)
else:
msg = 'ocpu called with "{}"; method not implemented'.format(method)
logger.error(msg)
        return JsonResponse({'error': 'method "{}" not implemented in ocpu.'.format(method)})
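# Example request (added sketch) this view dispatches on; the exact parameter
# payloads for normcomp/calccomposite are defined in .utils and are not
# reproduced here:
#   POST body: {"method": "normcomp", ...}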
| [
"[email protected]"
] | |
7b6058bd35243dcac57bec06afb8147e964d4555 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_softwoods.py | bf431aa729f540bf80fb4fb0ce05106eaa0eb798 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py |
from xai.brain.wordbase.nouns._softwood import _SOFTWOOD
# class header
class _SOFTWOODS(_SOFTWOOD, ):
def __init__(self,):
_SOFTWOOD.__init__(self)
self.name = "SOFTWOODS"
self.specie = 'nouns'
self.basic = "softwood"
self.jsondata = {}
| [
"[email protected]"
] | |
850ecdca5bc07271e05e480aefeb8432c7ea8f48 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /EPXH424t2SSjMzms5_5.py | 6f2e5df2ee3869baab5543822cd15128d9777f20 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | """
Create a function that takes both a string and a list of integers and
rearranges the letters in the string to be in the order specified by the index
numbers. Return the "remixed" string.
### Examples
remix("abcd", [0, 3, 1, 2]) ➞ "acdb"
The string you'll be returning will have:
* "a" at index 0
* "b" at index 3
* "c" at index 1
* "d" at index 2
... because the order of those characters maps to their corresponding numbers
in the index list.
remix("PlOt", [1, 3, 0, 2]) ➞ "OPtl"
remix("computer", [0, 2, 1, 5, 3, 6, 7, 4]) ➞ "cmourpte"
### Notes
* Be sure not to change the original case.
* Assume you'll be given a string and list of equal length, both containing valid characters (A-Z, a-z, or 0-9).
* The list of numbers could potentially be more than nine (i.e. double figures).
"""
remix=lambda t,l:''.join(y for _,y in sorted(zip(l,t)))
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.