blob_id (string, 40..40) | directory_id (string, 40..40) | path (string, 3..616) | content_id (string, 40..40) | detected_licenses (sequence, 0..112) | license_type (2 classes) | repo_name (string, 5..115) | snapshot_id (string, 40..40) | revision_id (string, 40..40) | branch_name (777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 .. 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 .. 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 .. 2023-09-06 01:08:06) | github_id (int64, 4.92k..681M, nullable) | star_events_count (int64, 0..209k) | fork_events_count (int64, 0..110k) | gha_license_id (22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 .. 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 .. 2023-08-21 12:35:19, nullable) | gha_language (149 classes) | src_encoding (26 classes) | language (1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 3..10.2M) | extension (188 classes) | content (string, 3..10.2M) | authors (sequence, 1..1) | author_id (string, 1..132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
62363c4bcc24d91b45188cf7f657fda66070fe0d | 37146b1529bfb8094d5ef17734498aba1e701b33 | /python/python_course/pythonStudy4/FangWenKongZhiExample.py | 82883cdaa0bcd2c25e723093041cf2edf8fa576c | [] | no_license | nanfeng729/code-for-test | 9c8e3736ac4c86a43002a658faf37349817de130 | 28071453c38742bffd5b5bdf7461bffdaa6c96be | refs/heads/master | 2022-10-07T17:59:44.558278 | 2020-06-06T10:19:19 | 2020-06-06T10:19:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 289 | py | class fwkz:
a = 0
b = 0
    _c = 0 # define a protected attribute
    __d = 0 # define a private attribute
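    # Name mangling applies to __d: outside the class it is only reachable as _fwkz__d.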
def jisuan(self):
return self.a + self.b
def jisuan2(self):
return self.a + self._c
def jisuan3(self):
return self.b + self.__d | [
"[email protected]"
] | |
c1bc4002b45701e9ddcbdae5fbd9f337effbe930 | 587dbdf730b6cc3e693efc5dca5d83d1dd35ee1a | /leetcode/1501-1800/1785.py | d13fff05ea446efdd48f2dcbc0f23ee12d81b53b | [] | no_license | Rivarrl/leetcode_python | 8db2a15646d68e4d84ab263d8c3b6e38d8e3ea99 | dbe8eb449e5b112a71bc1cd4eabfd138304de4a3 | refs/heads/master | 2021-06-17T15:21:28.321280 | 2021-03-11T07:28:19 | 2021-03-11T07:28:19 | 179,452,345 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | # -*- coding: utf-8 -*-
# ======================================
# @File : 1785
# @Time : 2021/3/8 12:20
# @Author : Rivarrl
# ======================================
from algorithm_utils import *
class Solution:
"""
    [1785. Minimum Elements to Add to Form a Given Sum](https://leetcode-cn.com/problems/minimum-elements-to-add-to-form-a-given-sum/)
"""
@timeit
def minElements(self, nums: List[int], limit: int, goal: int) -> int:
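        # Each added element can change the sum by at most `limit`, so we need
        # ceil(|sum(nums) - goal| / limit) elements; adding (limit - 1) before
        # floor division is the standard integer ceiling trick.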
return (abs(sum(nums) - goal) + limit - 1) // limit
if __name__ == '__main__':
a = Solution()
a.minElements(nums = [1,-1,1], limit = 3, goal = -4)
a.minElements(nums = [1,-10,9,1], limit = 100, goal = 0) | [
"[email protected]"
] | |
6f4f236a04b08ff986588d8d74bf27e19b3776ce | a9958f7c7887a92ec9fc48b02ed8a5cb75a03311 | /db.py | 1b274e58f4478c7f209d2e9b19cf25ce7d613166 | [] | no_license | ahmedfadhil/Dynamically-Weighted-Bandwidth- | 816c18777b49f3520433e65accf9e179f64e0836 | 1c8821aec73f32e704d12cebffcda01d1319bc80 | refs/heads/master | 2021-01-02T09:39:58.814463 | 2017-08-03T21:55:26 | 2017-08-03T21:55:26 | 99,271,051 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,374 | py | import matplotlib.pyplot as plt
from matplotlib import style
import numpy as np
from sklearn.datasets.samples_generator import make_blobs
style.use('ggplot')
X, y = make_blobs(n_samples=15, centers=3, n_features=2)
##X = np.array([[1, 2],
## [1.5, 1.8],
## [5, 8],
## [8, 8],
## [1, 0.6],
## [9, 11],
## [8, 2],
## [10, 2],
## [9, 3]])
##plt.scatter(X[:, 0],X[:, 1], marker = "x", s=150, linewidths = 5, zorder = 10)
##plt.show()
'''
1. Start at every datapoint as a cluster center
2. take mean of radius around cluster, setting that as new cluster center
3. Repeat #2 until convergence.
'''
class Mean_Shift:
def __init__(self, radius=None, radius_norm_step=100):
self.radius = radius
self.radius_norm_step = radius_norm_step
def fit(self, data):
        if self.radius is None:
all_data_centroid = np.average(data, axis=0)
all_data_norm = np.linalg.norm(all_data_centroid)
self.radius = all_data_norm / self.radius_norm_step
print(self.radius)
centroids = {}
for i in range(len(data)):
centroids[i] = data[i]
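        # Linearly decaying weights: nearer distance buckets get larger weights (squared when applied below).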
weights = [i for i in range(self.radius_norm_step)][::-1]
while True:
new_centroids = []
for i in centroids:
in_bandwidth = []
centroid = centroids[i]
for featureset in data:
distance = np.linalg.norm(featureset - centroid)
if distance == 0:
distance = 0.00000000001
weight_index = int(distance / self.radius)
if weight_index > self.radius_norm_step - 1:
weight_index = self.radius_norm_step - 1
to_add = (weights[weight_index] ** 2) * [featureset]
in_bandwidth += to_add
new_centroid = np.average(in_bandwidth, axis=0)
new_centroids.append(tuple(new_centroid))
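            # Deduplicate, then merge centroids that have converged to within one radius of each other.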
uniques = sorted(list(set(new_centroids)))
to_pop = []
for i in uniques:
                for ii in list(uniques):
if i == ii:
pass
elif np.linalg.norm(np.array(i) - np.array(ii)) <= self.radius:
# print(np.array(i), np.array(ii))
to_pop.append(ii)
break
for i in to_pop:
try:
uniques.remove(i)
except:
pass
prev_centroids = dict(centroids)
centroids = {}
for i in range(len(uniques)):
centroids[i] = np.array(uniques[i])
optimized = True
for i in centroids:
if not np.array_equal(centroids[i], prev_centroids[i]):
optimized = False
if optimized:
break
self.centroids = centroids
self.classifications = {}
for i in range(len(self.centroids)):
self.classifications[i] = []
for featureset in data:
# compare distance to either centroid
distances = [np.linalg.norm(featureset - self.centroids[centroid]) for centroid in self.centroids]
# print(distances)
classification = (distances.index(min(distances)))
# featureset that belongs to that cluster
self.classifications[classification].append(featureset)
def predict(self, data):
# compare distance to either centroid
distances = [np.linalg.norm(data - self.centroids[centroid]) for centroid in self.centroids]
classification = (distances.index(min(distances)))
return classification
clf = Mean_Shift()
clf.fit(X)
centroids = clf.centroids
print(centroids)
colors = 10 * ['r', 'g', 'b', 'c', 'k', 'y']
for classification in clf.classifications:
color = colors[classification]
for featureset in clf.classifications[classification]:
plt.scatter(featureset[0], featureset[1], marker="x", color=color, s=150, linewidths=5, zorder=10)
for c in centroids:
plt.scatter(centroids[c][0], centroids[c][1], color='k', marker="*", s=150, linewidths=5)
plt.show() | [
"[email protected]"
] | |
bca17f6f16c5c7b53f36b1772c1609844002a2d0 | 45a61af9028a1805c08b6f7638c7aebe8140bd2d | /Groundwater/mf6/autotest/test_z01_nightly_build_examples.py | 271101a6d317d1429ed62e0954a5f125c5a0fd18 | [] | no_license | gumilar19/Personal | 1c1fac036af3a4b9d4d425b7c8cb604271b94fd2 | c666b07c5184006aca8e6ad946cc98ef72dfe9fe | refs/heads/master | 2023-08-14T20:39:07.164849 | 2021-09-29T11:19:10 | 2021-09-29T11:19:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,397 | py | import os
import sys
import subprocess
import pathlib
try:
import pymake
except:
msg = 'Error. Pymake package is not available.\n'
msg += 'Try installing using the following command:\n'
msg += ' pip install https://github.com/modflowpy/pymake/zipball/master'
raise Exception(msg)
try:
import flopy
except:
msg = 'Error. FloPy package is not available.\n'
msg += 'Try installing using the following command:\n'
msg += ' pip install flopy'
raise Exception(msg)
from simulation import Simulation
def get_example_directory(base, fdir, subdir='mf6'):
exdir = None
for root, dirs, files in os.walk(base):
for d in dirs:
if d.startswith(fdir):
exdir = os.path.abspath(os.path.join(root, d, subdir))
break
if exdir is not None:
break
return exdir
# find path to modflow6-testmodels or modflow6-testmodels.git directory
home = os.path.expanduser('~')
print('$HOME={}'.format(home))
fdir = 'modflow6-testmodels'
exdir = get_example_directory(home, fdir, subdir='mf6')
if exdir is None:
p = pathlib.Path(os.getcwd())
home = os.path.abspath(pathlib.Path(*p.parts[:2]))
print('$HOME={}'.format(home))
exdir = get_example_directory(home, fdir, subdir='mf6')
if exdir is not None:
assert os.path.isdir(exdir)
def get_branch():
try:
# determine current buildstat branch
b = subprocess.Popen(("git", "status"),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).communicate()[0]
if isinstance(b, bytes):
b = b.decode('utf-8')
# determine current buildstat branch
for line in b.splitlines():
if 'On branch' in line:
branch = line.replace('On branch ', '').rstrip()
except:
branch = None
return branch
def get_mf6_models():
"""
Get a list of test models
"""
# determine if running on travis
is_travis = 'TRAVIS' in os.environ
is_github_action = 'CI' in os.environ
# get current branch
is_CI = False
if is_travis:
is_CI = True
branch = os.environ['BRANCH']
elif is_github_action:
is_CI = True
branch = os.path.basename(os.environ['GITHUB_REF'])
else:
branch = get_branch()
print('On branch {}'.format(branch))
# tuple of example files to exclude
exclude = (None,)
# update exclude
if is_CI:
exclude_CI = ('test022_MNW2_Fig28',
'test007_751x751_confined')
exclude = exclude + exclude_CI
exclude = list(exclude)
# write a summary of the files to exclude
print('list of tests to exclude:')
for idx, ex in enumerate(exclude):
print(' {}: {}'.format(idx + 1, ex))
# build list of directories with valid example files
if exdir is not None:
dirs = [d for d in os.listdir(exdir)
if 'test' in d and d not in exclude]
else:
dirs = []
# exclude dev examples on master or release branches
if 'master' in branch.lower() or 'release' in branch.lower():
drmv = []
for d in dirs:
if '_dev' in d.lower():
drmv.append(d)
for d in drmv:
dirs.remove(d)
# sort in numerical order for case sensitive os
if len(dirs) > 0:
dirs = sorted(dirs, key=lambda v: (v.upper(), v[0].islower()))
# determine if only a selection of models should be run
select_dirs = None
select_packages = None
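    # Optional command-line filters: --sim <names> selects specific models,
    # --pak <packages> keeps models that use the given MODFLOW 6 package types,
    # and --match <substring> filters directory names.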
for idx, arg in enumerate(sys.argv):
if arg.lower() == '--sim':
if len(sys.argv) > idx + 1:
select_dirs = sys.argv[idx + 1:]
break
elif arg.lower() == '--pak':
if len(sys.argv) > idx + 1:
select_packages = sys.argv[idx + 1:]
select_packages = [item.upper() for item in select_packages]
break
elif arg.lower() == '--match':
if len(sys.argv) > idx + 1:
like = sys.argv[idx + 1]
dirs = [item for item in dirs if like in item]
break
# determine if the selection of model is in the test models to evaluate
if select_dirs is not None:
found_dirs = []
for d in select_dirs:
if d in dirs:
found_dirs.append(d)
dirs = found_dirs
if len(dirs) < 1:
msg = 'Selected models not available in test'
print(msg)
# determine if the specified package(s) is in the test models to evaluate
if select_packages is not None:
found_dirs = []
for d in dirs:
pth = os.path.join(exdir, d)
namefiles = pymake.get_namefiles(pth)
ftypes = []
for namefile in namefiles:
ftype = pymake.get_mf6_ftypes(namefile, select_packages)
if ftype not in ftypes:
ftypes += ftype
if len(ftypes) > 0:
ftypes = [item.upper() for item in ftypes]
for pak in select_packages:
if pak in ftypes:
found_dirs.append(d)
break
dirs = found_dirs
if len(dirs) < 1:
msg = 'Selected packages not available ['
for pak in select_packages:
msg += ' {}'.format(pak)
msg += ']'
print(msg)
return dirs
def get_htol(dir):
htol = None
if dir == 'test059_mvlake_laksfr_tr':
if sys.platform.lower() == 'darwin':
htol = 0.002
return htol
def run_mf6(sim):
"""
Run the MODFLOW 6 simulation and compare to existing head file or
appropriate MODFLOW-2005, MODFLOW-NWT, MODFLOW-USG, or MODFLOW-LGR run.
"""
print(os.getcwd())
src = os.path.join(exdir, sim.name)
dst = os.path.join('temp', sim.name)
sim.setup(src, dst)
sim.run()
sim.compare()
sim.teardown()
def test_mf6model():
# determine if test directory exists
dirtest = dir_avail()
if not dirtest:
return
# get a list of test models to run
dirs = get_mf6_models()
# run the test models
for dir in dirs:
yield run_mf6, Simulation(dir, htol=get_htol(dir))
return
def dir_avail():
avail = False
if exdir is not None:
avail = os.path.isdir(exdir)
if not avail:
print('"{}" does not exist'.format(exdir))
print('no need to run {}'.format(os.path.basename(__file__)))
return avail
def main():
# write message
tnam = os.path.splitext(os.path.basename(__file__))[0]
msg = 'Running {} test'.format(tnam)
print(msg)
# determine if test directory exists
dirtest = dir_avail()
if not dirtest:
return
# get a list of test models to run
dirs = get_mf6_models()
# run the test models
for dir in dirs:
sim = Simulation(dir, htol=get_htol(dir))
run_mf6(sim)
return
if __name__ == "__main__":
print('standalone run of {}'.format(os.path.basename(__file__)))
delFiles = True
for idx, arg in enumerate(sys.argv):
if arg.lower() == '--keep':
if len(sys.argv) > idx + 1:
delFiles = False
break
# run main routine
main()
| [
"[email protected]"
] | |
2c84b88b2248d07bc3fdaaa1c84bb232af9210d9 | 6558766df338730772d02a318e65bfa46cff40b6 | /apps/openprofession/migrations/0037_simulizatordata.py | 856c0742ca3f953cf907bf71e7a9709af76ae251 | [] | no_license | ITOO-UrFU/openedu | 02dc265872e2de1d74b1e8eca0c6596c7860841a | 7c6507d671653fc0ccf35b5305f960eb32e7159f | refs/heads/master | 2021-01-20T21:16:39.987744 | 2019-08-07T10:02:12 | 2019-08-07T10:02:12 | 101,761,728 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,403 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-03-13 14:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('openprofession', '0036_pdavailable'),
]
operations = [
migrations.CreateModel(
name='SimulizatorData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fio', models.CharField(max_length=2048, verbose_name='ФИО')),
('email', models.EmailField(max_length=254, verbose_name='Email')),
('phone', models.CharField(max_length=255, verbose_name='Телефон')),
('username', models.CharField(blank=True, max_length=255, null=True, verbose_name='username')),
('password', models.CharField(blank=True, max_length=255, null=True, verbose_name='password')),
('created_at', models.DateTimeField(auto_now_add=True, null=True)),
('updated_at', models.DateTimeField(auto_now=True, null=True)),
],
options={
'verbose_name': 'заявка на участие в симуляторе',
'verbose_name_plural': 'заявки на участие в симуляторе',
},
),
]
| [
"[email protected]"
] | |
bfff0b51dad9c1f7c57317b42ae7678d470157f9 | 5a977855c32226303bdec2585dc7d159a801b12f | /material/codigo/pygame-camera/macroscope/macroscope.py | d3adce138aba14217294e7b8d57667288bf88297 | [] | no_license | javacasm/RaspberryAvanzado | 5eecd62c3598b2e36bc4ee91c3e96b33734903bf | bbccc7b8af8c2c9b5e2e298b3e5c063d9aa056c1 | refs/heads/master | 2021-09-11T16:45:18.263481 | 2018-04-09T22:06:11 | 2018-04-09T22:06:11 | 108,258,204 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,612 | py | #!/usr/bin/env python
# RPi Macroscope by [email protected], December 2017
# https://www.raspberrypi.org/learning/push-button-stop-motion/worksheet/
# https://raspberrypi.stackexchange.com/questions/28302/python-script-with-loop-that-detects-keyboard-input
import os, sys, time
from picamera import PiCamera
from time import sleep
from datetime import datetime, timedelta
from gpiozero import Button, LED
import pygame
import RPi.GPIO as GPIO
from PIL import Image
# Pin assignments
zoomOutButton = 22
zoomInButton = 27
redButton = 17
LEDring = 18
greenLED = 4
# Flags / counters
zoomOutPressed = False
zoomInPressed = False
redButtonPressed = False
redPressCount = 0
LEDringOn = True
OUT = True
IN = False
timeLapseSeconds = 5
msgPostTime = datetime.now()
msgShowTime = 5
helpScreen = False
# GPIO setup
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(zoomOutButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(zoomInButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(redButton, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(LEDring, GPIO.OUT)
GPIO.setup(greenLED, GPIO.OUT)
GPIO.output(LEDring, GPIO.HIGH)
GPIO.output(greenLED, GPIO.LOW)
# Pygame / camera setup
pygame.init()
pygame.display.set_mode((1,1))
camera = PiCamera()
zoomfactor = 0
camera.start_preview()
camera.vflip = True
camera.hflip = True
os.chdir('/boot') # save folders to /boot directory
def zoom(direction):
global zoomfactor
if direction == IN:
zoomfactor = zoomfactor + 10
if zoomfactor > 40:
zoomfactor = 40
if direction == OUT:
zoomfactor = zoomfactor - 10
if zoomfactor < 0:
zoomfactor = 0
zoom1 = zoomfactor / 100.0
zoom2 = 1.0 - zoom1 * 2
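    # picamera's zoom property is a normalised (x, y, w, h) region of interest;
    # shrinking the width/height while offsetting x/y crops in, i.e. zooms.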
camera.zoom = (zoom1, zoom1, zoom2, zoom2)
print(camera.zoom, zoomfactor)
def getFileName():
last_date = time.strftime("%Y%m%d", time.localtime())
img_count = 0
ds = time.strftime("%Y%m%d", time.localtime())
#Figure out if USB drive attached
dirList = os.listdir('/media/pi')
if dirList:
os.chdir('/media/pi/%s' % dirList[0])
else:
os.chdir('./')
if not os.path.isdir(ds):
os.mkdir(ds)
print("%s directory created." % ds)
else: # find highest number
dir_list = os.listdir(os.path.join(os.getcwd(), ds))
max_count = 0
for file_name in dir_list:
try:
count = int(file_name.split("_")[0])
except ValueError:
count = 0
if count >= max_count:
max_count = count + 1
img_count = max_count
print("img_count = %s" % img_count)
start_time = time.localtime()
ds = time.strftime("%Y%m%d", start_time)
ts = time.strftime("%H%M", start_time)
if last_date != ds and os.path.isdir(last_date):
img_count = 0
last_date = time.strftime("%Y%m%d", time.localtime())
if not os.path.isdir(ds):
os.mkdir(ds)
        print("%s directory created." % ds)
img_count = 0
new_name = '%s/%04d_%s_%s' % (ds, img_count, ds, ts)
return new_name
def takePicture():
global msgPostTime
new_name = getFileName()
GPIO.output(greenLED, GPIO.HIGH)
camera.annotate_text = ''
camera.capture('%s.jpg' % new_name, use_video_port=True)
print('capture %s/%s.jpg' % (os.getcwd(), new_name))
camera.annotate_text = 'Saved %s/%s.jpg' % (os.getcwd(), new_name)
msgPostTime = datetime.now()
GPIO.output(greenLED, GPIO.LOW)
def takeVideo():
global msgPostTime
new_name = getFileName()
    o = camera.add_overlay(red_dot.tobytes(), size=red_dot.size, layer=3)
camera.annotate_text = ''
camera.start_recording('%s.h264' % new_name)
print('recording')
GPIO.output(greenLED, GPIO.HIGH)
while camera.recording:
camera.wait_recording(1)
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
camera.stop_recording()
GPIO.output(greenLED, GPIO.LOW)
print('video %s.h264' % new_name)
camera.annotate_text = 'Saved %s.h264' % new_name
camera.remove_overlay(o)
if event.key == pygame.K_q:
GPIO.output(LEDring, GPIO.LOW)
sys.exit()
os.system('sudo MP4Box -add %s.h264 %s.mp4' % (new_name, new_name))
camera.annotate_text = 'Converted to %s.mp4' % new_name
msgPostTime = datetime.now()
def takeSequence(seconds):
o = camera.add_overlay(green_dot.tobytes(), size=green_dot.size, layer=3)
print('starting sequence')
while True:
camera.annotate_text = ''
takePicture()
nextShot = (datetime.now() + timedelta(seconds=seconds)).replace(microsecond=0)
while datetime.now() < nextShot:
sleep(1)
txt = str(int((nextShot- datetime.now()).total_seconds()))
camera.annotate_text = txt
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
camera.remove_overlay(o)
camera.annotate_text = ''
if event.key == pygame.K_q:
GPIO.output(LEDring, GPIO.LOW)
sys.exit()
else:
print('end sequence')
return
# Overlay images; these are referenced by takeVideo/takeSequence and the help
# screen below, so the loads must stay active.
red_dot = Image.open('red_dot.png')
green_dot = Image.open(os.path.join('macroscope', 'green_dot.png'))
help = Image.open(os.path.join('macroscope', 'help.png'))
while True:
# button handling
if(not GPIO.input(zoomInButton)):
if not zoomInPressed:
zoomInPressed = True
zoom(IN)
else:
zoomInPressed = False
if(not GPIO.input(zoomOutButton)):
if not zoomOutPressed:
zoomOutPressed = True
zoom(OUT)
else:
zoomOutPressed = False
# Clear message
if (datetime.now() - timedelta(seconds=msgShowTime)) < msgPostTime:
pass
else:
camera.annotate_text = ''
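    # Red button: a short press (<1 s) takes a photo, holding ~1 s toggles the
    # LED ring, and holding >2 s shuts the Pi down (the count ticks every 0.1 s).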
if(not GPIO.input(redButton)):
while (not GPIO.input(redButton)): # button still pressed
redPressCount += 1
if redPressCount == 10:
if LEDringOn:
GPIO.output(LEDring, GPIO.LOW)
LEDringOn = False
else:
GPIO.output(LEDring, GPIO.HIGH)
LEDringOn = True
if redPressCount > 20:
camera.stop_preview()
GPIO.output(LEDring, GPIO.LOW)
os.system('sudo shutdown now')
sleep(0.1)
if redPressCount < 10:
takePicture()
else:
redPressCount = 0
# key handling
for event in pygame.event.get():
if event.type == pygame.QUIT:
            GPIO.output(LEDring, GPIO.LOW)
pygame.quit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_h:
if not helpScreen:
o = camera.add_overlay(help.tobytes(), size=help.size, layer=3)
helpScreen = True
else:
camera.remove_overlay(o)
helpScreen = False
if event.key == pygame.K_SPACE:
takePicture()
if event.key == pygame.K_v:
takeVideo()
if event.key == pygame.K_t:
takeSequence(timeLapseSeconds)
if event.key == pygame.K_a:
print('camera.resolution: ', camera.resolution)
print('camera.iso: ', camera.iso)
print('camera.exposure_speed: ', camera.exposure_speed)
print('camera.awb_gains: ', camera.awb_gains)
print('camera.awb_mode: ', camera.awb_mode)
if event.key == pygame.K_q:
camera.stop_preview()
                GPIO.output(LEDring, GPIO.LOW)
dirList = os.listdir('/media/pi')
if dirList:
os.system('sync')
os.system('sudo umount /media/pi/%s' % dirList[0])
pygame.quit()
sys.exit()
if event.key == pygame.K_MINUS or event.key == pygame.K_EQUALS:
if event.key == pygame.K_EQUALS:
zoom(IN)
if event.key == pygame.K_MINUS:
zoom(OUT)
if event.key == pygame.K_f:
camera.hflip = not(camera.hflip)
camera.vflip = not(camera.vflip)
print("flip!")
| [
"[email protected]"
] | |
6230c89fbf90c5fe08760c737ce41aeee110b049 | fde10302616f4bbba5a67a33decb65e47765e268 | /misc/v1/reconstruction/meshroom_to_log.py | 798f63beb6c87bdb1d5544ec8dea13d120d761ec | [] | no_license | laurelkeys/ff | b1f562f2e3caf2cd0616ca93fff4fb3872e55cdc | bac774e1f7b3131f559ee3ff1662836c424ebaa5 | refs/heads/master | 2023-02-23T17:46:49.011034 | 2022-01-21T20:31:59 | 2022-01-21T20:31:59 | 214,757,656 | 1 | 1 | null | 2023-02-11T00:30:56 | 2019-10-13T03:58:59 | Python | UTF-8 | Python | false | false | 4,269 | py | import os
import glob
import json
import argparse
import collections
import numpy as np
# ref.:
# [1] https://www.tanksandtemples.org/tutorial/
# [2] https://colmap.github.io/format.html#images-txt
# [3] https://github.com/colmap/colmap/blob/dev/src/estimators/pose.h#L125
# [4] https://github.com/alicevision/meshroom/wiki/Using-known-camera-positions
# [5] https://github.com/colmap/colmap/blob/dev/scripts/python/read_write_model.py
# [6] https://github.com/alicevision/meshroom/issues/787
# FIXME rename, so it's not confused with trajectory_io
class CameraPose:
def __init__(self, pose_id, image_path, log_matrix):
self.id = pose_id
self.image_path = image_path
self.log_matrix = log_matrix
def write_SfM_log(T, i_map, filename):
with open(filename, 'w') as f:
for i, traj in enumerate(T):
metadata = i_map[i]
pose = traj.tolist()
f.write(' '.join(map(str, metadata)) + '\n')
f.write('\n'.join(' '.join(
map('{0:.12f}'.format, pose[i])
) for i in range(4)))
f.write('\n')
def convert_Meshroom_to_log(filename, logfile_out, input_images, formatp):
input_images_list = glob.glob(f"{input_images}/*.{formatp}")
if len(input_images_list) == 0:
print("Warning: no images were found (try setting --formatp)")
input_images_list.sort()
n_of_images = len(input_images_list)
T, i_map, TF, i_mapF = [], [], [], []
views = {}
camera_poses = []
with open(filename, 'r') as sfm_file:
sfm_data = json.load(sfm_file)
for view in sfm_data['views']:
views[view['poseId']] = view['path'] # NOTE equal to the 'viewId'
for camera_pose in sfm_data['poses']:
pose_id = camera_pose['poseId']
pose_transform = camera_pose['pose']['transform']
# 3x3 (column-major) rotation matrix
rotation = np.array(
[float(_) for _ in pose_transform['rotation']]
).reshape((3, 3))
rotation[:, 1:] *= -1 # ref.: [2]
# camera center in world coordinates
center = np.array([float(_) for _ in pose_transform['center']])
# homogeneous transformation matrix
mat = np.identity(4)
mat[:3, :3] = rotation
mat[:3, 3] = center
camera_poses.append(CameraPose(pose_id, views[pose_id], mat))
for pose in camera_poses:
A = np.matrix(pose.log_matrix)
T.append(A.I)
image_name = os.path.basename(pose.image_path)
matching = [i for i, s in enumerate(input_images_list) if image_name in s]
i_map.append([pose.id, matching[0], 0])
for k in range(n_of_images):
try:
# find the k-th view id
view_id = [i for i, item in enumerate(i_map) if k == item[1]][0]
i_mapF.append(np.array([k, k, 0], dtype='int'))
TF.append(T[view_id])
except IndexError:
# assign the identity matrix to the k-th view id
# as the log file needs an entry for every image
i_mapF.append(np.array([k, -1, 0], dtype='int'))
TF.append(np.identity(4))
write_SfM_log(TF, i_mapF, logfile_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Convert Meshroom .sfm data into the Tanks and Temples .log file format"
)
parser.add_argument("in_sfm_fname", help="Input .sfm filename")
parser.add_argument("out_log_fname", help="Output .log filename")
parser.add_argument("images_folder", help="Input images folder path")
parser.add_argument("--formatp", default="jpg", help="Images format")
args = parser.parse_args()
# NOTE .sfm is actually a JSON
_, ext = os.path.splitext(args.in_sfm_fname)
assert ext.lower() in [".sfm", ".json"]
assert os.path.isfile(args.in_sfm_fname)
assert os.path.isdir(args.images_folder)
convert_Meshroom_to_log(
args.in_sfm_fname,
args.out_log_fname,
args.images_folder, args.formatp
)
# e.g.: python meshroom_to_log.py models\Monstree6\Meshroom\publish\cameras.json models\Monstree6\pointcloud\Monstree6_Meshroom_SfM.log models\Monstree6\images\
| [
"[email protected]"
] | |
4a93f895c4f634e938a00892439e5aa761ecf1b5 | 3d61fe0f49f5d344fc32a6faa799f0a46deec9a5 | /2017/AoC-2017-13v2.py | 290341cd10f0980f65f036c7d6c15a02ddab3382 | [] | no_license | sbeaumont/AoC | 558296fd26cd5272e33d3cb9113c09e4945c98ac | 406eda614d8434d8feb71fe1262f1fda54972a12 | refs/heads/master | 2022-12-13T07:38:36.089775 | 2022-12-04T21:11:49 | 2022-12-04T21:11:49 | 75,467,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | PUZZLE_INPUT_FILE_NAME = "AoC-2017-13-input.txt"
# Parse parent and children names
with open(PUZZLE_INPUT_FILE_NAME) as puzzle_input_file:
firewall = {int(line.split(":")[0]): int(line.split(":")[1]) for line in puzzle_input_file.readlines()}
max_depth = max(firewall, key=firewall.get)
def check_layers(wait_time):
severity = 0
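    # A scanner with range r is back at the top every 2*(r-1) picoseconds.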
    for d, r in firewall.items():
at_layer_time = wait_time + d
if at_layer_time % (2*r-2) == 0:
severity += d * r
return severity
print(check_layers(0))
# Part 2: smallest delay that passes uncaught (severity can be 0 even when caught, at depth 0).
def caught(delay):
    return any((delay + d) % (2 * r - 2) == 0 for d, r in firewall.items())

delay = 0
while caught(delay):
    delay += 1
print(delay)
"[email protected]"
] | |
2670d4a865a34c6b12557710f3b157b604b6bf68 | 148cb99e0f23679c20243470ad62dc4155aa5252 | /baseinfo/migrations/0016_auto_20191206_0806.py | 8d98453f8d32b5509559f2cb37242495b58c3609 | [] | no_license | Hamidnet220/tax | 46060f24b55a4f348194599d59247ff9435f4379 | 000051be5df6a98f679d13a94e37b9ee30efd5a9 | refs/heads/master | 2020-06-19T09:41:20.998214 | 2019-12-10T01:01:17 | 2019-12-10T01:01:17 | 196,666,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | # Generated by Django 2.1.7 on 2019-12-06 08:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('baseinfo', '0015_guarantee_guarantee_file'),
]
operations = [
migrations.AlterField(
model_name='guarantee',
name='guarantee_type',
field=models.IntegerField(choices=[(1, ' ضمانت نامه شرکت در مناقصه'), (2, ' ضمانت نامه پیش پرداخت'), (3, ' ضمانت نامه حسن انجام کار')], verbose_name='عنوان ضمانت نامه'),
),
]
| [
"[email protected]"
] | |
b90a0305484644a6728e50d68732ee9e6989bb14 | 478fad340a97fc14d365b95bbd6f8ac1dcc71953 | /121/Solution.py | d76a39e78ef9cadd8e4004cc32002f4a3d0d5986 | [] | no_license | sandyg05/leetcode | 93cca3b3ce4f38cf1ea1c6d3e8400d7b6b776c37 | e9d8036e2be6dbd1b8c958431e07dc35b88ebfa8 | refs/heads/master | 2022-07-16T10:03:59.529470 | 2020-05-13T05:35:49 | 2020-05-13T05:35:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | """
Say you have an array for which the ith element is the price of a given stock on day i.
If you were only permitted to complete at most one transaction (i.e., buy one and sell one share of the stock), design an algorithm to find the maximum profit.
Note that you cannot sell a stock before you buy one.
Example 1:
Input: [7,1,5,3,6,4]
Output: 5
Explanation: Buy on day 2 (price = 1) and sell on day 5 (price = 6), profit = 6-1 = 5.
Not 7-1 = 6, as selling price needs to be larger than buying price.
Example 2:
Input: [7,6,4,3,1]
Output: 0
Explanation: In this case, no transaction is done, i.e. max profit = 0.
"""
class Solution:
def maxProfit(self, prices):
if not prices:
return 0
min_price = prices[0]
max_profit = 0
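        # One pass: track the cheapest price seen so far and the best profit
        # achievable by selling at the current price.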
for num in prices:
if num < min_price:
min_price = num
if num - min_price > max_profit:
max_profit = num - min_price
return max_profit | [
"[email protected]"
] | |
2bc1fcc7b2f69fdf2a3224d4812bd611106212fd | ca3a49676cdf1016b2d729f0432b451d35b7a281 | /bad-solutions/add.py | 698f358d6ce95ac0d0d3832d8c44a19f39928fd9 | [
"MIT"
] | permissive | SquareandCompass/code-align-evals-data | 3bb71b605316f56bb27466f23706a329f3fb4938 | 97446d992c3785d6605f1500b2c9b95d042e7b9c | refs/heads/main | 2023-06-19T12:47:56.277363 | 2021-07-21T00:22:56 | 2021-07-21T00:22:56 | 640,147,842 | 0 | 1 | null | 2023-05-13T06:22:30 | 2023-05-13T06:22:29 | null | UTF-8 | Python | false | false | 575 | py | def add(lst):
"""Given a non-empty list of integers lst. add the even elements that are at odd indices..
Examples:
add([4, 2, 6, 7]) ==> 2
"""
    return sum(lst[i] for i in range(1, len(lst), 2) if lst[i] % 2 == 0)
def check(candidate):
# Check some simple cases
assert candidate([4, 88]) == 88
assert candidate([4, 5, 6, 7, 2, 122]) == 122
assert candidate([4, 0, 6, 7]) == 0
assert candidate([4, 4, 6, 8]) == 12
# Check some edge cases that are easy to work out by hand.
if __name__ == "__main__":
check(add)
| [
"[email protected]"
] | |
a5075c05b906fd9b22238fdec92901e48a23a4c7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02817/s121273903.py | c74297c5df6c42af00d7dd1b1408fea1fb86e8a6 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | x=list(input().split())
print(x[1] + x[0])  # print the two input tokens in swapped order
| [
"[email protected]"
] | |
0e9a38665795bd642e825d58f2ad24a34ebb9439 | f8c3c677ba536fbf5a37ac4343c1f3f3acd4d9b6 | /ICA_SDK/test/test_instrument.py | aeb50805e0f9f55fb14b9f8cfa35dbca74de8c92 | [] | no_license | jsialar/integrated_IAP_SDK | 5e6999b0a9beabe4dfc4f2b6c8b0f45b1b2f33eb | c9ff7685ef0a27dc4af512adcff914f55ead0edd | refs/heads/main | 2023-08-25T04:16:27.219027 | 2021-10-26T16:06:09 | 2021-10-26T16:06:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,573 | py | # coding: utf-8
"""
IAP Services
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import ICA_SDK
from ICA_SDK.models.instrument import Instrument # noqa: E501
from ICA_SDK.rest import ApiException
class TestInstrument(unittest.TestCase):
"""Instrument unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test Instrument
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = ICA_SDK.models.instrument.Instrument() # noqa: E501
if include_optional :
return Instrument(
id = '0',
name = '0',
description = '0',
serial_number = '0',
control_software_version = '0',
operating_software_version = '0',
instrument_type = '0'
)
else :
return Instrument(
)
def testInstrument(self):
"""Test Instrument"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
82e143ab368a2df624d5ba0dd94ba697a8484a59 | 59e87634c67508bf7eba8c8b9845354aefa57bc7 | /DL/yolo/YOLOV1/py_cpu_nms.py | 2ddfa1b49d7699c559417a89c11064070d65ca39 | [] | no_license | Caohengrui/MLAndDL | 48729b94b2232e628b699cf8d0d4a6c6e81a36f5 | d0637f58f45e9c091cd90bbfe9c207223d0994f3 | refs/heads/master | 2023-03-16T01:06:03.316463 | 2020-04-14T07:44:15 | 2020-04-14T07:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import numpy as np
def py_cpu_nms(dets,scores, thresh):
"""Pure Python NMS baseline."""
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
# scores = dets[:, 4]
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
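    # Process boxes in descending score order, greedily keeping the best box
    # and suppressing neighbours whose overlap exceeds the threshold.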
order = scores.argsort()[::-1]
keep = []
while order.size > 0:
i = order[0]
keep.append(i)
xx1 = np.maximum(x1[i], x1[order[1:]])
yy1 = np.maximum(y1[i], y1[order[1:]])
xx2 = np.minimum(x2[i], x2[order[1:]])
yy2 = np.minimum(y2[i], y2[order[1:]])
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
ovr = inter / (areas[i] + areas[order[1:]] - inter)
inds = np.where(ovr <= thresh)[0]
order = order[inds + 1]
return keep
| [
"[email protected]"
] | |
62de88d56a77477d8991a96a5087929d2d3d2770 | 55d6de252e61c4b60688ebd8b1f637807acc1e7c | /usl_recived_forigin_purchased/models/inharitstockpicking.py | 0eebab826a9be7c89947980bd5f2d26cbf056f25 | [] | no_license | mosadiqit/eerna_erp_uslbd | b707a1d49a4fce7c1543b63e0120e8f9b77b26ce | 73e3994a9e32df7809d244eb6592513162ab7853 | refs/heads/main | 2023-06-30T14:53:04.837197 | 2021-08-04T11:30:46 | 2021-08-04T11:30:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,242 | py | from odoo import models, fields, api, _
from odoo.exceptions import UserError, ValidationError
from odoo.osv.osv import osv
from odoo.tools.float_utils import float_compare, float_is_zero, float_round
class StockPickingInharit(models.Model):
_inherit = 'stock.picking'
@api.onchange('commercial_invoice')
def onchange_commercial_invoice(self):
if self.commercial_invoice:
move_id = self.env['account.move'].search([('id','=',self.commercial_invoice.id)])
move_line_id = self.env['account.move.line'].search([('move_id','=',move_id.id),('account_internal_type','=','other')])
for rec in self:
lines = list()
for line in move_line_id:
vals = {
'product_id':line.product_id.id,
'branch_id':self.env.user.branch_id.id,
'product_uom_qty':line.quantity,
'reserved_availability':0,
'quantity_done':0,
'name':line.name,
'product_uom':line.product_id.uom_id.id
}
lines.append((0,0,vals))
rec.move_ids_without_package = lines
print('Hello')
def button_validate(self):
self.ensure_one()
if not self.move_lines and not self.move_line_ids:
raise UserError(_('Please add some items to move.'))
# Clean-up the context key at validation to avoid forcing the creation of immediate
# transfers.
# for rec in self.move_line_ids_without_package.lot_id:
# stock_reserved_check = self.env['stock.quant'].search([('lot_id','=',rec.id),('location_id','=',self.location_id.id)])
# if stock_reserved_check.reserved_quantity == 0:
# print(rec)
ctx = dict(self.env.context)
ctx.pop('default_immediate_transfer', None)
self = self.with_context(ctx)
# add user as a follower
self.message_subscribe([self.env.user.partner_id.id])
# If no lots when needed, raise error
picking_type = self.picking_type_id
precision_digits = self.env['decimal.precision'].precision_get('Product Unit of Measure')
no_quantities_done = all(float_is_zero(move_line.qty_done, precision_digits=precision_digits) for move_line in self.move_line_ids.filtered(lambda m: m.state not in ('done', 'cancel')))
no_reserved_quantities = all(float_is_zero(move_line.product_qty, precision_rounding=move_line.product_uom_id.rounding) for move_line in self.move_line_ids)
if no_reserved_quantities and no_quantities_done:
raise UserError(_('You cannot validate a transfer if no quantites are reserved nor done. To force the transfer, switch in edit more and encode the done quantities.'))
if picking_type.use_create_lots or picking_type.use_existing_lots:
lines_to_check = self.move_line_ids
if not no_quantities_done:
lines_to_check = lines_to_check.filtered(
lambda line: float_compare(line.qty_done, 0,
precision_rounding=line.product_uom_id.rounding)
)
for line in lines_to_check:
product = line.product_id
if product and product.tracking != 'none':
if not line.lot_name and not line.lot_id:
raise UserError(_('You need to supply a Lot/Serial number for product %s.') % product.display_name)
# Propose to use the sms mechanism the first time a delivery
# picking is validated. Whatever the user's decision (use it or not),
# the method button_validate is called again (except if it's cancel),
# so the checks are made twice in that case, but the flow is not broken
sms_confirmation = self._check_sms_confirmation_popup()
if sms_confirmation:
return sms_confirmation
if no_quantities_done:
view = self.env.ref('stock.view_immediate_transfer')
wiz = self.env['stock.immediate.transfer'].create({'pick_ids': [(4, self.id)]})
return {
'name': _('Immediate Transfer?'),
'type': 'ir.actions.act_window',
'view_mode': 'form',
'res_model': 'stock.immediate.transfer',
'views': [(view.id, 'form')],
'view_id': view.id,
'target': 'new',
'res_id': wiz.id,
'context': self.env.context,
}
if self._get_overprocessed_stock_moves() and not self._context.get('skip_overprocessed_check'):
view = self.env.ref('stock.view_overprocessed_transfer')
wiz = self.env['stock.overprocessed.transfer'].create({'picking_id': self.id})
return {
'type': 'ir.actions.act_window',
'view_mode': 'form',
'res_model': 'stock.overprocessed.transfer',
'views': [(view.id, 'form')],
'view_id': view.id,
'target': 'new',
'res_id': wiz.id,
'context': self.env.context,
}
# Check backorder should check for other barcodes
if self._check_backorder():
return self.action_generate_backorder_wizard()
self.action_done()
return
@api.onchange('is_nonsalealewarehouse_transfar')
def select_nonsale_ale_stock(self):
"""
this method is used for transfar page when select lim transfar then it show only lim transfar
:return:
"""
self.branch_id = self.env.user.branch_id
if self.is_nonsalealewarehouse_transfar:
self.is_nonsalealewarehouse_transfar = True
print('come to condition is_nonsalealewarehouse_transfar')
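            # look up the company's dedicated non-saleable (LIM) warehouse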
warehouse = self.env['stock.warehouse'].sudo().search([('is_non_saleable_warehouse', '=', True),('company_id', '=',self.env.user.company_id.id)], limit=1)
print(warehouse.id)
picking_type = self.env['stock.picking.type'].sudo().search(
[('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')])
print(picking_type)
print(picking_type.warehouse_id.name)
self.picking_type_id = picking_type.id
return {
'domain': {
'picking_type_id': [('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')]
},
# 'default_picking_type_id': [('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')]
# lambda self: self.env['stock.picking.type'].browse(self._context.get('default_picking_type_id')).default_location_src_id
}
else:
return {
'domain': {
'picking_type_id': []
}
}
# def _do_partial_func_unreserved(self):
# print('_do_partial_unreserved')
# @api.onchange('fpo_order_id')
# def fpo_fall_into(self):
# print('work')
    is_nonsalealewarehouse_transfar = fields.Boolean(string='Lim transfer', default=False)
commercial_invoice = fields.Many2one('account.move',domain=[('type','=','in_invoice')],string="Commercial Invoice")
def action_assign(self):
""" Check availability of picking moves.
This has the effect of changing the state and reserve quants on available moves, and may
also impact the state of the picking as it is computed based on move's states.
@return: True
"""
res = {}
self.filtered(lambda picking: picking.state == 'draft').action_confirm()
moves = self.mapped('move_lines').filtered(lambda move: move.state not in ('draft', 'cancel', 'done'))
if not moves:
raise UserError(_('Nothing to check the availability for.'))
# If a package level is done when confirmed its location can be different than where it will be reserved.
# So we remove the move lines created when confirmed to set quantity done to the new reserved ones.
package_level_done = self.mapped('package_level_ids').filtered(
lambda pl: pl.is_done and pl.state == 'confirmed')
package_level_done.write({'is_done': False})
is_raise_validation_error = moves._action_assign()
package_level_done.write({'is_done': True})
if is_raise_validation_error:
# message = 'product is no available '
# raise osv.except_osv(_('warning'), _(message))
# res['warning'] = {'title': _('Warning'), 'message': message}
# raise ValueError('product not available')
raise ValidationError('product is no available ')
return True
# fpo_order_id = fields.Many2one('foreign.purchase.order', string= 'Foreign purchase order ')
# @api.onchange('move_ids_without_package.product_uom_qty')
# # def test(self):
# # print('***********************')
# # print('***********************')
# # print('***********************')
| [
"[email protected]"
] | |
95ea3a56c120bb0d2c831b76684d982b54b6c5aa | 68bad4b3d92872bb5b77b4ee503e588d20511a27 | /python/core/test_scripts_MPI/my_population_collect_spikes_mpi.py | ff1caea364466e952b5219ea999cbb2671552f87 | [] | no_license | mickelindahl/bgmodel | 647be626a7311a8f08f3dfc897c6dd4466fc0a92 | 78e6f2b73bbcbecd0dba25caf99f835313c914ee | refs/heads/master | 2023-08-29T13:57:04.122115 | 2022-02-11T14:28:23 | 2022-02-11T14:28:23 | 17,148,386 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | '''
Created on Sep 22, 2014
@author: mikael
'''
import numpy
import pickle
import sys
from toolbox.data_to_disk import mkdir
from toolbox.my_nest import collect_spikes_mpi
from toolbox.parallelization import comm
print sys.argv
fileName, =sys.argv[1:]
fileName+='data'
s,e=numpy.ones(2)*comm.rank(),numpy.ones(2)*comm.rank()+1
s, e= collect_spikes_mpi(s, e)
mkdir('/'.join(fileName.split('/')[0:-1]))
if comm.rank()==0:
print 'File name'
print fileName
if 4<len(fileName) and fileName[-4:]!='.pkl':
fileName=fileName+'.pkl'
f=open(fileName, 'wb') #open in binary mode
pickle.dump([s,e], f, -1)
f.close()
| [
"[email protected]"
] | |
c64bb122fa1b142b05e8315ac85b8ea4cec85786 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /gaussiana/ch3_2019_03_08_14_00_41_432668.py | 4bdc1e00e92765b8d5b29e95dceff6a7256f3781 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | import math
def calcula_gaussiana(x, mi, sigma):
if (sigma == 1 and x == 0 and mi == 0):
return 0
if (sigma == 0 or sigma == - math.sqrt(2*math.pi) or sigma == 1/math.sqrt(2*math.pi)):
return 0
    # Gaussian pdf: (1 / (sigma * sqrt(2*pi))) * exp(-((x - mi) / sigma)**2 / 2)
    return (1 / (sigma * math.sqrt(2 * math.pi))) * math.exp(-0.5 * ((x - mi) / sigma) ** 2)
"[email protected]"
] | |
278be94dc86a4923595fc1db156514e63a55f1c3 | 9abc2f4fbf1b31b5a56507437b4a8d9c3f3db7e6 | /deals/migrations/0001_initial.py | 4a8460e46f0e4b39cc2b66694382c60ac4a670ac | [] | no_license | odbalogun/ticketr | e9fe8461d66dabe395f0e1af8fbecc67dbb16e97 | 94f24c82f407f861f1614a151feb3fdd62b283e5 | refs/heads/master | 2022-11-30T22:40:30.931160 | 2019-08-09T14:34:38 | 2019-08-09T14:34:38 | 188,833,600 | 0 | 0 | null | 2022-11-22T03:50:30 | 2019-05-27T11:50:07 | Python | UTF-8 | Python | false | false | 3,263 | py | # Generated by Django 2.2.1 on 2019-05-06 23:17
import deals.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Categories',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('deleted', models.DateTimeField(editable=False, null=True)),
('name', models.CharField(max_length=100, unique=True, verbose_name='name')),
('slug', models.SlugField(max_length=100, unique=True, verbose_name='slug')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='DealCategories',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('deleted', models.DateTimeField(editable=False, null=True)),
('price', models.FloatField(verbose_name='price')),
('description', models.TextField(verbose_name='description')),
('image', models.ImageField(upload_to=deals.models.deals_image_path, verbose_name='image')),
('quantity', models.IntegerField(null=True, verbose_name='quantity')),
('available_quantity', models.IntegerField(null=True, verbose_name='available quantity')),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='deals.Categories')),
],
),
migrations.CreateModel(
name='Deals',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('deleted', models.DateTimeField(editable=False, null=True)),
('name', models.CharField(max_length=100, unique=True, verbose_name='name')),
('slug', models.SlugField(max_length=100, unique=True, verbose_name='slug')),
('description', models.TextField(verbose_name='description')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
('expiry_date', models.DateField(null=True, verbose_name='expiry date')),
('is_active', models.BooleanField(default=True, verbose_name='is active')),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('options', models.ManyToManyField(to='deals.DealCategories')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='dealcategories',
name='deal',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='deals.Deals'),
),
migrations.AlterUniqueTogether(
name='dealcategories',
unique_together={('category', 'deal')},
),
]
| [
"[email protected]"
] | |
34c06dc74f45348f0075ae426c9ad58a2c008486 | 9bdc2e9f0382bd96ef3af4f9eca94fa58c5a4dc1 | /keras/mnist-privacy/model/pipeline_train.py | 0687d543d7075f6d1210e6bc5a96b7c003608086 | [
"Apache-2.0"
] | permissive | shicongisme/models | 90cf9a84b47c8d2a4de51fdfb7f6c4b9f796e317 | d8df07877aa8b10ce9b84983bb440af75e84dca7 | refs/heads/master | 2022-02-01T12:01:11.443827 | 2019-05-26T22:25:04 | 2019-05-26T22:25:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,112 | py | # Copyright 2018, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Training a CNN on MNIST with differentially private SGD optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from privacy.analysis.rdp_accountant import compute_rdp
from privacy.analysis.rdp_accountant import get_privacy_spent
from privacy.optimizers import dp_optimizer
tf.flags.DEFINE_boolean('dpsgd', True, 'If True, train with DP-SGD. If False,'
'train with vanilla SGD.')
tf.flags.DEFINE_float('learning_rate', 0.08, 'Learning rate for training')
tf.flags.DEFINE_float('noise_multiplier', 1.12,
'Ratio of the standard deviation to the clipping norm')
tf.flags.DEFINE_float('l2_norm_clip', 1.0, 'Clipping norm')
tf.flags.DEFINE_integer('batch_size', 32, 'Batch size')
tf.flags.DEFINE_integer('epochs', 1, 'Number of epochs')
tf.flags.DEFINE_integer('microbatches', 32,
'Number of microbatches (must evenly divide batch_size')
tf.flags.DEFINE_string('model_dir', None, 'Model directory')
tf.flags.DEFINE_string('export_dir', './pipeline_tfserving/0', 'Export dir')
FLAGS = tf.flags.FLAGS
def cnn_model_fn(features, labels, mode):
"""Model function for a CNN."""
# Define CNN architecture using tf.keras.layers.
input_layer = tf.reshape(features['x'], [-1, 28, 28, 1])
y = tf.keras.layers.Conv2D(16, 8,
strides=2,
padding='same',
kernel_initializer='he_normal').apply(input_layer)
y = tf.keras.layers.MaxPool2D(2, 1).apply(y)
y = tf.keras.layers.Conv2D(32, 4,
strides=2,
padding='valid',
kernel_initializer='he_normal').apply(y)
y = tf.keras.layers.MaxPool2D(2, 1).apply(y)
y = tf.keras.layers.Flatten().apply(y)
y = tf.keras.layers.Dense(32, kernel_initializer='he_normal').apply(y)
logits = tf.keras.layers.Dense(10, kernel_initializer='he_normal').apply(y)
# Calculate loss as a vector (to support microbatches in DP-SGD).
vector_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits)
# Define mean of loss across minibatch (for reporting through tf.Estimator).
scalar_loss = tf.reduce_mean(vector_loss)
# Configure the training op (for TRAIN mode).
if mode == tf.estimator.ModeKeys.TRAIN:
if FLAGS.dpsgd:
# Use DP version of GradientDescentOptimizer. For illustration purposes,
# we do that here by calling make_optimizer_class() explicitly, though DP
# versions of standard optimizers are available in dp_optimizer.
dp_optimizer_class = dp_optimizer.make_optimizer_class(
tf.train.GradientDescentOptimizer)
optimizer = dp_optimizer_class(
learning_rate=FLAGS.learning_rate,
noise_multiplier=FLAGS.noise_multiplier,
l2_norm_clip=FLAGS.l2_norm_clip,
num_microbatches=FLAGS.microbatches)
opt_loss = vector_loss
else:
optimizer = tf.train.GradientDescentOptimizer(
learning_rate=FLAGS.learning_rate)
opt_loss = scalar_loss
global_step = tf.train.get_global_step()
train_op = optimizer.minimize(loss=opt_loss, global_step=global_step)
# In the following, we pass the mean of the loss (scalar_loss) rather than
# the vector_loss because tf.estimator requires a scalar loss. This is only
# used for evaluation and debugging by tf.estimator. The actual loss being
# minimized is opt_loss defined above and passed to optimizer.minimize().
return tf.estimator.EstimatorSpec(mode=mode,
loss=scalar_loss,
train_op=train_op)
# Add evaluation metrics (for EVAL mode).
elif mode == tf.estimator.ModeKeys.EVAL:
eval_metric_ops = {
'accuracy':
tf.metrics.accuracy(
labels=labels,
predictions=tf.argmax(input=logits, axis=1))
}
return tf.estimator.EstimatorSpec(mode=mode,
loss=scalar_loss,
eval_metric_ops=eval_metric_ops)
def load_mnist():
"""Loads MNIST and preprocesses to combine training and validation data."""
train, test = tf.keras.datasets.mnist.load_data()
train_data, train_labels = train
test_data, test_labels = test
train_data = np.array(train_data, dtype=np.float32) / 255
test_data = np.array(test_data, dtype=np.float32) / 255
train_labels = np.array(train_labels, dtype=np.int32)
test_labels = np.array(test_labels, dtype=np.int32)
assert train_data.min() == 0.
assert train_data.max() == 1.
assert test_data.min() == 0.
assert test_data.max() == 1.
assert len(train_labels.shape) == 1
assert len(test_labels.shape) == 1
return train_data, train_labels, test_data, test_labels
def main(unused_argv):
tf.logging.set_verbosity(tf.logging.INFO)
if FLAGS.batch_size % FLAGS.microbatches != 0:
raise ValueError('Number of microbatches should divide evenly batch_size')
# Load training and test data.
train_data, train_labels, test_data, test_labels = load_mnist()
# Instantiate the tf.Estimator.
mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn,
model_dir=FLAGS.model_dir)
# Create tf.Estimator input functions for the training and test data.
train_input_fn = tf.estimator.inputs.numpy_input_fn(
x={'x': train_data},
y=train_labels,
batch_size=FLAGS.batch_size,
num_epochs=FLAGS.epochs,
shuffle=True)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
x={'x': test_data},
y=test_labels,
num_epochs=1,
shuffle=False)
# Define a function that computes privacy budget expended so far.
def compute_epsilon(steps):
"""Computes epsilon value for given hyperparameters."""
if FLAGS.noise_multiplier == 0.0:
return float('inf')
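    # Rényi divergence orders at which the RDP accountant tracks privacy loss.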
orders = [1 + x / 10. for x in range(1, 100)] + list(range(12, 64))
sampling_probability = FLAGS.batch_size / 60000
rdp = compute_rdp(q=sampling_probability,
noise_multiplier=FLAGS.noise_multiplier,
steps=steps,
orders=orders)
# Delta is set to 1e-5 because MNIST has 60000 training points.
return get_privacy_spent(orders, rdp, target_delta=1e-5)[0]
# Training loop.
steps_per_epoch = 60000 // FLAGS.batch_size
for epoch in range(1, FLAGS.epochs + 1):
# Train the model for one epoch.
mnist_classifier.train(input_fn=train_input_fn, steps=steps_per_epoch)
# Evaluate the model and print results
eval_results = mnist_classifier.evaluate(input_fn=eval_input_fn)
test_accuracy = eval_results['accuracy']
print('Test accuracy after %d epochs is: %.3f' % (epoch, test_accuracy))
# Compute the privacy budget expended so far.
if FLAGS.dpsgd:
eps = compute_epsilon(epoch * steps_per_epoch)
print('For delta=1e-5, the current epsilon is: %.2f' % eps)
else:
print('Trained with vanilla non-private SGD optimizer')
# Export the model
if FLAGS.export_dir is not None:
# [-1, 28, 28, 1]
image = tf.placeholder(tf.float32, [None, 28, 28])
input_fn = tf.estimator.export.build_raw_serving_input_receiver_fn({
'x': image,
})
mnist_classifier.export_savedmodel(FLAGS.export_dir, input_fn)
if __name__ == '__main__':
tf.app.run()
| [
"[email protected]"
] | |
9eeb1275039275399a55eee9a0ae55d20a3de792 | 61a02aba5dde7c29ec65a87eb8a20af12d6c2b47 | /python basic/3118_최단경로2.py | ccce1ec174e94a7de53db843f1c74aeaad387cdd | [] | no_license | hksoftcorn/OfficialDocs | 0b4d0e2a71707e06ba7516e34ad176ee02726587 | cfd87d26efad484657f9493dead350cf0611a3e8 | refs/heads/master | 2023-06-30T07:09:33.641869 | 2021-07-31T14:35:28 | 2021-07-31T14:35:28 | 374,389,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | V, E = map(int, input().split())
G = [[] for _ in range(V + 1)]
for _ in range(E):
u, v, w = map(int, input().split())
G[u].append((v, w))
visited = [False] * (V + 1)
dist = [0xfffffff] * (V + 1)
dist[1] = 0
p = [0] * (V + 1)
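# O(V^2) Dijkstra from vertex 1: each round, pick the unvisited vertex with the
# smallest tentative distance, then relax its outgoing edges.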
for _ in range(V):
u, min_key = 1, 0xfffffff
for i in range(1, V+1):
if not visited[i] and min_key > dist[i]:
u, min_key = i, dist[i]
visited[u] = True
for v, w in G[u]:
if not visited[v] and dist[v] > dist[u] + w:
dist[v] = dist[u] + w
p[v] = u
print(dist[V])
| [
"[email protected]"
] | |
6f4373a988fbcd023ca39c1755c9d361c3e7daff | 2fd14347b7f43864d8153bd1c6d79198302d21ea | /ex.002 root finding/nr_problem_case.py | 3d33bede021e71d689a6e8c5cd4a3b1edf781a2e | [] | no_license | family9od/ECAre | 0fe27ff290eaa702c754fedef8953260a67592fc | ea875ea14be9d99a5e4f2191382e6eedc702b557 | refs/heads/master | 2020-06-17T02:33:30.651909 | 2016-11-15T07:45:31 | 2016-11-15T07:45:31 | 75,047,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | # -*- coding: utf8 -*-
# 2010112033 이상형 9/20
"""
Among the root-finding methods for single-variable equations, we use the
Newton-Raphson method to find a root of some function g(x).
The example below is a case where the Newton-Raphson method is hard to use.
"""
# import the rootfinding module, which collects root-finding routines for single-variable equations
import rootfinding as rf
def g(x):
    # the function whose root we want to find
return x ** 3 - 2 * x + 2
def dgdx(x):
    # the derivative of g(x) with respect to x
return 3.0 * x ** 2.0 - 2.0
if "__main__" == __name__:
    # starting from the given initial guess, look for x such that g(x) = 0
    # this can take much longer than expected (the iterates oscillate between 0 and 1)
x_nr = rf.newton(g, dgdx, 0)
print('x = %g, f(%g) = %g' % (x_nr, x_nr, g(x_nr)))
| [
"CAD Client"
] | CAD Client |
9ae009652986c6b459f9d867a41a6f768070ebda | e28ce5cca66c56ee7446a46e18375430d0d404eb | /toys/12_go/gopy/go/location.py | 3b4f56c0d82834ba26f9afa924ca8d0bbcdfb3a8 | [
"MIT"
] | permissive | git4robot/PyKids | 4fb60c5b107527336d9e686a98988ba7a8354f31 | 866e45e13171322ad1892d604508cfee9f8086c8 | refs/heads/master | 2020-04-17T20:45:26.741363 | 2020-04-14T02:21:55 | 2020-04-14T02:21:55 | 166,919,523 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | from go.utils import bold
class LocationError(Exception):
    pass
class Location(object):
    TYPES = {
        'black': bold('*'),
        'white': bold('o'),
        'empty': '.',
    }
    def __init__(self, type):
        if type not in self.TYPES:
            raise LocationError('Type must be one of the following: {0}'.format(
                self.TYPES.keys(),
            ))
        self._type = type
    def __eq__(self, other):
        return self._type == other._type
    def __hash__(self):
        return hash(self._type)
    def __str__(self):
        return self.TYPES[self._type]
    def __repr__(self):
        return self._type.title()
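# Illustrative usage:
#   Location('black') == Location('black')  # True (equality is by type)
#   repr(Location('empty'))                 # 'Empty'
#   Location('x')                           # raises LocationError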
| [
"[email protected]"
] | |
ac07a53e15aef0bb493402d8d4c3712a747239bb | 3a6cbe6940b657ac6b608ce93d8d41ffeb6b9e65 | /rocon_python_comms/src/rocon_python_comms/service_pair_server.py | 8263080327cd5dc872bcd1d3fefb91715b3bd6bf | [] | no_license | robotics-in-concert/rocon_tools | cdfc4ccfc04b79262fb151640966a33bd0b5f498 | 1f182537b26e8622eefaf6737d3b3d18b1741ca6 | refs/heads/devel | 2021-01-17T01:58:12.163878 | 2018-02-06T15:20:29 | 2018-02-06T15:20:29 | 15,774,638 | 7 | 22 | null | 2017-08-16T06:39:47 | 2014-01-09T18:02:42 | Python | UTF-8 | Python | false | false | 6,195 | py | #
# License: BSD
#   https://raw.github.com/robotics-in-concert/rocon_tools/license/LICENSE
#
##############################################################################
# Description
##############################################################################
"""
.. module:: service_pair_server
   :platform: Unix
   :synopsis: Server side api for communicating across a rocon service pair.
This module contains the server side api for communicating across a rocon
service pair. A `facade pattern`_ is used here to simplify the interaction with
the server side publisher and subscriber.
.. include:: weblinks.rst
----
"""
##############################################################################
# Imports
##############################################################################
import rospy
import threading
# Local imports
from .exceptions import ServicePairException
##############################################################################
# Server Class
##############################################################################
class ServicePairServer(object):
    '''
    The server side of a pubsub service pair. This class provides a simplified
    api for handling requests/responses on the pubsub pair. There are two
    modes of operation - 1) blocking and 2) threaded.
    **Non-Threaded**
    In the first, the users' callback function directly runs whenever an
    incoming request is received. In this case, your callbacks should be
    very minimal so that incoming requests don't get blocked and queued up.
    .. code-block:: python
        #!/usr/bin/env python
        import rospy
        from chatter.msg import ChatterRequest, ChatterResponse, ChatterPair
        from rocon_python_comms import ServicePairServer
        class ChatterServer(object):
            def __init__(self):
                self.server = ServicePairServer('chatter', self.callback, ChatterPair)
            def callback(self, request_id, msg):
                rospy.loginfo("Server : I heard %s" % msg.babble)
                response = ChatterResponse()
                response.reply = "I heard %s" % msg.babble
                self.server.reply(request_id, response)
        if __name__ == '__main__':
            rospy.init_node('chatter_server', anonymous=True)
            chatter_server = ChatterServer()
            rospy.spin()
    **Threaded**
    In the second, we spawn a background thread and shunt the callback into this thread.
    Just toggle the ``use_threads`` flag when constructing the server:
    .. code-block:: python
        self.server = ServicePairServer('chatter', self.callback, ChatterPair, use_threads=True)
    '''
    __slots__ = [
        '_publisher',
        '_subscriber',
        '_callback',
        '_use_threads',
        # '_request_handlers',  # initiate, track and execute requests with these { hex string ids : dic of RequestHandler objects (Blocking/NonBlocking) }
        'ServicePairSpec',
        'ServicePairRequest',
        'ServicePairResponse',
    ]
    ##########################################################################
    # Initialisation
    ##########################################################################
    def __init__(self, name, callback, ServicePairSpec, use_threads=False, queue_size=5):
        '''
        :param str name: resource name of service pair (e.g. testies for pair topics testies/request, testies/response)
        :param callback: function invoked when a request arrives
        :param ServicePairSpec: the pair type (e.g. rocon_service_pair_msgs.msg.TestiesPair)
        :param bool use_threads: put the callback function into a fresh background thread when a request arrives.
        :param int queue_size: size of the queue to configure the publisher with.
        '''
        self._callback = callback
        self._use_threads = use_threads
        try:
            p = ServicePairSpec()
            self.ServicePairSpec = ServicePairSpec
            """Base message type for this pair."""
            self.ServicePairRequest = type(p.pair_request)
            """Request msg type for this pair <ServicePairSpec>Request."""
            self.ServicePairResponse = type(p.pair_response)
            """Response msg type for this pair <ServicePairSpec>Response."""
        except AttributeError:
            raise ServicePairException("Type is not a pair spec: %s" % str(ServicePairSpec))
        self._subscriber = rospy.Subscriber(name + "/request", self.ServicePairRequest, self._internal_callback)
        self._publisher = rospy.Publisher(name + "/response", self.ServicePairResponse, queue_size=queue_size)
    ##########################################################################
    # Public Methods
    ##########################################################################
    def reply(self, request_id, msg):
        '''
        Send a reply to a previously received request (identified by request_id). Use this
        instead of writing directly to the publisher - just pass the content of the
        response data and the id that was issued with the request.
        :param uuid_msgs.UniqueID request_id: the request id to associate with this response.
        :param ServiceResponse msg: the response
        '''
        pair_response = self.ServicePairResponse()
        pair_response.id = request_id
        pair_response.response = msg
        self._publisher.publish(pair_response)
    ##########################################################################
    # Callbacks
    ##########################################################################
    def _internal_callback(self, msg):
        '''
        :param ServicePairRequest msg: message returned from the server (with pair id etc)
        '''
        # Check if it is a blocking call that has requested it.
        if self._use_threads:
            thread = threading.Thread(target=self._callback, args=(msg.id, msg.request))
            thread.start()
        else:
            self._callback(msg.id, msg.request)
| [
"[email protected]"
] | |
e22da16a3630862721200de043c23202f838489d | e906fe8237e5b55b7bef1f7a87884c5924ccd8b1 | /contactmps/migrations/0024_committee.py | b8cb77c7198df94f7b6f8955173bff4743b0fb99 | [
"MIT"
] | permissive | OpenUpSA/contact-mps | ac9a88ef166769d6305e213f3d77191f385c962a | 63d7f86e1b6c9319a4d0344a6125cd22770f34c7 | refs/heads/master | 2022-12-11T07:22:20.942567 | 2020-01-15T13:11:59 | 2020-01-15T13:11:59 | 93,042,651 | 0 | 2 | MIT | 2022-12-08T02:08:08 | 2017-06-01T09:52:56 | JavaScript | UTF-8 | Python | false | false | 894 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-05-14 15:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('contactmps', '0023_campaign_include_link_in_email'),
    ]
    operations = [
        migrations.CreateModel(
            name='Committee',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=300, unique=True)),
                ('slug', models.CharField(max_length=300, unique=True)),
                ('email_address', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
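# Reversing this migration drops the auto-created table (by default named
# contactmps_committee), since CreateModel is automatically reversible.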
| [
"[email protected]"
] | |
75204bbfc5d050883078af710ce97469e69c1335 | a089fab4b0e363ba48bff57b3948c32172570e8f | /home_connect_sdk/models/__init__.py | 311a2dad6bac50ae69888c78797c9a6745803fa0 | [] | no_license | jeroenvdwaal/home-connect-sdk | ed2e44a01b72d64d17d41af8400eb2e42792232c | 3c0ab6791bb0e9df95154f8f177d889ebef0c749 | refs/heads/master | 2022-04-23T01:20:32.621570 | 2020-04-26T09:40:16 | 2020-04-26T09:40:16 | 255,988,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,060 | py | # coding: utf-8
# flake8: noqa
"""
Home Connect API
This API provides access to home appliances enabled by Home Connect (https://home-connect.com). Through the API programs can be started and stopped, or home appliances configured and monitored. For instance, you can start a cotton program on a washer and get a notification when the cycle is complete. To get started with this web client, visit https://developer.home-connect.com and register an account. An application with a client ID for this API client will be automatically generated for you. In order to use this API in your own client, you need an OAuth 2 client implementing the authorization code grant flow (https://developer.home-connect.com/docs/authorization/flow). More details can be found here: https://www.rfc-editor.org/rfc/rfc6749.txt Authorization URL: https://api.home-connect.com/security/oauth/authorize Token URL: https://api.home-connect.com/security/oauth/token # noqa: E501
The version of the OpenAPI document: 1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
# import models into model package
from home_connect_sdk.models.active_program_not_set_error import ActiveProgramNotSetError
from home_connect_sdk.models.array_of_available_programs import ArrayOfAvailablePrograms
from home_connect_sdk.models.array_of_available_programs_data import ArrayOfAvailableProgramsData
from home_connect_sdk.models.array_of_available_programs_data_constraints import ArrayOfAvailableProgramsDataConstraints
from home_connect_sdk.models.array_of_available_programs_data_programs import ArrayOfAvailableProgramsDataPrograms
from home_connect_sdk.models.array_of_events import ArrayOfEvents
from home_connect_sdk.models.array_of_events_items import ArrayOfEventsItems
from home_connect_sdk.models.array_of_home_appliances import ArrayOfHomeAppliances
from home_connect_sdk.models.array_of_home_appliances_data import ArrayOfHomeAppliancesData
from home_connect_sdk.models.array_of_home_appliances_data_homeappliances import ArrayOfHomeAppliancesDataHomeappliances
from home_connect_sdk.models.array_of_images import ArrayOfImages
from home_connect_sdk.models.array_of_images_data import ArrayOfImagesData
from home_connect_sdk.models.array_of_images_data_images import ArrayOfImagesDataImages
from home_connect_sdk.models.array_of_options import ArrayOfOptions
from home_connect_sdk.models.array_of_options_data import ArrayOfOptionsData
from home_connect_sdk.models.array_of_programs import ArrayOfPrograms
from home_connect_sdk.models.array_of_programs_data import ArrayOfProgramsData
from home_connect_sdk.models.array_of_programs_data_constraints import ArrayOfProgramsDataConstraints
from home_connect_sdk.models.array_of_programs_data_programs import ArrayOfProgramsDataPrograms
from home_connect_sdk.models.array_of_settings import ArrayOfSettings
from home_connect_sdk.models.array_of_settings_data import ArrayOfSettingsData
from home_connect_sdk.models.array_of_settings_data_settings import ArrayOfSettingsDataSettings
from home_connect_sdk.models.array_of_status import ArrayOfStatus
from home_connect_sdk.models.array_of_status_data import ArrayOfStatusData
from home_connect_sdk.models.command import Command
from home_connect_sdk.models.command_data import CommandData
from home_connect_sdk.models.command_definition import CommandDefinition
from home_connect_sdk.models.command_definition_data import CommandDefinitionData
from home_connect_sdk.models.conflict import Conflict
from home_connect_sdk.models.conflict_error import ConflictError
from home_connect_sdk.models.forbidden_error import ForbiddenError
from home_connect_sdk.models.get_setting import GetSetting
from home_connect_sdk.models.get_setting_data import GetSettingData
from home_connect_sdk.models.get_setting_data_constraints import GetSettingDataConstraints
from home_connect_sdk.models.home_appliance import HomeAppliance
from home_connect_sdk.models.home_appliance_data import HomeApplianceData
from home_connect_sdk.models.interal_server_error import InteralServerError
from home_connect_sdk.models.no_program_active_error import NoProgramActiveError
from home_connect_sdk.models.no_program_selected_error import NoProgramSelectedError
from home_connect_sdk.models.not_acceptable_error import NotAcceptableError
from home_connect_sdk.models.not_found_error import NotFoundError
from home_connect_sdk.models.option import Option
from home_connect_sdk.models.option_data import OptionData
from home_connect_sdk.models.program import Program
from home_connect_sdk.models.program_data import ProgramData
from home_connect_sdk.models.program_data_options import ProgramDataOptions
from home_connect_sdk.models.program_definition import ProgramDefinition
from home_connect_sdk.models.program_definition_data import ProgramDefinitionData
from home_connect_sdk.models.program_definition_data_constraints import ProgramDefinitionDataConstraints
from home_connect_sdk.models.program_definition_data_options import ProgramDefinitionDataOptions
from home_connect_sdk.models.program_not_available_error import ProgramNotAvailableError
from home_connect_sdk.models.put_setting import PutSetting
from home_connect_sdk.models.put_setting_data import PutSettingData
from home_connect_sdk.models.put_setting_data_constraints import PutSettingDataConstraints
from home_connect_sdk.models.request_timeout_error import RequestTimeoutError
from home_connect_sdk.models.selected_program_not_set_error import SelectedProgramNotSetError
from home_connect_sdk.models.status import Status
from home_connect_sdk.models.status_data import StatusData
from home_connect_sdk.models.too_many_requests_error import TooManyRequestsError
from home_connect_sdk.models.unauthorized_error import UnauthorizedError
from home_connect_sdk.models.unauthorized_error_error import UnauthorizedErrorError
from home_connect_sdk.models.unsupported_media_type_error import UnsupportedMediaTypeError
from home_connect_sdk.models.wrong_operation_state_error import WrongOperationStateError
| [
"[email protected]"
] | |
b67056872a7437bd215bbd55010776a5e3c4c513 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/DECHUB900-HRIP-MIB-V3-0.py | 4affb4dd03a0dfee8d6e74ef3a888a878b9e33bf | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 11,491 | py | #
# PySNMP MIB module DECHUB900-HRIP-MIB-V3-0 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DECHUB900-HRIP-MIB-V3-0
# Produced by pysmi-0.3.4 at Wed May 1 12:37:38 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Counter32, IpAddress, NotificationType, Counter64, TimeTicks, ModuleIdentity, Unsigned32, Integer32, Gauge32, MibIdentifier, ObjectIdentity, iso, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Counter32", "IpAddress", "NotificationType", "Counter64", "TimeTicks", "ModuleIdentity", "Unsigned32", "Integer32", "Gauge32", "MibIdentifier", "ObjectIdentity", "iso", "Bits")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
dec = MibIdentifier((1, 3, 6, 1, 4, 1, 36))
ema = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2))
decMIBextension = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18))
decHub900 = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11))
mgmtAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1))
mgmtAgentVersion1 = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1))
hrip = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2))
hripPubRingCfgTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1), )
if mibBuilder.loadTexts: hripPubRingCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingCfgTable.setDescription('Defines a table for ring speeds. The table has 2 rows. Row 1 defines ring speed for ring A and row 2 defines the ring speed for ring B.')
hripPubRingCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripRingCfgIndex"))
if mibBuilder.loadTexts: hripPubRingCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingCfgEntry.setDescription('An entry in the hripPubRingCfgTable.')
hripRingCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ringA", 1), ("ringB", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingCfgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingCfgIndex.setDescription('Identifies the ring being accessed ie the row of the table being referred to.')
hripRingCfgSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("speed4", 2), ("speed16", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripRingCfgSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingCfgSpeed.setDescription('The speed of each of the token rings on the backplane. speed4(1) indicates a speed of 4 Mbits per second while speed16(2) indicates 16 Mbits per second. The value of this object is maintained across power cycles and resets.')
hripPubSlotCfgTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2), )
if mibBuilder.loadTexts: hripPubSlotCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotCfgTable.setDescription('Defines a table for Slot Configurations. Each row in the table corresponds to a backplane slot (hripSlotIndex).')
hripPubSlotCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripSlotCfgIndex"))
if mibBuilder.loadTexts: hripPubSlotCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotCfgEntry.setDescription('An entry in the hripPubSlotCfgTable.')
hripSlotCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotCfgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgIndex.setDescription('Index into the table of slot configurations.')
hripSlotCfgDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("enabled-1", 1), ("disabled-1", 2), ("enabled-2", 3), ("disabled-4", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripSlotCfgDisable.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgDisable.setDescription('Locks out the corresponding backplane port in that slot. -2 is used for linecards like the MIPPY that have multiple physical token ring backplane ports. The default setting is enable (for ports 1 & 2) The value of this object is maintained across power cycles and resets.')
hripSlotCfgForce = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("noForce-1", 1), ("forceRingA-1", 2), ("forceRingB-1", 3), ("noForce-2", 4), ("forceRingA-2", 5), ("forceRingB-2", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripSlotCfgForce.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgForce.setDescription('Describes a slot/ring pairing. -2 is used for linecards like the MIPPY that have multiple physical token ring backplane ports. The value of this object is maintained across power cycles and resets.')
hripPubRingStatTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3), )
if mibBuilder.loadTexts: hripPubRingStatTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingStatTable.setDescription('A table describing the number of modules on each ring.')
hripPubRingStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripRingStatIndex"))
if mibBuilder.loadTexts: hripPubRingStatEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingStatEntry.setDescription('An entry describing the number of modules on each ring.')
hripRingStatIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ringA", 1), ("ringB", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingStatIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingStatIndex.setDescription('An index into the hripPubRingStatTable.')
hripRingStatNumModInserted = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingStatNumModInserted.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingStatNumModInserted.setDescription('The number of modules inserted onto the ring.')
hripPubSlotStatTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4), )
if mibBuilder.loadTexts: hripPubSlotStatTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotStatTable.setDescription('The status of modules inserted on each slot of backplane.')
hripPubSlotStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripSlotStatIndex"))
if mibBuilder.loadTexts: hripPubSlotStatEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotStatEntry.setDescription('An entry in the hripPubSlotStatTable.')
hripSlotStatIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatIndex.setDescription('The index into slot status table.')
hripSlotStatRingAInsertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatRingAInsertCount.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatRingAInsertCount.setDescription('The number of times that the module has transitioned between inserted/wrapped states on backplane ring A, since the module was last reset/power-cycled.')
hripSlotStatRingBInsertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatRingBInsertCount.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatRingBInsertCount.setDescription('The number of times that the module has transitioned between inserted/wrapped states on backplane ring B, since the module was last reset/power-cycled.')
hripSlotStatTcuA = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inserted", 1), ("wrapped", 2), ("notTR", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatTcuA.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatTcuA.setDescription('Status of the TCU on ring A. If there is a non Token Ring linecard plugged into the hub, the value reported should be nonTR(3). For a Token Ring line-card the value is inserted or wrapped')
hripSlotStatTcuB = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inserted", 1), ("wrapped", 2), ("notTR", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatTcuB.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatTcuB.setDescription('Status of the TCU on ring B. If there is a non Token Ring linecard plugged into the hub, the value reported should be nonTR(3). For a Token Ring line-card the value is inserted or wrapped ')
mibBuilder.exportSymbols("DECHUB900-HRIP-MIB-V3-0", hripRingStatIndex=hripRingStatIndex, hripRingCfgIndex=hripRingCfgIndex, hripPubSlotStatTable=hripPubSlotStatTable, decMIBextension=decMIBextension, hripPubSlotStatEntry=hripPubSlotStatEntry, mgmtAgentVersion1=mgmtAgentVersion1, hripRingStatNumModInserted=hripRingStatNumModInserted, dec=dec, hripPubRingStatTable=hripPubRingStatTable, hrip=hrip, hripSlotStatRingAInsertCount=hripSlotStatRingAInsertCount, hripSlotStatTcuB=hripSlotStatTcuB, mgmtAgent=mgmtAgent, hripSlotStatIndex=hripSlotStatIndex, ema=ema, hripSlotCfgDisable=hripSlotCfgDisable, hripRingCfgSpeed=hripRingCfgSpeed, hripSlotStatRingBInsertCount=hripSlotStatRingBInsertCount, hripPubSlotCfgEntry=hripPubSlotCfgEntry, hripSlotCfgForce=hripSlotCfgForce, hripPubRingStatEntry=hripPubRingStatEntry, decHub900=decHub900, hripPubRingCfgEntry=hripPubRingCfgEntry, hripSlotStatTcuA=hripSlotStatTcuA, hripPubSlotCfgTable=hripPubSlotCfgTable, hripSlotCfgIndex=hripSlotCfgIndex, hripPubRingCfgTable=hripPubRingCfgTable)
| [
"[email protected]"
] | |
3652f4d252652605a8f6ef2c32218b505955d203 | 627094b5e463bd113f626450eaceb01dfa4ff5d5 | /test/client/test_link_control.py | 0e4014b88ba456056e0e54eef493cfb4f701e752 | [
"MIT"
] | permissive | DaleChen0351/python-udsoncan | 49eefcb299e2a4fabe0bf168905cc86ef43d6f62 | c495e872c69c4ea05e3b477d2a1088cb83167a17 | refs/heads/master | 2020-04-20T06:10:25.252315 | 2019-03-28T07:38:17 | 2019-03-28T07:38:17 | 168,675,483 | 0 | 0 | MIT | 2019-03-28T07:38:19 | 2019-02-01T09:42:02 | Python | UTF-8 | Python | false | false | 6,890 | py | from udsoncan.client import Client
from udsoncan import services, Baudrate
from udsoncan.exceptions import *
from test.ClientServerTest import ClientServerTest
class TestLinkControl(ClientServerTest):
    def __init__(self, *args, **kwargs):
        ClientServerTest.__init__(self, *args, **kwargs)
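    # Each test_* method below plays the stub server side (popping the raw
    # request from touserqueue and pushing back a canned response), while the
    # paired _test_* method drives the UDS client under test.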
    def test_linkcontrol_verify_fixed(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x01\x11")
        self.conn.fromuserqueue.put(b"\xC7\x01")  # Positive response
    def _test_linkcontrol_verify_fixed(self):
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Fixed)
        response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 1)
    def test_linkcontrol_verify_fixed_spr(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x81\x11")
        self.conn.fromuserqueue.put("wait")  # Synchronize
    def _test_linkcontrol_verify_fixed_spr(self):
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Fixed)
        with self.udsclient.suppress_positive_response:
            response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
            self.assertEqual(response, None)
        self.conn.fromuserqueue.get(timeout=0.2)  # Avoid closing connection prematurely
    def test_linkcontrol_verify_fixed_from_specific(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x01\x11")
        self.conn.fromuserqueue.put(b"\xC7\x01")  # Positive response
    def _test_linkcontrol_verify_fixed_from_specific(self):
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Specific)
        response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 1)
    def test_linkcontrol_verify_specific(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x02\x12\x34\x56")
        self.conn.fromuserqueue.put(b"\xC7\x02")  # Positive response
    def _test_linkcontrol_verify_specific(self):
        baudrate = Baudrate(0x123456, baudtype=Baudrate.Type.Specific)
        response = self.udsclient.link_control(control_type=2, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 2)
    def test_linkcontrol_verify_specific_from_fixed(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x02\x07\xA1\x20")
        self.conn.fromuserqueue.put(b"\xC7\x02")  # Positive response
    def _test_linkcontrol_verify_specific_from_fixed(self):
        baudrate = Baudrate(500000, baudtype=Baudrate.Type.Fixed)
        response = self.udsclient.link_control(control_type=2, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
    def test_linkcontrol_custom_control_type(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x55")
        self.conn.fromuserqueue.put(b"\xC7\x55")  # Positive response
    def _test_linkcontrol_custom_control_type(self):
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
    def test_linkcontrol_negative_response_exception(self):
        self.wait_request_and_respond(b"\x7F\x87\x31")  # Request Out Of Range
    def _test_linkcontrol_negative_response_exception(self):
        with self.assertRaises(NegativeResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_negative_response_no_exception(self):
        self.wait_request_and_respond(b"\x7F\x87\x31")  # Request Out Of Range
    def _test_linkcontrol_negative_response_no_exception(self):
        self.udsclient.config['exception_on_negative_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertFalse(response.positive)
    def test_linkcontrol_invalidservice_exception(self):
        self.wait_request_and_respond(b"\x00\x22")  # Request Out Of Range
    def _test_linkcontrol_invalidservice_exception(self):
        with self.assertRaises(InvalidResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_invalidservice_no_exception(self):
        self.wait_request_and_respond(b"\x00\x22")  # Request Out Of Range
    def _test_linkcontrol_invalidservice_no_exception(self):
        self.udsclient.config['exception_on_invalid_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertFalse(response.valid)
    def test_linkcontrol_wrongservice_exception(self):
        self.wait_request_and_respond(b"\x7E\x22")  # Valid but wrong service (Tester Present)
    def _test_linkcontrol_wrongservice_exception(self):
        with self.assertRaises(UnexpectedResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_wrongservice_no_exception(self):
        self.wait_request_and_respond(b"\x7E\x22")  # Valid but wrong service (Tester Present)
    def _test_linkcontrol_wrongservice_no_exception(self):
        self.udsclient.config['exception_on_unexpected_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.unexpected)
    def test_linkcontrol_bad_control_type_exception(self):
        self.wait_request_and_respond(b"\xC7\x08")  # Valid but bad control type
    def _test_linkcontrol_bad_control_type_exception(self):
        with self.assertRaises(UnexpectedResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_bad_control_type_no_exception(self):
        self.wait_request_and_respond(b"\xC7\x08")  # Valid but bad control type
    def _test_linkcontrol_bad_control_type_no_exception(self):
        self.udsclient.config['exception_on_unexpected_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.unexpected)
    def test_bad_param(self):
        pass
    def _test_bad_param(self):
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type='x')
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=0x80)
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1)  # Missing Baudrate
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=2)  # Missing Baudrate
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=0, baudrate=Baudrate(500000))  # Baudrate is not needed
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1, baudrate=1)  # Baudrate should be Baudrate instance
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1, baudrate='x')  # Baudrate should be Baudrate instance
| [
"[email protected]"
] | |
8d838ad1b17dd0480a189e316ae027e1fd5cb5b4 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /LR98GCwLGYPSv8Afb_1.py | 51545319e53c872c2a1520d669972a99be80e25f | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py |
def pluralize(lst):
return {i+'s' if lst.count(i)>1 else i for i in lst}
| [
"[email protected]"
] | |
ed64e352839fee277680c8be39d3058c38d029a5 | d570fc2e36f0842605ad6e9dda3cbd4910160a07 | /src/webdav/Resource.py | 5b3121865ca3ace9d66cf08ff6f649d0b1b59b89 | [
"ZPL-2.1"
] | permissive | zopefoundation/ZServer | 8540fc7c411a7857abf4034068f75f2f1c7ba98c | eb047c795a278c22ae77f5af4284411e4689025e | refs/heads/master | 2023-06-21T20:54:53.580461 | 2023-02-10T09:43:55 | 2023-02-10T09:43:55 | 65,092,325 | 6 | 9 | NOASSERTION | 2020-09-17T07:25:50 | 2016-08-06T16:47:48 | Python | UTF-8 | Python | false | false | 27,157 | py | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""WebDAV support - resource objects.
"""
import mimetypes
import sys
import re
from urllib import unquote
from AccessControl import getSecurityManager
from AccessControl import ClassSecurityInfo
from AccessControl.class_init import InitializeClass
from AccessControl.Permissions import delete_objects
from AccessControl.Permissions import manage_properties
from AccessControl.Permissions import view as View
from AccessControl.Permissions import webdav_lock_items
from AccessControl.Permissions import webdav_unlock_items
from AccessControl.Permissions import webdav_access
from Acquisition import aq_base
from Acquisition import aq_inner
from Acquisition import aq_parent
from App.Common import rfc1123_date
from ExtensionClass import Base
from OFS.event import ObjectClonedEvent
from OFS.event import ObjectWillBeMovedEvent
from OFS.interfaces import IWriteLock
from OFS.Lockable import LockableItem
from OFS.Lockable import wl_isLockable
from OFS.Lockable import wl_isLocked
from OFS.subscribers import compatibilityCall
from zExceptions import BadRequest
from zExceptions import Forbidden
from zExceptions import MethodNotAllowed
from zExceptions import NotFound
from zExceptions import Unauthorized
import ZServer.Zope2.Startup.config
from ZPublisher.HTTPRangeSupport import HTTPRangeInterface
from zope.interface import implementer
from zope.event import notify
from zope.lifecycleevent import ObjectCopiedEvent
from zope.lifecycleevent import ObjectMovedEvent
from zope.container.contained import notifyContainerModified
from webdav.common import absattr
from webdav.common import Conflict
from webdav.common import IfParser
from webdav.common import isDavCollection
from webdav.common import Locked
from webdav.common import PreconditionFailed
from webdav.common import tokenFinder
from webdav.common import urlbase
from webdav.common import urlfix
from webdav.interfaces import IDAVResource
ms_dav_agent = re.compile("Microsoft.*Internet Publishing.*")
@implementer(IDAVResource)
class Resource(Base, LockableItem):
"""The Resource mixin class provides basic WebDAV support for
non-collection objects. It provides default implementations
for most supported WebDAV HTTP methods, however certain methods
such as PUT should be overridden to ensure correct behavior in
the context of the object type."""
__dav_resource__ = 1
__http_methods__ = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'OPTIONS',
'TRACE', 'PROPFIND', 'PROPPATCH', 'MKCOL', 'COPY',
'MOVE', 'LOCK', 'UNLOCK',
)
security = ClassSecurityInfo()
security.setPermissionDefault(webdav_access, ('Authenticated', 'Manager'))
def dav__init(self, request, response):
# Init expected HTTP 1.1 / WebDAV headers which are not
# currently set by the base response object automagically.
#
# We sniff for a ZServer response object, because we don't
# want to write duplicate headers (since ZS writes Date
# and Connection itself).
if not hasattr(response, '_server_version'):
response.setHeader('Connection', 'close')
response.setHeader('Date', rfc1123_date(), 1)
# HTTP Range support
if HTTPRangeInterface.providedBy(self):
response.setHeader('Accept-Ranges', 'bytes')
else:
response.setHeader('Accept-Ranges', 'none')
def dav__validate(self, object, methodname, REQUEST):
msg = ('<strong>You are not authorized '
'to access this resource.</strong>')
method = None
if hasattr(object, methodname):
method = getattr(object, methodname)
else:
try:
method = object.aq_acquire(methodname)
except Exception:
method = None
if method is not None:
try:
return getSecurityManager().validate(None, object,
methodname,
method)
except Exception:
pass
raise Unauthorized(msg)
def dav__simpleifhandler(self, request, response, method='PUT',
col=0, url=None, refresh=0):
ifhdr = request.get_header('If', None)
lockable = wl_isLockable(self)
if not lockable:
# degenerate case, we shouldnt have even called this method.
return None
locked = self.wl_isLocked()
if locked and (not ifhdr):
raise Locked('Resource is locked.')
if not ifhdr:
return None
# Since we're a simple if handler, and since some clients don't
# pass in the port information in the resource part of an If
# header, we're only going to worry about if the paths compare
if url is None:
url = urlfix(request['URL'], method)
url = urlbase(url) # Gets just the path information
# if 'col' is passed in, an operation is happening on a submember
# of a collection, while the Lock may be on the parent. Lob off
# the final part of the URL (ie '/a/b/foo.html' becomes '/a/b/')
if col:
url = url[:url.rfind('/') + 1]
found = 0
resourcetagged = 0
taglist = IfParser(ifhdr)
for tag in taglist:
if not tag.resource:
# There's no resource (url) with this tag
tag_list = map(tokenFinder, tag.list)
wehave = [t for t in tag_list if self.wl_hasLock(t)]
if not wehave:
continue
if tag.NOTTED:
continue
if refresh:
for token in wehave:
self.wl_getLock(token).refresh()
resourcetagged = 1
found = 1
break
elif urlbase(tag.resource) == url:
resourcetagged = 1
tag_list = map(tokenFinder, tag.list)
wehave = [t for t in tag_list if self.wl_hasLock(t)]
if not wehave:
continue
if tag.NOTTED:
continue
if refresh:
for token in wehave:
self.wl_getLock(token).refresh()
found = 1
break
if resourcetagged and (not found):
raise PreconditionFailed('Condition failed.')
elif resourcetagged and found:
return 1
else:
return 0
# WebDAV class 1 support
security.declareProtected(View, 'HEAD')
def HEAD(self, REQUEST, RESPONSE):
"""Retrieve resource information without a response body."""
self.dav__init(REQUEST, RESPONSE)
content_type = None
if hasattr(self, 'content_type'):
content_type = absattr(self.content_type)
if content_type is None:
url = urlfix(REQUEST['URL'], 'HEAD')
name = unquote(filter(None, url.split('/')[-1]))
content_type, encoding = mimetypes.guess_type(name)
if content_type is None:
if hasattr(self, 'default_content_type'):
content_type = absattr(self.default_content_type)
if content_type is None:
content_type = 'application/octet-stream'
RESPONSE.setHeader('Content-Type', content_type.lower())
if hasattr(aq_base(self), 'get_size'):
RESPONSE.setHeader('Content-Length', absattr(self.get_size))
if hasattr(self, '_p_mtime'):
mtime = rfc1123_date(self._p_mtime)
RESPONSE.setHeader('Last-Modified', mtime)
if hasattr(aq_base(self), 'http__etag'):
etag = self.http__etag(readonly=1)
if etag:
RESPONSE.setHeader('Etag', etag)
RESPONSE.setStatus(200)
return RESPONSE
def PUT(self, REQUEST, RESPONSE):
"""Replace the GET response entity of an existing resource.
Because this is often object-dependent, objects which handle
PUT should override the default PUT implementation with an
object-specific implementation. By default, PUT requests
fail with a 405 (Method Not Allowed)."""
self.dav__init(REQUEST, RESPONSE)
raise MethodNotAllowed('Method not supported for this resource.')
security.declarePublic('OPTIONS')
def OPTIONS(self, REQUEST, RESPONSE):
"""Retrieve communication options."""
self.dav__init(REQUEST, RESPONSE)
RESPONSE.setHeader('Allow', ', '.join(self.__http_methods__))
RESPONSE.setHeader('Content-Length', 0)
RESPONSE.setHeader('DAV', '1,2', 1)
# Microsoft Web Folders compatibility, only enabled if
# User-Agent matches.
if ms_dav_agent.match(REQUEST.get_header('User-Agent', '')):
if ZServer.Zope2.Startup.config.ZSERVER_ENABLE_MS_PUBLIC_HEADER:
RESPONSE.setHeader('Public', ', '.join(self.__http_methods__))
RESPONSE.setStatus(200)
return RESPONSE
security.declarePublic('TRACE')
def TRACE(self, REQUEST, RESPONSE):
"""Return the HTTP message received back to the client as the
entity-body of a 200 (OK) response. This will often usually
be intercepted by the web server in use. If not, the TRACE
request will fail with a 405 (Method Not Allowed), since it
is not often possible to reproduce the HTTP request verbatim
from within the Zope environment."""
self.dav__init(REQUEST, RESPONSE)
raise MethodNotAllowed('Method not supported for this resource.')
security.declareProtected(delete_objects, 'DELETE')
def DELETE(self, REQUEST, RESPONSE):
"""Delete a resource. For non-collection resources, DELETE may
return either 200 or 204 (No Content) to indicate success."""
self.dav__init(REQUEST, RESPONSE)
ifhdr = REQUEST.get_header('If', '')
url = urlfix(REQUEST['URL'], 'DELETE')
name = unquote(filter(None, url.split('/')[-1]))
parent = aq_parent(aq_inner(self))
# Lock checking
if wl_isLocked(self):
if ifhdr:
self.dav__simpleifhandler(REQUEST, RESPONSE, 'DELETE')
else:
# We're locked, and no if header was passed in, so
# the client doesn't own a lock.
raise Locked('Resource is locked.')
elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
if ifhdr:
parent.dav__simpleifhandler(REQUEST, RESPONSE, 'DELETE', col=1)
else:
# Our parent is locked, and no If header was passed in.
# When a parent is locked, members cannot be removed
raise PreconditionFailed(
'Resource is locked, and no condition was passed in.')
# Either we're not locked, or a succesful lock token was submitted
# so we can delete the lock now.
# ajung: Fix for Collector # 2196
if parent.manage_delObjects([name], REQUEST=None) is None:
RESPONSE.setStatus(204)
else:
RESPONSE.setStatus(403)
return RESPONSE
security.declareProtected(webdav_access, 'PROPFIND')
def PROPFIND(self, REQUEST, RESPONSE):
"""Retrieve properties defined on the resource."""
from webdav.davcmds import PropFind
self.dav__init(REQUEST, RESPONSE)
cmd = PropFind(REQUEST)
result = cmd.apply(self)
# work around MSIE DAV bug for creation and modified date
if (REQUEST.get_header('User-Agent') ==
'Microsoft Data Access Internet Publishing Provider DAV 1.1'):
result = result.replace('<n:getlastmodified xmlns:n="DAV:">',
'<n:getlastmodified xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.rfc1123">') # NOQA
result = result.replace('<n:creationdate xmlns:n="DAV:">',
'<n:creationdate xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.tz">') # NOQA
RESPONSE.setStatus(207)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setBody(result)
return RESPONSE
security.declareProtected(manage_properties, 'PROPPATCH')
def PROPPATCH(self, REQUEST, RESPONSE):
"""Set and/or remove properties defined on the resource."""
from webdav.davcmds import PropPatch
self.dav__init(REQUEST, RESPONSE)
if not hasattr(aq_base(self), 'propertysheets'):
raise MethodNotAllowed(
'Method not supported for this resource.')
# Lock checking
ifhdr = REQUEST.get_header('If', '')
if wl_isLocked(self):
if ifhdr:
self.dav__simpleifhandler(REQUEST, RESPONSE, 'PROPPATCH')
else:
raise Locked('Resource is locked.')
cmd = PropPatch(REQUEST)
result = cmd.apply(self)
RESPONSE.setStatus(207)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setBody(result)
return RESPONSE
def MKCOL(self, REQUEST, RESPONSE):
"""Create a new collection resource. If called on an existing
resource, MKCOL must fail with 405 (Method Not Allowed)."""
self.dav__init(REQUEST, RESPONSE)
raise MethodNotAllowed('The resource already exists.')
security.declarePublic('COPY')
def COPY(self, REQUEST, RESPONSE):
"""Create a duplicate of the source resource whose state
and behavior match that of the source resource as closely
as possible. Though we may later try to make a copy appear
seamless across namespaces (e.g. from Zope to Apache), COPY
is currently only supported within the Zope namespace."""
self.dav__init(REQUEST, RESPONSE)
if not hasattr(aq_base(self), 'cb_isCopyable') or \
not self.cb_isCopyable():
raise MethodNotAllowed('This object may not be copied.')
depth = REQUEST.get_header('Depth', 'infinity')
if depth not in ('0', 'infinity'):
raise BadRequest('Invalid Depth header.')
dest = REQUEST.get_header('Destination', '')
while dest and dest[-1] == '/':
dest = dest[:-1]
if not dest:
raise BadRequest('Invalid Destination header.')
try:
path = REQUEST.physicalPathFromURL(dest)
except ValueError:
raise BadRequest('Invalid Destination header')
name = path.pop()
oflag = REQUEST.get_header('Overwrite', 'F').upper()
if oflag not in ('T', 'F'):
raise BadRequest('Invalid Overwrite header.')
try:
parent = self.restrictedTraverse(path)
except ValueError:
raise Conflict('Attempt to copy to an unknown namespace.')
except NotFound:
raise Conflict('Object ancestors must already exist.')
except Exception:
raise
if hasattr(parent, '__null_resource__'):
raise Conflict('Object ancestors must already exist.')
existing = hasattr(aq_base(parent), name)
if existing and oflag == 'F':
raise PreconditionFailed('Destination resource exists.')
try:
parent._checkId(name, allow_dup=1)
except Exception:
raise Forbidden(sys.exc_info()[1])
try:
parent._verifyObjectPaste(self)
except Unauthorized:
raise
except Exception:
raise Forbidden(sys.exc_info()[1])
# Now check locks. The If header on a copy only cares about the
# lock on the destination, so we need to check out the destinations
# lock status.
ifhdr = REQUEST.get_header('If', '')
if existing:
# The destination itself exists, so we need to check its locks
destob = aq_base(parent)._getOb(name)
if IWriteLock.providedBy(destob) and destob.wl_isLocked():
if ifhdr:
itrue = destob.dav__simpleifhandler(
REQUEST, RESPONSE, 'COPY', refresh=1)
if not itrue:
raise PreconditionFailed()
else:
raise Locked('Destination is locked.')
elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
if ifhdr:
parent.dav__simpleifhandler(REQUEST, RESPONSE, 'COPY',
refresh=1)
else:
raise Locked('Destination is locked.')
self._notifyOfCopyTo(parent, op=0)
ob = self._getCopy(parent)
ob._setId(name)
if depth == '0' and isDavCollection(ob):
for id in ob.objectIds():
ob._delObject(id)
notify(ObjectCopiedEvent(ob, self))
if existing:
object = getattr(parent, name)
self.dav__validate(object, 'DELETE', REQUEST)
parent._delObject(name)
parent._setObject(name, ob)
ob = parent._getOb(name)
ob._postCopy(parent, op=0)
compatibilityCall('manage_afterClone', ob, ob)
notify(ObjectClonedEvent(ob))
# We remove any locks from the copied object because webdav clients
# don't track the lock status and the lock token for copied resources
ob.wl_clearLocks()
RESPONSE.setStatus(existing and 204 or 201)
if not existing:
RESPONSE.setHeader('Location', dest)
RESPONSE.setBody('')
return RESPONSE
security.declarePublic('MOVE')
def MOVE(self, REQUEST, RESPONSE):
"""Move a resource to a new location. Though we may later try to
make a move appear seamless across namespaces (e.g. from Zope
to Apache), MOVE is currently only supported within the Zope
namespace."""
self.dav__init(REQUEST, RESPONSE)
self.dav__validate(self, 'DELETE', REQUEST)
if not hasattr(aq_base(self), 'cb_isMoveable') or \
not self.cb_isMoveable():
raise MethodNotAllowed('This object may not be moved.')
dest = REQUEST.get_header('Destination', '')
try:
path = REQUEST.physicalPathFromURL(dest)
except ValueError:
raise BadRequest('No destination given')
flag = REQUEST.get_header('Overwrite', 'F')
flag = flag.upper()
name = path.pop()
parent_path = '/'.join(path)
try:
parent = self.restrictedTraverse(path)
except ValueError:
raise Conflict('Attempt to move to an unknown namespace.')
except 'Not Found':
raise Conflict('The resource %s must exist.' % parent_path)
except Exception:
raise
if hasattr(parent, '__null_resource__'):
raise Conflict('The resource %s must exist.' % parent_path)
existing = hasattr(aq_base(parent), name)
if existing and flag == 'F':
raise PreconditionFailed('Resource %s exists.' % dest)
try:
parent._checkId(name, allow_dup=1)
except Exception:
raise Forbidden(sys.exc_info()[1])
try:
parent._verifyObjectPaste(self)
except Unauthorized:
raise
except Exception:
raise Forbidden(sys.exc_info()[1])
# Now check locks. Since we're affecting the resource that we're
# moving as well as the destination, we have to check both.
ifhdr = REQUEST.get_header('If', '')
if existing:
# The destination itself exists, so we need to check its locks
destob = aq_base(parent)._getOb(name)
if IWriteLock.providedBy(destob) and destob.wl_isLocked():
if ifhdr:
itrue = destob.dav__simpleifhandler(
REQUEST, RESPONSE, 'MOVE', url=dest, refresh=1)
if not itrue:
raise PreconditionFailed
else:
raise Locked('Destination is locked.')
elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
# There's no existing object in the destination folder, so
# we need to check the folders locks since we're changing its
# member list
if ifhdr:
itrue = parent.dav__simpleifhandler(REQUEST, RESPONSE, 'MOVE',
col=1, url=dest, refresh=1)
if not itrue:
raise PreconditionFailed('Condition failed.')
else:
raise Locked('Destination is locked.')
if wl_isLocked(self):
# Lastly, we check ourselves
if ifhdr:
itrue = self.dav__simpleifhandler(REQUEST, RESPONSE, 'MOVE',
refresh=1)
if not itrue:
raise PreconditionFailed('Condition failed.')
else:
raise Locked('Source is locked and no condition was passed in')
orig_container = aq_parent(aq_inner(self))
orig_id = self.getId()
self._notifyOfCopyTo(parent, op=1)
notify(ObjectWillBeMovedEvent(self, orig_container, orig_id,
parent, name))
# try to make ownership explicit so that it gets carried
# along to the new location if needed.
self.manage_changeOwnershipType(explicit=1)
ob = self._getCopy(parent)
ob._setId(name)
orig_container._delObject(orig_id, suppress_events=True)
if existing:
object = getattr(parent, name)
self.dav__validate(object, 'DELETE', REQUEST)
parent._delObject(name)
parent._setObject(name, ob, set_owner=0, suppress_events=True)
ob = parent._getOb(name)
notify(ObjectMovedEvent(ob, orig_container, orig_id, parent, name))
notifyContainerModified(orig_container)
if aq_base(orig_container) is not aq_base(parent):
notifyContainerModified(parent)
ob._postCopy(parent, op=1)
# try to make ownership implicit if possible
ob.manage_changeOwnershipType(explicit=0)
RESPONSE.setStatus(existing and 204 or 201)
if not existing:
RESPONSE.setHeader('Location', dest)
RESPONSE.setBody('')
return RESPONSE
# WebDAV Class 2, Lock and Unlock
security.declareProtected(webdav_lock_items, 'LOCK')
def LOCK(self, REQUEST, RESPONSE):
"""Lock a resource"""
from webdav.davcmds import Lock
self.dav__init(REQUEST, RESPONSE)
security = getSecurityManager()
creator = security.getUser()
body = REQUEST.get('BODY', '')
ifhdr = REQUEST.get_header('If', None)
depth = REQUEST.get_header('Depth', 'infinity')
alreadylocked = wl_isLocked(self)
if body and alreadylocked:
# This is a full LOCK request, and the Resource is
# already locked, so we need to raise the alreadylocked
# exception.
RESPONSE.setStatus(423)
elif body:
# This is a normal lock request with an XML payload
cmd = Lock(REQUEST)
token, result = cmd.apply(self, creator, depth=depth)
if result:
# Return the multistatus result (there were multiple
# errors. Note that davcmds.Lock.apply aborted the
# transaction already.
RESPONSE.setStatus(207)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setBody(result)
else:
# Success
lock = self.wl_getLock(token)
RESPONSE.setStatus(200)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setHeader('Lock-Token', 'opaquelocktoken:' + token)
RESPONSE.setBody(lock.asXML())
else:
# There's no body, so this likely to be a refresh request
if not ifhdr:
raise PreconditionFailed('If Header Missing')
taglist = IfParser(ifhdr)
found = 0
for tag in taglist:
for listitem in tag.list:
token = tokenFinder(listitem)
if token and self.wl_hasLock(token):
lock = self.wl_getLock(token)
timeout = REQUEST.get_header('Timeout', 'Infinite')
lock.setTimeout(timeout) # automatically refreshes
found = 1
RESPONSE.setStatus(200)
RESPONSE.setHeader('Content-Type',
'text/xml; charset="utf-8"')
RESPONSE.setBody(lock.asXML())
break
if found:
break
if not found:
RESPONSE.setStatus(412) # Precondition failed
return RESPONSE
security.declareProtected(webdav_unlock_items, 'UNLOCK')
def UNLOCK(self, REQUEST, RESPONSE):
"""Remove an existing lock on a resource."""
from webdav.davcmds import Unlock
self.dav__init(REQUEST, RESPONSE)
token = REQUEST.get_header('Lock-Token', '')
url = REQUEST['URL']
token = tokenFinder(token)
cmd = Unlock()
result = cmd.apply(self, token, url)
if result:
RESPONSE.setStatus(207)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setBody(result)
else:
RESPONSE.setStatus(204) # No Content response code
return RESPONSE
security.declareProtected(webdav_access, 'manage_DAVget')
def manage_DAVget(self):
"""Gets the document source"""
# The default implementation calls manage_FTPget
return self.manage_FTPget()
security.declareProtected(webdav_access, 'listDAVObjects')
def listDAVObjects(self):
return []
InitializeClass(Resource)
| [
"[email protected]"
] | |
3e865ff8ba54efeccf0945858bdb43e9be54a743 | 837762524db70b805fbf46f62a14be32e32dabd9 | /scripts/train.py | df35cdfb4e3c068ebba443e31d700f6c49358b2b | [
"Apache-2.0"
] | permissive | jordancaraballo/nga-deep-learning | 832e54afb978a84875d1c09a7c00055e698f2a7b | 752266ccc06efacdef2423214998ecfced7eafb7 | refs/heads/master | 2023-06-30T14:39:49.448265 | 2021-07-27T20:00:52 | 2021-07-27T20:00:52 | 343,627,410 | 23 | 4 | null | null | null | null | UTF-8 | Python | false | false | 5,992 | py | # --------------------------------------------------------------------------
# Preprocessing and dataset creation from NGA data. This assumes you provide
# a configuration file with required parameters and files.
# --------------------------------------------------------------------------
import os # system modifications
import sys # system modifications
import time # tracking time
import numpy as np # for arrays modifications
import cupy as cp # for arrays modifications
import tensorflow as tf # deep learning framework
from core.unet import unet_batchnorm # unet network to work with
from core.utils import get_training_dataset # getting training dataset
from core.utils import get_tensorslices # getting tensor slices
from core.utils import gen_callbacks # generate callbacks
# tensorflow imports
# from tensorflow.keras.mixed_precision import experimental as mixed_precision
from tensorflow.keras import mixed_precision
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.optimizers import Adadelta
# define configuration object
from config import Config
config = Config.Configuration()
__author__ = "Jordan A Caraballo-Vega, Science Data Processing Branch"
__email__ = "[email protected]"
__status__ = "Development"
# Define some environment variables to help refining randomness.
# Note: there might still be some randomness since most of the code
# is ran on GPU and sometimes parallelization brings changes.
np.random.seed(config.SEED)
tf.random.set_seed(config.SEED)
cp.random.seed(config.SEED)
print(f"Tensorflow ver. {tf.__version__}")
# verify GPU devices are available and ready
os.environ['CUDA_VISIBLE_DEVICES'] = config.CUDA
devices = tf.config.list_physical_devices('GPU')
assert len(devices) != 0, "No GPU devices found."
# ------------------------------------------------------------------
# System Configurations
# ------------------------------------------------------------------
if config.MIRROR_STRATEGY:
strategy = tf.distribute.MirroredStrategy()
print('Multi-GPU enabled')
if config.MIXED_PRECISION:
policy = mixed_precision.Policy('mixed_float16')
mixed_precision.set_global_policy(policy)
print('Mixed precision enabled')
if config.XLA_ACCELERATE:
tf.config.optimizer.set_jit(True)
print('Accelerated Linear Algebra enabled')
# Disable AutoShard, data lives in memory, use in memory options
options = tf.data.Options()
options.experimental_distribute.auto_shard_policy = \
tf.data.experimental.AutoShardPolicy.OFF
# ---------------------------------------------------------------------------
# script train.py
# ---------------------------------------------------------------------------
def main():
# Main function to collect configuration file and run the script
print(f'GPU REPLICAS: {strategy.num_replicas_in_sync}')
t0 = time.time()
print(f'Train dir: {config.TRAIN_DATADIR}')
print(f'Validation dir: {config.VAL_DATADIR}')
# Initialize Callbacks
callbacks = gen_callbacks(config, config.CALLBACKS_METADATA)
# open files and get dataset tensor slices
train_images, train_labels = get_tensorslices(
data_dir=config.TRAIN_DATADIR, img_id='x', label_id='y'
)
# open files and get dataset tensor slices
val_images, val_labels = get_tensorslices(
data_dir=config.VAL_DATADIR, img_id='x', label_id='y'
)
# extract values for training
NUM_TRAINING_IMAGES = train_images.shape[0]
NUM_VALIDATION_IMAGES = val_images.shape[0]
STEPS_PER_EPOCH = NUM_TRAINING_IMAGES // config.BATCH_SIZE
print(f'{NUM_TRAINING_IMAGES} training images')
print(f'{NUM_VALIDATION_IMAGES} validation images')
# generate training dataset
train_dataset = \
tf.data.Dataset.from_tensor_slices((train_images, train_labels))
# generate validation dataset
val_dataset = tf.data.Dataset.from_tensor_slices((val_images, val_labels))
val_dataset = val_dataset.batch(config.VAL_BATCH_SIZE)
# Create model output directory
os.system(f'mkdir -p {config.MODEL_SAVEDIR}')
# Initialize and compile model
with strategy.scope():
# initialize UNet model
model = unet_batchnorm(
nclass=config.N_CLASSES, input_size=config.INPUT_SIZE,
maps=config.MODEL_METADATA['network_maps']
)
        # initialize optimizer, exit if optimizer is not valid
if config.MODEL_METADATA['optimizer_name'] == 'Adadelta':
optimizer = Adadelta(lr=config.MODEL_METADATA['lr'])
elif config.MODEL_METADATA['optimizer_name'] == 'Adam':
optimizer = Adam(lr=config.MODEL_METADATA['lr'])
else:
sys.exit('Optimizer provided is not supported.')
# enabling mixed precision to avoid underflow
optimizer = mixed_precision.LossScaleOptimizer(optimizer)
# compile model to start training
model.compile(
optimizer,
loss=config.MODEL_METADATA['loss'],
metrics=config.MODEL_METADATA['metrics']
)
model.summary()
# Disable AutoShard, data lives in memory, use in memory options
train_dataset = train_dataset.with_options(options)
val_dataset = val_dataset.with_options(options)
# Train the model and save to disk
model.fit(
get_training_dataset(
train_dataset,
config,
do_aug=config.MODEL_METADATA['do_aug']
),
initial_epoch=config.START_EPOCH,
epochs=config.N_EPOCHS,
steps_per_epoch=STEPS_PER_EPOCH,
validation_data=val_dataset,
callbacks=callbacks,
verbose=2
)
print(f'Execution time: {time.time() - t0}')
# -------------------------------------------------------------------------------
# main
# -------------------------------------------------------------------------------
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
72111d9069d5463f365998e1f2428329f7f7f195 | a79ab025913ba5a96b11bd506d9915f4533f4029 | /golfProj/golf_app/templatetags/golf_extras.py | 0dd0f7aa73defb8cb95847a41ca1632adbae8a5b | [] | no_license | jflynn87/golf_game | 2533548b1b8313661216446ddfa7927b63717118 | a24f710fbc39d25cc93b5b4c5c4d6575ef38c6bb | refs/heads/master | 2022-07-11T00:27:46.765936 | 2019-04-17T03:07:45 | 2019-04-17T03:07:45 | 174,344,305 | 0 | 0 | null | 2022-07-06T20:01:36 | 2019-03-07T12:57:45 | Python | UTF-8 | Python | false | false | 1,733 | py | from django import template
from golf_app.models import Picks, mpScores, Field, Tournament, Group
from django.db.models import Count
register = template.Library()
@register.filter
def model_name(obj):
return obj._meta.verbose_name
@register.filter
def currency(dollars):
dollars = int(dollars)
return '$' + str(dollars)
@register.filter
def line_break(count):
user_cnt = Picks.objects.filter(playerName__tournament__current=True).values('playerName__tournament').annotate(Count('user', distinct=True))
    if (count - 1) % user_cnt[0].get('user__count') == 0 or count == 0:
return True
else:
return False
@register.filter
def first_round(pick):
field = Field.objects.get(tournament__pga_tournament_num='470', playerName=pick)
wins = mpScores.objects.filter(player=field, round__lt=4, result="Yes").count()
losses = mpScores.objects.filter(player=field, round__lt=4, result="No").exclude(score="AS").count()
ties = mpScores.objects.filter(player=field, round__lt=4, score="AS").count()
return str(wins) + '-' + str(losses) + '-' + str(ties)
@register.filter
def leader(group):
#print ('group', group)
tournament = Tournament.objects.get(pga_tournament_num="470")
grp = Group.objects.get(tournament=tournament,number=group)
field = Field.objects.filter(tournament=tournament, group=grp)
golfer_dict = {}
    for golfer in field:
        # first_round() returns a "wins-losses-ties" string; split on '-'
        # instead of indexing characters so double-digit records parse correctly
        wins, losses, ties = first_round(golfer.playerName).split('-')
        golfer_dict[golfer.playerName] = int(wins) + .5 * int(ties)
#print ('leader', [k for k, v in golfer_dict.items() if v == max(golfer_dict.values())])
    winner = [k for k, v in golfer_dict.items() if v == max(golfer_dict.values())]
return winner
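# --- editor's note -----------------------------------------------------------
# A hypothetical template snippet showing how these filters could be used,
# assuming the library is loaded under the name golf_extras:
#
#   {% load golf_extras %}
#   {{ pick.playerName|first_round }}   {# renders "W-L-T" #}
#   {{ 2500|currency }}                 {# renders "$2500" #}
#   {{ group.number|leader }}           {# list of current group leaders #}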
| [
"[email protected]"
] | |
43897fd79e93876b6bb01c316ff69f8ac715aa83 | 4de0c6d3a820d7669fcef5fd035416cf85b35f23 | /ITcoach/爬虫课件/第三章:数据解析/6.xpath解析案例-58二手房.py | d01d163b95860803cf0863b3b681c3a5e230439b | [
"AFL-3.0"
] | permissive | ww35133634/chenxusheng | 5e1b7391a94387b73bcd7c4d12f1247b79be8016 | 666e0eb3aedde46342faf0d4030f5c72b10c9732 | refs/heads/master | 2022-11-12T03:46:47.953680 | 2020-07-02T20:50:56 | 2020-07-02T20:50:56 | 275,168,080 | 0 | 0 | AFL-3.0 | 2020-07-02T20:58:37 | 2020-06-26T13:54:48 | HTML | UTF-8 | Python | false | false | 800 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import requests
from lxml import etree
# Task: scrape the listing titles from 58.com's second-hand housing page
if __name__ == "__main__":
headers = {
'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36'
}
    # fetch the page source
    url = 'https://bj.58.com/ershoufang/'
    page_text = requests.get(url=url, headers=headers).text
    # parse the data
    tree = etree.HTML(page_text)
    # li_list stores the li tag objects
    li_list = tree.xpath('//ul[@class="house-list-wrap"]/li')
    fp = open('58.txt', 'w', encoding='utf-8')
    for li in li_list:
        # per-item (local) parsing
        title = li.xpath('./div[2]/h2/a/text()')[0]
        print(title)
        fp.write(title + '\n')
    fp.close()
| [
"[email protected]"
] | |
04e63b41a7f0e2b684daa0deadb5d48becf59923 | 8fd2e5d53d7a91d35288ccefdb0c7ef00d927a0a | /book_06_Python黑帽子/Chapter03/网口嗅探多网段版(Bug).py | a690d64efc8c84b89fe615b495c918e4ec44349e | [] | no_license | atlasmao/Python-book-code | 03501f9ca2e81bc1f47464b3227c7f9cda0d387c | 03b6848a15a7e4c2ffebdc3528c24a8b101d9f41 | refs/heads/master | 2022-01-06T23:45:21.951307 | 2019-07-11T10:32:22 | 2019-07-11T10:32:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,745 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import socket
import os
import struct
import threading
import time
from netaddr import IPNetwork, IPAddress
from ctypes import *
# host to listen on
host = '10.0.76.1'
# target subnet to scan
subnet = '10.0.10.0/24'
subnet_list = []
host_up_num = 0
# custom string we will look for in the ICMP responses
magic_message = "PYTHONRULES!"
# build the list of /24 subnets to sweep (varies the third octet)
def add_subnet(subnet):
temp_list = subnet.split(".")
for i in range(256):
temp_list[2] = str(i)
subnet_list.append(".".join(temp_list))
# send a batch of UDP datagrams to every host in the subnet
def udp_sender(subnet, magic_message):
    time.sleep(5)
    sender = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    for ip in IPNetwork(subnet):
        try:
            sender.sendto(magic_message, ("{}".format(ip), 65212))
        except:
            pass
    # close once after the whole sweep; the original closed the socket inside
    # the loop's finally block, which killed it after the first datagram
    sender.close()
# IP header definition
class IP(Structure):
_fields_ = [
("ihl", c_ubyte, 4),
("version", c_ubyte, 4),
("tos", c_ubyte),
("len", c_ushort),
("id", c_ushort),
("offset", c_ushort),
("ttl", c_ubyte),
("protocol_num", c_ubyte),
("sum", c_ushort),
# ("src", c_ulong),
("src", c_uint32),
# ("dst", c_ulong)
("dst", c_uint32)
]
def __new__(self, socket_buffer=None):
return self.from_buffer_copy(socket_buffer)
def __init__(self, socket_buffer=None):
        # map protocol numbers to protocol names
        self.protocol_map = {1: "ICMP", 6: "TCP", 17: "UDP"}
        # human-readable IP addresses
        # self.src_address = socket.inet_ntoa(struct.pack("<L", self.src))
        self.src_address = socket.inet_ntoa(struct.pack("@I", self.src))
        # self.dst_address = socket.inet_ntoa(struct.pack("<L", self.dst))
        self.dst_address = socket.inet_ntoa(struct.pack("@I", self.dst))
        # protocol type
try:
self.protocol = self.protocol_map[self.protocol_num]
except:
self.protocol = str(self.protocol_num)
class ICMP(Structure):
_fields_ = [
("type", c_ubyte),
("code", c_ubyte),
("checksum", c_ushort),
("unused", c_ushort),
("next_hop_mtu", c_ushort)
]
def __new__(self, socket_buffer):
return self.from_buffer_copy(socket_buffer)
def __init__(self, socket_buffer):
pass
# create a raw socket and bind it to the public interface
if os.name == "nt":
socket_protocol = socket.IPPROTO_IP
else:
socket_protocol = socket.IPPROTO_ICMP
sniffer = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket_protocol)
sniffer.bind((host, 0))
# include the IP header in the captured packets
sniffer.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# on Windows we need to send an IOCTL to enable promiscuous mode
if os.name == "nt":
sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
add_subnet(subnet)
for new_subnet in subnet_list:
print(new_subnet)
    # kick off the UDP sender for this subnet
t = threading.Thread(target=udp_sender, args=(new_subnet, magic_message))
t.start()
try:
while True:
        # read a packet
        raw_buffer = sniffer.recvfrom(65565)[0]
        # parse the first 20 bytes of the buffer as an IP header
        ip_header = IP(raw_buffer[0:20])
        # TODO: can be toggled on or off
        # print the protocol and the IP addresses of both endpoints
        # print "Protocol: {} {} -> {}".format(ip_header.protocol, ip_header.src_address, ip_header.dst_address)
        # if it is ICMP, process it further
        if ip_header.protocol == "ICMP":
            # compute the offset where the ICMP packet starts
            offset = ip_header.ihl * 4
            buf = raw_buffer[offset:offset + sizeof(ICMP)]
            # parse the ICMP data
            icmp_header = ICMP(buf)
            # print "ICMP -> Type: {} Code: {}".format(icmp_header.type, icmp_header.code)
            # check that both the type and the code equal 3 (port unreachable)
            if icmp_header.code == 3 and icmp_header.type == 3:
                # confirm the responding host lies in one of our target subnets;
                # the original (buggy) check against the base `subnet` missed
                # the other /24s generated by add_subnet()
                if any(IPAddress(ip_header.src_address) in IPNetwork(net) for net in subnet_list):
                    # confirm the ICMP data carries our custom string
                    if raw_buffer[len(raw_buffer) - len(magic_message):] == magic_message:
                        print("Host Up: {}".format(ip_header.src_address))
                        host_up_num += 1
                        print("Host Up Number: {}".format(host_up_num))
# handle CTRL-C
except KeyboardInterrupt:
    # if running on Windows, turn promiscuous mode off
    if os.name == "nt":
        sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
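# --- editor's note -----------------------------------------------------------
# Raw sockets require elevated privileges, so a typical (hypothetical) run is:
#   Linux:   sudo python2 this_script.py
#   Windows: run from an Administrator shell
# `host` above must be the IP of a local interface, and the netaddr package
# must be installed (pip install netaddr) for IPNetwork/IPAddress to resolve.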
| [
"[email protected]"
] | |
6a05561304bd78df0efc71b62b3659469610fd24 | f38193df76e7f86ad4017ec62dd7c90ce92e9b91 | /_src/om2py3w/3wex0/diary-server.py | 20b7585eed6f3dee62bf2c7213284c338396cdce | [
"MIT"
] | permissive | isynch/OMOOC2py | dcf54f9d2012d018c3b280d28d65058e6ae1dc08 | cc7fafc106b56553306988d07f0a4ab61bc39201 | refs/heads/master | 2020-04-25T23:30:15.410512 | 2015-12-05T07:31:56 | 2015-12-05T07:31:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | # -*- coding: utf-8 -*-
from datetime import datetime
import socket
import sys
HOST = '' # Symbolic name meaning all available interfaces
PORT = 1234 # Arbitrary non-privileged port
# Datagram(udp) socket
try:
s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
    print 'Creating socket...'
except socket.error, msg:
print 'Failed to create socket. Error Code : '+str(msg[0])+' Message ' +msg[1]
sys.exit()
# Bind socket to local host and port
try:
s.bind((HOST,PORT))
except socket.error, msg:
print 'Bind failed. Error Code: '+str(msg[0])+' Message '+msg[1]
sys.exit()
print 'Listening...'
# now keep talking with the client
while 1:
# receive data from client(data, addr)
d=s.recvfrom(1024)
data=d[0]
addr=d[1]
if not data:
break
today=datetime.now()
diary=data.strip()
print diary
diaryFile = open('diary.txt','a')
diaryFile.write('\n'+today.strftime("%y/%m/%d")+' client['+str(addr[1])+'] '+ diary)
diaryFile.close()
diaryFile = open('diary.txt')
diary = diaryFile.read()
    print('============ Diary ============')
    print(diary)
    reply='Saved it for you. Diary: '+data
s.sendto(reply,addr)
s.close()
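# --- editor's sketch (not part of the original homework) ----------------------
# A tiny client for manually testing the server above; the message text is
# arbitrary and the address mirrors the HOST/PORT constants.
def demo_client(message='went hiking today', addr=('127.0.0.1', 1234)):
    c = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    c.sendto(message, addr)
    reply = c.recvfrom(1024)[0]
    c.close()
    return reply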
| [
"[email protected]"
] | |
49f7dbbdfffd887a721bcc1a2ee1ced7e8de18d3 | 26bd175ffb3bd204db5bcb70eec2e3dfd55fbe9f | /exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/network/netvisor/pn_cpu_class.py | fadbed03e41b7d154a3530d1d8ce9f13d78ed446 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later",
"CC0-1.0",
"GPL-1.0-or-later"
] | permissive | tr3ck3r/linklight | 37814ed19173d893cdff161355d70a1cf538239b | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | refs/heads/master | 2021-04-11T04:33:02.727318 | 2020-03-25T17:38:41 | 2020-03-25T17:38:41 | 248,992,437 | 0 | 0 | MIT | 2020-03-21T14:26:25 | 2020-03-21T14:26:25 | null | UTF-8 | Python | false | false | 5,894 | py | #!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pn_cpu_class
author: "Pluribus Networks (@rajaspachipulusu17)"
short_description: CLI command to create/modify/delete cpu-class
description:
- This module can be used to create, modify and delete CPU class information.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
required: False
type: str
state:
description:
- State the action to perform. Use C(present) to create cpu-class and
C(absent) to delete cpu-class C(update) to modify the cpu-class.
required: True
type: str
choices: ['present', 'absent', 'update']
pn_scope:
description:
- scope for CPU class.
required: false
choices: ['local', 'fabric']
pn_hog_protect:
description:
- enable host-based hog protection.
required: False
type: str
choices: ['disable', 'enable', 'enable-and-drop']
pn_rate_limit:
description:
- rate-limit for CPU class.
required: False
type: str
pn_name:
description:
- name for the CPU class.
required: False
type: str
'''
EXAMPLES = """
- name: create cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'present'
pn_name: 'icmp'
pn_rate_limit: '1000'
pn_scope: 'local'
- name: delete cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'absent'
pn_name: 'icmp'
- name: modify cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'update'
pn_name: 'icmp'
pn_rate_limit: '2000'
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the cpu-class command.
returned: always
type: list
stderr:
description: set of error responses from the cpu-class command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli
from ansible_collections.community.general.plugins.module_utils.network.netvisor.netvisor import run_commands
def check_cli(module, cli):
"""
This method checks for idempotency using the cpu-class-show command.
    If a cpu-class with the given name exists, return True else False.
:param module: The Ansible module to fetch input parameters
:param cli: The CLI string
"""
name = module.params['pn_name']
clicopy = cli
cli += ' system-settings-show format cpu-class-enable no-show-headers'
out = run_commands(module, cli)[1]
out = out.split()
if 'on' not in out:
module.fail_json(
failed=True,
msg='Enable CPU class before creating or deleting'
)
cli = clicopy
cli += ' cpu-class-show format name no-show-headers'
out = run_commands(module, cli)[1]
if out:
out = out.split()
return True if name in out else False
def main():
""" This section is for arguments parsing """
state_map = dict(
present='cpu-class-create',
absent='cpu-class-delete',
update='cpu-class-modify'
)
module = AnsibleModule(
argument_spec=dict(
pn_cliswitch=dict(required=False, type='str'),
state=dict(required=True, type='str',
choices=state_map.keys()),
pn_scope=dict(required=False, type='str',
choices=['local', 'fabric']),
pn_hog_protect=dict(required=False, type='str',
choices=['disable', 'enable',
'enable-and-drop']),
pn_rate_limit=dict(required=False, type='str'),
pn_name=dict(required=False, type='str'),
),
required_if=(
['state', 'present', ['pn_name', 'pn_scope', 'pn_rate_limit']],
['state', 'absent', ['pn_name']],
['state', 'update', ['pn_name']],
)
)
# Accessing the arguments
cliswitch = module.params['pn_cliswitch']
state = module.params['state']
scope = module.params['pn_scope']
hog_protect = module.params['pn_hog_protect']
rate_limit = module.params['pn_rate_limit']
name = module.params['pn_name']
command = state_map[state]
# Building the CLI command string
cli = pn_cli(module, cliswitch)
NAME_EXISTS = check_cli(module, cli)
cli += ' %s name %s ' % (command, name)
if command == 'cpu-class-modify':
if NAME_EXISTS is False:
module.fail_json(
failed=True,
msg='cpu class with name %s does not exist' % name
)
if command == 'cpu-class-delete':
if NAME_EXISTS is False:
module.exit_json(
skipped=True,
msg='cpu class with name %s does not exist' % name
)
if command == 'cpu-class-create':
if NAME_EXISTS is True:
module.exit_json(
skipped=True,
msg='cpu class with name %s already exists' % name
)
if scope:
cli += ' scope %s ' % scope
if command != 'cpu-class-delete':
if hog_protect:
cli += ' hog-protect %s ' % hog_protect
if rate_limit:
cli += ' rate-limit %s ' % rate_limit
run_cli(module, cli, state_map)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
dfeaef8960d9c3c78351dc377c9805836cc90639 | 69cfe57220f789eb1d1966ed22c6823f0beeb8ce | /covid_venv/lib/python3.7/site-packages/dash_html_components/Pre.py | 46214932ac801cfab78d16fc03ee6f01f0cbd582 | [
"MIT"
] | permissive | paulsavala/Covid19-model | 664e31780ee1c8e4ef2115af2f41b27e832e5e50 | 41aa96d7c9abc117550f904af11815f507f0f0a0 | refs/heads/master | 2022-07-15T17:39:05.842619 | 2020-11-16T20:42:22 | 2020-11-16T20:42:22 | 252,545,888 | 2 | 0 | MIT | 2022-06-22T01:37:35 | 2020-04-02T19:19:25 | Python | UTF-8 | Python | false | false | 4,767 | py | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class Pre(Component):
"""A Pre component.
Pre is a wrapper for the <pre> HTML5 element.
For detailed attribute info see:
https://developer.mozilla.org/en-US/docs/Web/HTML/Element/pre
Keyword arguments:
- children (a list of or a singular dash component, string or number; optional): The children of this component
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique across all of the
components in an app.
- n_clicks (number; default 0): An integer that represents the number of times
that this element has been clicked on.
- n_clicks_timestamp (number; default -1): An integer that represents the time (in ms since 1970)
at which n_clicks changed. This can be used to tell
which button was changed most recently.
- key (string; optional): A unique identifier for the component, used to improve
performance by React.js while rendering components
See https://reactjs.org/docs/lists-and-keys.html for more info
- role (string; optional): The ARIA role attribute
- data-* (string; optional): A wildcard data attribute
- aria-* (string; optional): A wildcard aria attribute
- accessKey (string; optional): Keyboard shortcut to activate or add focus to the element.
- className (string; optional): Often used with CSS to style elements with common properties.
- contentEditable (string; optional): Indicates whether the element's content is editable.
- contextMenu (string; optional): Defines the ID of a <menu> element which will serve as the element's context menu.
- dir (string; optional): Defines the text direction. Allowed values are ltr (Left-To-Right) or rtl (Right-To-Left)
- draggable (string; optional): Defines whether the element can be dragged.
- hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional): Prevents rendering of given element, while keeping child elements, e.g. script elements, active.
- lang (string; optional): Defines the language used in the element.
- spellCheck (string; optional): Indicates whether spell checking is allowed for the element.
- style (dict; optional): Defines CSS styles which will override styles previously set.
- tabIndex (string; optional): Overrides the browser's default tab order and follows the one specified instead.
- title (string; optional): Text to be displayed in a tooltip when hovering over the element.
- loading_state (dict; optional): Object that holds the loading state object coming from dash-renderer. loading_state has the following type: dict containing keys 'is_loading', 'prop_name', 'component_name'.
Those keys have the following types:
- is_loading (boolean; optional): Determines if the component is loading or not
- prop_name (string; optional): Holds which property is loading
- component_name (string; optional): Holds the name of the component that is loading"""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, role=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self._type = 'Pre'
self._namespace = 'dash_html_components'
self._valid_wildcard_attributes = ['data-', 'aria-']
self.available_properties = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self.available_wildcard_properties = ['data-', 'aria-']
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(Pre, self).__init__(children=children, **args)
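# --- editor's note -----------------------------------------------------------
# Hypothetical usage sketch, assuming a Dash app context:
#
#   import dash_html_components as html
#   layout = html.Pre('preformatted log output', id='log',
#                     style={'fontSize': '12px'})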
| [
"[email protected]"
] | |
95e81629ec5b165d02943f34a71fc1f1080bcef5 | cfb4e8721137a096a23d151f2ff27240b218c34c | /mypower/matpower_ported/mp-opt-model/lib/@opt_model/solve.py | 4ebc7aba4174ad01cb668380102ca885eae9fb2c | [
"Apache-2.0"
] | permissive | suryo12/mypower | eaebe1d13f94c0b947a3c022a98bab936a23f5d3 | ee79dfffc057118d25f30ef85a45370dfdbab7d5 | refs/heads/master | 2022-11-25T16:30:02.643830 | 2020-08-02T13:16:20 | 2020-08-02T13:16:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | def solve(*args,nout=5,oc=None):
    if oc is None:
from .....oc_matpower import oc_matpower
oc = oc_matpower()
    return oc.solve(*args, nout=nout)
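# --- editor's note -----------------------------------------------------------
# Hypothetical usage sketch; the exact public layout of the `mypower` package
# is an assumption here:
#
#   from mypower import loadcase, solve
#   results, success = solve(loadcase('case9'), nout=2)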
| [
"[email protected]"
] | |
3c6dc99ca36a539efb2e696f6b57cbd205a83f8b | ae7ba9c83692cfcb39e95483d84610715930fe9e | /baidu/Paddle/paddle/trainer/tests/config_parser_test.py | 5ca874cec7914a20f79c2c7b1873c5bd04f60dca | [
"Apache-2.0"
] | permissive | xenron/sandbox-github-clone | 364721769ea0784fb82827b07196eaa32190126b | 5eccdd8631f8bad78eb88bb89144972dbabc109c | refs/heads/master | 2022-05-01T21:18:43.101664 | 2016-09-12T12:38:32 | 2016-09-12T12:38:32 | 65,951,766 | 5 | 7 | null | null | null | null | UTF-8 | Python | false | false | 1,002 | py | # Copyright (c) 2016 Baidu, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.trainer.config_parser import parse_config_and_serialize
if __name__ == '__main__':
parse_config_and_serialize('trainer/tests/test_config.conf', '')
parse_config_and_serialize(
'trainer/tests/sample_trainer_config.conf',
'extension_module_name=paddle.trainer.config_parser_extension')
parse_config_and_serialize('gserver/tests/pyDataProvider/trainer.conf', '')
| [
"[email protected]"
] | |
1f4e873eab2fcd41f293bcb80c7e8ae1e5eb4377 | 0019ea5621577ab9a9a694e3ef91d913e981a28e | /missing_data_complementor/__init__.py | 47ea50802fb334e4e0fffda9b4c61d9c2aa1527b | [] | no_license | sungc1/fake-news-framework_Py3 | 676710b3bf7b8feb4c237ffed7d1d280f4967890 | e3552b5bc2a30dbd52ad893ce8dd29aa2242f864 | refs/heads/main | 2023-01-19T23:42:13.294446 | 2020-12-01T18:38:31 | 2020-12-01T18:38:31 | 428,178,049 | 1 | 0 | null | 2021-11-15T08:18:23 | 2021-11-15T08:18:23 | null | UTF-8 | Python | false | false | 47 | py | #
# Created by Aviad on 03-Jun-16 11:40 AM.
# | [
"[email protected]"
] | |
7d8115df6fa61bc6f721bc8db8bd47858dc75982 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/primu.py | 74ed5f7f4b48b1b61044808885c34bd9dce48229 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 255 | py | ii = [('WilbRLW.py', 1), ('WilkJMC3.py', 3), ('ClarGE2.py', 2), ('GellWPT2.py', 1), ('WilkJMC2.py', 1), ('LyelCPG.py', 1), ('SoutRD.py', 3), ('WilkJMC.py', 3), ('WestJIT.py', 1), ('FitzRNS.py', 1), ('DibdTRL.py', 1), ('EvarJSP.py', 1), ('SadlMLP2.py', 1)] | [
"[email protected]"
] | |
ae27520913674390e809620c54463d13c4e88d63 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /GIT-USERS/TOM-Lambda/CS35_IntroPython_GP/day3/intro/11_args.py | 2ec2eca832f454921138650bfb137e422a0c4711 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 2,852 | py | # Experiment with positional arguments, arbitrary arguments, and keyword
# arguments.
# Write a function f1 that takes two integer positional arguments and returns
# the sum. This is what you'd consider to be a regular, normal function.
def f1(a, b):
    return a + b
print(f1(1, 2))
# Write a function f2 that takes any number of integer arguments and prints the
# sum. Google for "python arbitrary arguments" and look for "*args"
def f2(*args):
    sum = 0
    for i in args:
        sum += i
    return sum
print(f2(1))  # Should print 1
print(f2(1, 3))  # Should print 4
print(f2(1, 4, -12))  # Should print -7
print(f2(7, 9, 1, 3, 4, 9, 0)) # Should print 33
a = [7, 6, 5, 4]
# What thing do you have to add to make this work?
print(f2(*a))  # Should print 22
# Write a function f3 that accepts either one or two arguments. If one argument,
# it returns that value plus 1. If two arguments, it returns the sum of the
# arguments. Google "python default arguments" for a hint.
<<<<<<< HEAD
def f3(a, b=1):
return a + b
print(f3(1, 2)) # Should print 3
print(f3(8)) # Should print 9
=======
def f3(a, b=1):
return a + b
print(f3(1, 2)) # Should print 3
print(f3(8)) # Should print 9
>>>>>>> 23fb4d348bb9c7b7b370cb2afcd785793e3816ea
# Write a function f4 that accepts an arbitrary number of keyword arguments and
# prints out the keys and values like so:
#
# key: foo, value: bar
# key: baz, value: 12
#
# Google "python keyword arguments".
<<<<<<< HEAD
def f4(**kwargs):
for k, v in kwargs.items():
print(f'key: {k}, value: {v}')
# Alternate:
# for k in kwargs:
# print(f'key: {k}, value: {kwargs[k]}')
=======
def f4(**kwargs):
for k, v in kwargs.items():
print(f"key: {k}, value: {v}")
# Alternate:
# for k in kwargs:
# print(f'key: {k}, value: {kwargs[k]}')
>>>>>>> 23fb4d348bb9c7b7b370cb2afcd785793e3816ea
# Should print
# key: a, value: 12
# key: b, value: 30
f4(a=12, b=30)
# Should print
# key: city, value: Berkeley
# key: population, value: 121240
# key: founded, value: "March 23, 1868"
f4(city="Berkeley", population=121240, founded="March 23, 1868")
<<<<<<< HEAD
d = {
"monster": "goblin",
"hp": 3
}
=======
d = {"monster": "goblin", "hp": 3}
>>>>>>> 23fb4d348bb9c7b7b370cb2afcd785793e3816ea
# What thing do you have to add to make this work?
f4(**d)
| [
"[email protected]"
] | |
7accaa8ad9e3c45b158dd9537e55e683338dea29 | 70e1159856750f04e58c0ffc3f54d094a4602c07 | /booktest/views.py | 84958fd19d5631e83ebfd2b20bac16190adc186f | [] | no_license | wxp19940506/django_test | 032e78a4eb45eb0c54dbafd43dfd0e463d455bb5 | c586cb62d1bb1a21f3430155b3d82ab7b2a65da6 | refs/heads/master | 2021-05-10T11:52:54.186422 | 2018-01-22T07:55:11 | 2018-01-22T07:55:11 | 118,424,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | from django.shortcuts import render
from django.http import *
from django.template import RequestContext,loader
from .models import *
# Create your views here.
def index(request):
# temp = loader.get_template("booktest/index.html")
#
# return HttpResponse(temp.render())
booklist = BookInfo.objects.all()
context = {'lists':booklist}
return render(request,'booktest/index.html',context)
def show(request,id):
book = BookInfo.objects.get(pk=id)
herolist = book.heroinfo_set.all()
context = {'list':herolist}
return render(request,'booktest/show.html',context)
| [
"[email protected]"
] | |
e7fc2c8eede38ab1d057f2930410a29a6191871a | f14946892dcc62732cffd0dba364d2098e6de607 | /converter.py | 40fb58d5f55a79226720f093675f16897083b36c | [] | no_license | DollaR84/notes | 556368c12b0ead9901b05b95a5691138b588eb86 | a74ec7cf41b842501d1c24ec3b180d76be1fbef1 | refs/heads/master | 2023-03-26T21:04:37.415037 | 2020-06-18T14:11:15 | 2020-06-18T14:11:15 | 223,773,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,300 | py | """
Converter database from old versions to new.
Created on 19.04.2020
@author: Ruslan Dolovanyuk
"""
from copy import deepcopy
from datetime import datetime
import os
from database import Database
import tables
import updates
class DBConverter:
"""Converter database on new update versions."""
def __init__(self, db_name):
"""initialization converter database."""
self.__db_name = db_name
self.__db = Database()
self.__update_functions = [
'update_db2',
'update_db3',
'update_db4',
'update_db5',
]
def __get_old_data(self, tables_list):
"""Get all data from old database."""
self.__old_data = {table: self.__db.get("SELECT * FROM %s" % table) for table in tables_list}
def checker(self, db, tables_dict):
"""Check and return version input database."""
tables_db = db.get_tables_names()
tables_cr = tables.get_tables_names(tables_dict)
diff_tables = list(set(tables_cr) - set(tables_db))
if not diff_tables:
for table in tables_cr:
columns_db = db.get_columns_names(table)
diff_columns = list(set(tables.get_columns_names(tables_dict[table])) - set(columns_db))
if 'order_sort' in diff_columns:
return 1
elif 'readonly' in diff_columns:
return 2
elif ('date_create' in diff_columns) and ('date_update' in diff_columns):
return 3
elif ('state_check' in diff_columns) and ('state' in diff_columns):
return 4
else:
pass
elif 'states' in diff_tables:
return 4
else:
pass
return tables.VERSION
def __save_old_db(self, db_name, version):
"""Saving old databases before updates."""
date = datetime.strftime(datetime.now(), "%d.%m.%Y")
time = datetime.strftime(datetime.now(), "%H.%M.%S")
try:
os.rename(''.join([db_name, '.db']), ''.join([db_name, '.v{}.'.format(version), date, '.db']))
except:
os.rename(''.join([db_name, '.db']), ''.join([db_name, '.v{}.'.format(version), date, '.', time, '.db']))
def update_db(self, db_ver, tables_dict_default, update_func):
"""Run update database tables."""
self.__db.connect(self.__db_name + '.db')
self.__get_old_data(self.__db.get_tables_names())
self.__db.disconnect()
self.__save_old_db(self.__db_name, db_ver)
self.__db.connect(self.__db_name + '.db')
tables_dict = deepcopy(tables_dict_default)
for table in tables_dict.keys():
tables_dict[table].extend(updates.columns_all(table, db_ver+1))
script = 'CREATE TABLE {} ({}) WITHOUT ROWID'.format(table,
', '.join([' '.join(row) for row in tables_dict[table]]))
self.__db.put(script)
columns = tables.get_columns_names(tables_dict[table])
rows = self.__old_data.get(table, [])
update_func(table, columns, rows)
self.__db.commit()
self.__db.disconnect()
def update_db2(self, table, columns, rows):
"""Update database tables from version database 1 to version 2."""
counter = {}
for row in rows:
if table == 'notes':
parent = row[-1]
if parent not in counter:
counter[parent] = 0
counter[parent] += 1
script = 'INSERT INTO {} ({}) VALUES ({}, {})'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]),
counter[parent])
else:
script = 'INSERT INTO {} ({}) VALUES ({})'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
self.__db.put(script, *row)
def update_db3(self, table, columns, rows):
"""Update database tables from version database 2 to version 3."""
for row in rows:
if table == 'notes':
script = 'INSERT INTO {} ({}) VALUES ({}, 0)'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
else:
script = 'INSERT INTO {} ({}) VALUES ({})'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
self.__db.put(script, *row)
def update_db4(self, table, columns, rows):
"""Update database tables from version database 3 to version 4."""
for row in rows:
if table == 'notes':
script = 'INSERT INTO {} ({}) VALUES ({}, "", "")'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
else:
script = 'INSERT INTO {} ({}) VALUES ({})'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
self.__db.put(script, *row)
def update_db5(self, table, columns, rows):
"""Update database tables from version database 4 to version 5."""
for row in rows:
if table == 'notes':
script = 'INSERT INTO {} ({}) VALUES ({}, 0, "")'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
else:
script = 'INSERT INTO {} ({}) VALUES ({})'.format(table,
', '.join(columns),
', '.join(['?' for _ in range(len(row))]))
self.__db.put(script, *row)
def check_rows(self, db, tables_dict):
"""Add rows in updates databases."""
for table in list(tables_dict.keys()):
update_dict = updates.ROWS.get(table, {})
for version, rows in update_dict.items():
if version <= tables.VERSION:
if db.get_last_id(table) < int(rows[-1].split(', ')[0]):
columns = tables.get_columns_names(tables_dict[table])
for row in rows:
script = 'INSERT INTO {} ({}) VALUES ({})'.format(table, ', '.join(columns), row)
db.put(script)
db.commit()
def run(self, tables_dict_default, tables_dict):
"""Run convert data from old database to new."""
try:
self.__db.connect(self.__db_name + '.db')
db_ver = self.checker(self.__db, tables_dict)
self.__db.disconnect()
for index in range(db_ver-1, tables.VERSION-1):
self.update_db(index+1, tables_dict_default, getattr(self, self.__update_functions[index]))
except Exception as e:
print(e)
return False
return True
def main():
"""Main running this script."""
dbconv = DBConverter('notes')
    # run() requires the default and current table definitions; the attribute
    # names below are assumptions about the `tables` module's public API
    dbconv.run(tables.TABLES_DEFAULT, tables.TABLES)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
8ef6b58674a55f6236df4da9f882ab9310c12fb8 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/GstGL/GLMemoryAllocatorClass.py | 09d94c14d0970b096b8877bf3ada06ef684d53ce | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 5,176 | py | # encoding: utf-8
# module gi.repository.GstGL
# from /usr/lib64/girepository-1.0/GstGL-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.repository.Gst as __gi_repository_Gst
import gi.repository.GstBase as __gi_repository_GstBase
import gobject as __gobject
class GLMemoryAllocatorClass(__gi.Struct):
"""
:Constructors:
::
GLMemoryAllocatorClass()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
copy = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
map = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
unmap = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
_padding = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(GLMemoryAllocatorClass), '__module__': 'gi.repository.GstGL', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'GLMemoryAllocatorClass' objects>, '__weakref__': <attribute '__weakref__' of 'GLMemoryAllocatorClass' objects>, '__doc__': None, 'parent_class': <property object at 0x7f56a4000bd0>, 'map': <property object at 0x7f56a4000cc0>, 'copy': <property object at 0x7f56a4000db0>, 'unmap': <property object at 0x7f56a4000ea0>, '_padding': <property object at 0x7f56a4000f90>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(GLMemoryAllocatorClass)
| [
"[email protected]"
] | |
8a344aae06dbeb32785b94bf82c33c8f84c20b41 | 55d13d3e41d8651facf7c26d60de5e8b8ace4be5 | /piedpiper/crab/multicrab-0.py | ac77e6ce915307348ba0838f04a1b7373744c932 | [] | no_license | phylsix/Firefighter | e8ab5fdbde2dab341a67740aa62c5710683e9bab | 8f1d8d6e59b443a8216c70ebdd334b48945aeed0 | refs/heads/master | 2020-12-19T18:31:14.312639 | 2020-08-04T00:35:45 | 2020-08-04T00:35:45 | 235,812,142 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,190 | py | #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import time
from os.path import basename, join
import yaml
from CRABAPI.RawCommand import crabCommand
from crabConfig_0 import *
from Firefighter.piedpiper.utils import *
verbose = False
alwaysDoCmd = True
if os.environ["CMSSW_BASE"] not in os.path.abspath(__file__):
print("$CMSSW_BASE: ", os.environ["CMSSW_BASE"])
print("__file__: ", os.path.abspath(__file__))
sys.exit("Inconsistant release environment!")
BASEDIR = join(os.environ["CMSSW_BASE"], "src/Firefighter/piedpiper")
CONFIG_NAME = sys.argv[1]
assert os.path.isfile(CONFIG_NAME)
def main():
multiconf = yaml.load(open(CONFIG_NAME).read())
gridpacks = multiconf["gridpacks"]
njobs = multiconf["njobs"]
year = multiconf["year"]
lxy = multiconf["lxy"]
ctaus = multiconf.get("ctaus", None)
assert len(gridpacks) == len(ctaus)
ctaumap = dict(zip(gridpacks, ctaus))
config.Data.totalUnits = config.Data.unitsPerJob * njobs
config.Data.outLFNDirBase += "/{0}".format(year)
# loop through
donelist = list()
for gridpack in gridpacks:
print("gridpack:", gridpack)
#'SIDM_XXTo2ATo4Mu_mXX-1000_mA-0p25_slc6_amd64_gcc481_CMSSW_7_1_30_tarball.tar.xz'
gridpack_name = basename(gridpack)
## outputPrimaryDataset: SIDM_XXTo2ATo4Mu or SIDM_XXTo2ATo2Mu2e
config.Data.outputPrimaryDataset = gridpack_name.split("_mXX")[0]
## outputDatasetTag: mXX-1000_mA-0p25_lxy-0p3_ctau-0p001875_GENSIM_2018
mxxma = gridpack_name.split("_", 2)[-1].split("_slc")[0]
lxystr = str(lxy).replace(".", "p")
ctaustr = str(ctaumap[gridpack]).replace(".", "p")
config.Data.outputDatasetTag = "{}_lxy-{}_ctau-{}_GENSIM_{}".format(
mxxma, lxystr, ctaustr, year
)
## requestName
config.General.requestName = "_".join(
[
config.Data.outputPrimaryDataset,
config.Data.outputDatasetTag,
time.strftime("%y%m%d-%H%M%S"),
]
)
if gridpack.startswith("root://"):
cpcmd = "xrdcp -f {0} {1}".format(gridpack, join(BASEDIR, "cfg/gridpack.tar.xz"))
elif gridpack.startswith("http"):
cpcmd = "wget -q {} -O {}".format(gridpack, join(BASEDIR, "cfg/gridpack.tar.xz"))
else:
cpcmd = "cp {0} {1}".format(gridpack, join(BASEDIR, "cfg/gridpack.tar.xz"))
if verbose:
print("$", cpcmd)
print(
"$ cat", join(BASEDIR, "python/externalLHEProducer_and_PYTHIA8_Hadronizer_cff.py")
)
print(get_gentemplate(year).format(CTAU=ctaumap[gridpack]))
print("------------------------------------------------------------")
print(config)
print("------------------------------------------------------------")
doCmd = True if alwaysDoCmd else raw_input("OK to go? [y/n]").lower() in ["y", "yes"]
if doCmd:
# 1. copy gridpack
os.system(cpcmd)
# 2. write genfrag_cfi
with open(
join(BASEDIR, "python/externalLHEProducer_and_PYTHIA8_Hadronizer_cff.py"), "w"
) as genfrag_cfi:
genfrag_cfi.write(get_gentemplate(year).format(CTAU=ctaumap[gridpack]))
# 3. write gen_cfg
cfgcmd = get_command("GEN-SIM", year, rand=False)
os.system(cfgcmd)
# 4. crab submit
crabCommand("submit", config=config)
donelist.append(gridpack)
print("submitted: ", len(donelist))
for x in donelist:
print(x)
print("------------------------------------------------------------")
undonelist = [x for x in gridpacks if x not in donelist]
print("unsubmitted: ", len(undonelist))
for x in undonelist:
print(x)
if undonelist:
with open("unsubmitted-0.yml.log", "w") as outf:
yaml.dump(
{"gridpacks": undonelist, "njobs": njobs, "year": year},
outf,
default_flow_style=False,
)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
aa36fc5578e1ff9d3e2ca3774590d9e2ac4b034b | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week01/hoework01/gettop10frommaoyam01_20200626091702.py | d2722f72cb3c318b6baafd5cd7fd7285bc7c6d98 | [] | no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 2,407 | py | # 使用requests,bs4库,爬取猫眼电影top10的电影名称、电影类型、上映时间,并以utf-8的字符集保存到csv文件中
import requests
from bs4 import BeautifulSoup as bs
maoyanUrl = "https://maoyan.com/board/4";
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
header = {
'Content-Type': 'text/plain; charset=UTF-8',
'Cookie' : '__mta=251934006.1593072991075.1593100662316.1593100664951.15; uuid_n_v=v1; uuid=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; _csrf=8557626db9b655cf9050ae7e5b2aab69278c8061c21eca95e1c3cf2130b0b64c; _lxsdk_cuid=172ea8cb247c8-0a73066b1c0a8b-4353760-100200-172ea8cb248c8; _lxsdk=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; mojo-uuid=c457eacb7c1eb59d3d2f6c1f8d75b9c9; Hm_lvt_703e94591e87be68cc8da0da7cbd0be2=1593072989,1593073002; _lx_utm=utm_source%3Dgoogle%26utm_medium%3Dorganic; __mta=251934006.1593072991075.1593075275703.1593078726963.7; mojo-session-id={"id":"435818e6a726415f46defffa27f7abc6","time":1593100221937}; Hm_lpvt_703e94591e87be68cc8da0da7cbd0be2=1593100665; mojo-trace-id=17; _lxsdk_s=172ec2bff67-0c2-e9f-c64%7C%7C24__mta=251934006.1593072991075.1593100690175.1593100868002.17; uuid_n_v=v1; uuid=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; _csrf=8557626db9b655cf9050ae7e5b2aab69278c8061c21eca95e1c3cf2130b0b64c; _lxsdk_cuid=172ea8cb247c8-0a73066b1c0a8b-4353760-100200-172ea8cb248c8; _lxsdk=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; mojo-uuid=c457eacb7c1eb59d3d2f6c1f8d75b9c9; Hm_lvt_703e94591e87be68cc8da0da7cbd0be2=1593072989,1593073002; _lx_utm=utm_source%3Dgoogle%26utm_medium%3Dorganic; __mta=251934006.1593072991075.1593075275703.1593078726963.7; Hm_lpvt_703e94591e87be68cc8da0da7cbd0be2=1593100868; _lxsdk_s=172ee2f4a3e-1c2-3a1-5a4%7C%7C1',
# 'Host' : 'http://www.baidu.com',
'Origin': 'https://maoyan.com',
'Referer': 'https://maoyan.com/board/4',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36',
}
response = requests.get(maoyanUrl,headers=header)
response.encoding = 'utf-8'
bs_info = bs(response.text,"html.parser")
# print(response.text)
for tag in bs_info.find_all('div',attrs={'class' : 'movie-item-content'}):
print(tag)
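# --- editor's sketch of the remaining homework step ---------------------------
# Pull the fields out of each block and append them to a UTF-8 CSV.  The
# 'name' and 'releasetime' selectors are assumptions about maoyan's markup and
# may need adjusting; the genre is not shown on the board page, so it is left
# blank here.
import csv
with open('maoyan_top10.csv', 'w', encoding='utf-8', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(['name', 'type', 'release_time'])
    for tag in bs_info.find_all('div', attrs={'class': 'movie-item-content'}):
        name = tag.find('p', attrs={'class': 'name'}).get_text(strip=True)
        release = tag.find('p', attrs={'class': 'releasetime'}).get_text(strip=True)
        writer.writerow([name, '', release])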
| [
"[email protected]"
] | |
03622786a4de2d5c12beee1a16d5fba75dcf2347 | 29ad9caf139fab91580d7be36c9bd07623c4ca4d | /py/edu_freq_min.py | fc3b361beeafe60bea31d57a072936492e1f99f0 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-unknown"
] | permissive | bcgov/flatfile-tools | f324687389a508aad641131f70bb66c533917bbe | 749071129cab7a598bd4c2edf050dce59324a97f | refs/heads/master | 2021-06-10T15:14:08.266856 | 2021-05-13T14:23:48 | 2021-05-13T14:23:48 | 183,680,156 | 2 | 0 | Apache-2.0 | 2020-04-15T02:21:47 | 2019-04-26T18:58:46 | Python | UTF-8 | Python | false | false | 3,247 | py | # 20190315 take pharmanet dispensations, look for earliest dispense date of drug type, as well as dispense frequency
# output has same data, with freq and min_srv_date added
import os
import sys
import time
from misc import *
def expected(f_name, lookup):
if f_name not in lookup:
err("expected field: " + str(f_name))
def freq_min(fn):
f = open(fn)
    if f is None:
err("failed to open file: " + str(fn))
fields = f.readline().strip().split(",")
print fields
lookup = {}
for i in range(0, len(fields)):
lookup[fields[i].lower()] = i
print " ", lookup
for i in ["studyid", "hp.din_pin"]:
expected(i, lookup)
#mindate, freq = f(studyid, hp.din_pin)
dat = {}
ci = 0
f_size = os.stat(fn).st_size
tt = ttt = t_0 = time.time()
    while True:
        line = f.readline()
        if line == "":
            # readline() returns '' only at EOF; the original parsed it into
            # [''] and continued, which spun forever at end of file
            break
        words = line.strip().split(",")
        if words == ['']:
            continue
for i in range(0, len(words)):
words[i] = words[i].strip().lower()
if len(words) != len(fields):
print words
err("wrong number of fields, check csv file")
key = words[lookup["studyid"]] + "," + words[lookup["hp.gen_drug"]]
if key not in dat:
# freq = 1, min(serv_date) = serve_date
dat[key] = [1, words[lookup["srv_date"]]]
else:
freq, min_serv_date = dat[key]
freq += 1
date = words[lookup["srv_date"]]
min_serv_date = min_serv_date if min_serv_date < date else date
dat[key] = [freq, min_serv_date]
ci += 1
if ci % 100000 == 0:
ttt = tt
tt = time.time()
print "file", " %: ", 100. * (float(f.tell()) / float(f_size)), " MB/s:", (float(f.tell()) / 1000000.) / (tt- t_0)#
f.close()
f = open(fn)
if f is None:
err("failed to open file: " + str(fn))
print " +r " + fn
g_n = fn + "_freq-min.csv"
print " +w " + g_n
g = open(g_n, "wb")
print " +w " + g_n
if g is None:
err("failed to open file: " + str(g_n))
fields.append("freq")
fields.append("min_srv_date")
g.write(",".join(fields))
f.readline() # fields
ci = 0
    while True:
        line = f.readline()
        if line == "":
            # '' from readline() means EOF; continuing here looped forever
            break
        line = line.strip()
        if line == "":
            continue
words = line.split(",")
for i in range(0, len(words)):
words[i] = words[i].strip().lower()
key = words[lookup["studyid"]] + "," + words[lookup["hp.gen_drug"]]
if key not in dat:
err("key should have been found")
freq, min_serv_date = dat[key]
g.write("\n" + line + "," + str(freq) + "," + str(min_serv_date))
ci += 1
if ci % 100000 == 0:
ttt = tt
tt = time.time()
print "file", " %: ", 100. * (float(f.tell()) / float(f_size)), " MB/s:", (float(f.tell()) / 1000000.) / (tt- t_0)#
f.close()
g.close()
freq_min("dsp_rpt.dat_slice.csv_select-STUDY.csv_lookup.csv")
freq_min("dsp_rpt.dat_slice.csv_select-CONTROL.csv_lookup.csv") | [
"[email protected]"
] | |
e0887b70f4b7024270a588e59d6a5d81ec0959c3 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/signalrservice/v20210601preview/get_signal_r.py | e126d745e3c8f5f3fc1a5876c117c9fc8754627f | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,073 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetSignalRResult',
'AwaitableGetSignalRResult',
'get_signal_r',
'get_signal_r_output',
]
@pulumi.output_type
class GetSignalRResult:
"""
    A class that represents a resource.
"""
def __init__(__self__, cors=None, disable_aad_auth=None, disable_local_auth=None, external_ip=None, features=None, host_name=None, id=None, identity=None, kind=None, location=None, name=None, network_acls=None, private_endpoint_connections=None, provisioning_state=None, public_network_access=None, public_port=None, server_port=None, shared_private_link_resources=None, sku=None, system_data=None, tags=None, tls=None, type=None, upstream=None, version=None):
if cors and not isinstance(cors, dict):
raise TypeError("Expected argument 'cors' to be a dict")
pulumi.set(__self__, "cors", cors)
if disable_aad_auth and not isinstance(disable_aad_auth, bool):
raise TypeError("Expected argument 'disable_aad_auth' to be a bool")
pulumi.set(__self__, "disable_aad_auth", disable_aad_auth)
if disable_local_auth and not isinstance(disable_local_auth, bool):
raise TypeError("Expected argument 'disable_local_auth' to be a bool")
pulumi.set(__self__, "disable_local_auth", disable_local_auth)
if external_ip and not isinstance(external_ip, str):
raise TypeError("Expected argument 'external_ip' to be a str")
pulumi.set(__self__, "external_ip", external_ip)
if features and not isinstance(features, list):
raise TypeError("Expected argument 'features' to be a list")
pulumi.set(__self__, "features", features)
if host_name and not isinstance(host_name, str):
raise TypeError("Expected argument 'host_name' to be a str")
pulumi.set(__self__, "host_name", host_name)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if network_acls and not isinstance(network_acls, dict):
raise TypeError("Expected argument 'network_acls' to be a dict")
pulumi.set(__self__, "network_acls", network_acls)
if private_endpoint_connections and not isinstance(private_endpoint_connections, list):
raise TypeError("Expected argument 'private_endpoint_connections' to be a list")
pulumi.set(__self__, "private_endpoint_connections", private_endpoint_connections)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_network_access and not isinstance(public_network_access, str):
raise TypeError("Expected argument 'public_network_access' to be a str")
pulumi.set(__self__, "public_network_access", public_network_access)
if public_port and not isinstance(public_port, int):
raise TypeError("Expected argument 'public_port' to be a int")
pulumi.set(__self__, "public_port", public_port)
if server_port and not isinstance(server_port, int):
raise TypeError("Expected argument 'server_port' to be a int")
pulumi.set(__self__, "server_port", server_port)
if shared_private_link_resources and not isinstance(shared_private_link_resources, list):
raise TypeError("Expected argument 'shared_private_link_resources' to be a list")
pulumi.set(__self__, "shared_private_link_resources", shared_private_link_resources)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if tls and not isinstance(tls, dict):
raise TypeError("Expected argument 'tls' to be a dict")
pulumi.set(__self__, "tls", tls)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if upstream and not isinstance(upstream, dict):
raise TypeError("Expected argument 'upstream' to be a dict")
pulumi.set(__self__, "upstream", upstream)
if version and not isinstance(version, str):
raise TypeError("Expected argument 'version' to be a str")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def cors(self) -> Optional['outputs.SignalRCorsSettingsResponse']:
"""
Cross-Origin Resource Sharing (CORS) settings.
"""
return pulumi.get(self, "cors")
@property
@pulumi.getter(name="disableAadAuth")
def disable_aad_auth(self) -> Optional[bool]:
"""
        DisableAadAuth
        Enable or disable AAD auth
When set as true, connection with AuthType=aad won't work.
"""
return pulumi.get(self, "disable_aad_auth")
@property
@pulumi.getter(name="disableLocalAuth")
def disable_local_auth(self) -> Optional[bool]:
"""
DisableLocalAuth
Enable or disable local auth with AccessKey
When set as true, connection with AccessKey=xxx won't work.
"""
return pulumi.get(self, "disable_local_auth")
@property
@pulumi.getter(name="externalIP")
def external_ip(self) -> str:
"""
The publicly accessible IP of the resource.
"""
return pulumi.get(self, "external_ip")
@property
@pulumi.getter
def features(self) -> Optional[Sequence['outputs.SignalRFeatureResponse']]:
"""
List of the featureFlags.
FeatureFlags that are not included in the parameters for the update operation will not be modified.
And the response will only include featureFlags that are explicitly set.
When a featureFlag is not explicitly set, its globally default value will be used
But keep in mind, the default value doesn't mean "false". It varies in terms of different FeatureFlags.
"""
return pulumi.get(self, "features")
@property
@pulumi.getter(name="hostName")
def host_name(self) -> str:
"""
FQDN of the service instance.
"""
return pulumi.get(self, "host_name")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource Id for the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.ManagedIdentityResponse']:
"""
The managed identity response
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
The kind of the service - e.g. "SignalR" for "Microsoft.SignalRService/SignalR"
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The GEO location of the resource. e.g. West US | East US | North Central US | South Central US.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkACLs")
def network_acls(self) -> Optional['outputs.SignalRNetworkACLsResponse']:
"""
Network ACLs
"""
return pulumi.get(self, "network_acls")
@property
@pulumi.getter(name="privateEndpointConnections")
def private_endpoint_connections(self) -> Sequence['outputs.PrivateEndpointConnectionResponse']:
"""
Private endpoint connections to the resource.
"""
return pulumi.get(self, "private_endpoint_connections")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicNetworkAccess")
def public_network_access(self) -> Optional[str]:
"""
Enable or disable public network access. Default to "Enabled".
When it's Enabled, network ACLs still apply.
When it's Disabled, public network access is always disabled no matter what you set in network ACLs.
"""
return pulumi.get(self, "public_network_access")
@property
@pulumi.getter(name="publicPort")
def public_port(self) -> int:
"""
The publicly accessible port of the resource which is designed for browser/client side usage.
"""
return pulumi.get(self, "public_port")
@property
@pulumi.getter(name="serverPort")
def server_port(self) -> int:
"""
The publicly accessible port of the resource which is designed for customer server side usage.
"""
return pulumi.get(self, "server_port")
@property
@pulumi.getter(name="sharedPrivateLinkResources")
def shared_private_link_resources(self) -> Sequence['outputs.SharedPrivateLinkResourceResponse']:
"""
The list of shared private link resources.
"""
return pulumi.get(self, "shared_private_link_resources")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.ResourceSkuResponse']:
"""
The billing information of the resource.(e.g. Free, Standard)
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Tags of the service which is a list of key value pairs that describe the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def tls(self) -> Optional['outputs.SignalRTlsSettingsResponse']:
"""
TLS settings.
"""
return pulumi.get(self, "tls")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource - e.g. "Microsoft.SignalRService/SignalR"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def upstream(self) -> Optional['outputs.ServerlessUpstreamSettingsResponse']:
"""
Upstream settings when the service is in server-less mode.
"""
return pulumi.get(self, "upstream")
@property
@pulumi.getter
def version(self) -> str:
"""
Version of the resource. Probably you need the same or higher version of client SDKs.
"""
return pulumi.get(self, "version")
class AwaitableGetSignalRResult(GetSignalRResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSignalRResult(
cors=self.cors,
disable_aad_auth=self.disable_aad_auth,
disable_local_auth=self.disable_local_auth,
external_ip=self.external_ip,
features=self.features,
host_name=self.host_name,
id=self.id,
identity=self.identity,
kind=self.kind,
location=self.location,
name=self.name,
network_acls=self.network_acls,
private_endpoint_connections=self.private_endpoint_connections,
provisioning_state=self.provisioning_state,
public_network_access=self.public_network_access,
public_port=self.public_port,
server_port=self.server_port,
shared_private_link_resources=self.shared_private_link_resources,
sku=self.sku,
system_data=self.system_data,
tags=self.tags,
tls=self.tls,
type=self.type,
upstream=self.upstream,
version=self.version)
def get_signal_r(resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSignalRResult:
"""
    A class representing a resource.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str resource_name: The name of the resource.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:signalrservice/v20210601preview:getSignalR', __args__, opts=opts, typ=GetSignalRResult).value
return AwaitableGetSignalRResult(
cors=__ret__.cors,
disable_aad_auth=__ret__.disable_aad_auth,
disable_local_auth=__ret__.disable_local_auth,
external_ip=__ret__.external_ip,
features=__ret__.features,
host_name=__ret__.host_name,
id=__ret__.id,
identity=__ret__.identity,
kind=__ret__.kind,
location=__ret__.location,
name=__ret__.name,
network_acls=__ret__.network_acls,
private_endpoint_connections=__ret__.private_endpoint_connections,
provisioning_state=__ret__.provisioning_state,
public_network_access=__ret__.public_network_access,
public_port=__ret__.public_port,
server_port=__ret__.server_port,
shared_private_link_resources=__ret__.shared_private_link_resources,
sku=__ret__.sku,
system_data=__ret__.system_data,
tags=__ret__.tags,
tls=__ret__.tls,
type=__ret__.type,
upstream=__ret__.upstream,
version=__ret__.version)
@_utilities.lift_output_func(get_signal_r)
def get_signal_r_output(resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSignalRResult]:
"""
    A class representing a resource.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str resource_name: The name of the resource.
"""
...
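# Usage sketch (hypothetical resource names; assumes the SignalR instance
# already exists in the subscription):
#
#   result = get_signal_r(resource_group_name="my-rg",
#                         resource_name="my-signalr")
#   pulumi.export("hostName", result.host_name)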
| [
"[email protected]"
] | |
c8705454f5b80ca5aca9c2228cd462665605112d | f8e03a0724516b7cc2299f6c7a8cef544fa32484 | /source/pic2card/mystique/group_design_objects.py | e59231d840bf9b34a839f234137c4999867a8772 | [
"MIT"
] | permissive | isabella232/AdaptiveCards | cc3904f0782bd94087ae0a0df0ee2db954facdde | 766750517196d05f4466941647e07a8a298257b2 | main | 2023-03-07T22:13:55.327587 | 2020-11-17T02:02:15 | 2020-11-17T02:02:15 | 313,699,024 | 0 | 0 | MIT | 2021-02-23T16:14:48 | 2020-11-17T17:51:17 | null | UTF-8 | Python | false | false | 21,437 | py | """Module for grouping design objects into different containers"""
from operator import itemgetter
from typing import List, Dict, Callable, Tuple, Optional
from mystique import config
from mystique.extract_properties import CollectProperties
class GroupObjects:
"""
    Handles the grouping of a given list of objects under any condition
    that is passed.
"""
def object_grouping(self, design_objects: List[Dict],
condition: Callable[[Dict, Dict],
bool]) -> List[List[Dict]]:
"""
        Groups the given list of design objects under any given condition.
        @param design_objects: list of design objects to group
@param condition: Grouping condition function
@return: Grouped list of design objects.
"""
groups = []
grouped_positions = []
for ctr1, design_object1 in enumerate(design_objects):
temp_list = []
for ctr2, design_object2 in enumerate(design_objects):
if condition(design_object1, design_object2):
present = False
present_position = -1
append_object = False
append_position = -1
for ctr, gr in enumerate(groups):
if design_object2 in gr:
present = True
present_position = ctr
if design_object1 in gr:
append_object = True
append_position = ctr
if not present and not append_object:
temp_list.append(design_object2)
grouped_positions.append(ctr2)
elif not present and append_object:
groups[append_position].append(design_object2)
grouped_positions.append(ctr2)
elif present and not append_object:
groups[present_position].append(design_object1)
grouped_positions.append(ctr1)
elif (present and append_object and
present_position != append_position):
groups[present_position] += groups[append_position]
del groups[append_position]
if temp_list:
groups.append(temp_list)
for ctr, design_object in enumerate(design_objects):
if ctr not in grouped_positions:
groups.append([design_object])
return groups
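# Example sketch (hypothetical objects): group boxes whose ymin values lie
# within 10px of each other.
#
#   go = GroupObjects()
#   objs = [{"ymin": 10.0}, {"ymin": 12.0}, {"ymin": 100.0}]
#   groups = go.object_grouping(
#       objs,
#       lambda a, b: a is not b and abs(a["ymin"] - b["ymin"]) <= 10)
#   # groups -> [[{"ymin": 12.0}, {"ymin": 10.0}], [{"ymin": 100.0}]]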
class ImageGrouping(GroupObjects):
"""
    Groups the image objects of the adaptive card objects into imagesets or
individual image objects.
"""
# Image objects within the 10px ymin range and 100px range difference are
# grouped into imagesets.
IMAGE_SET_YMIN_RANGE = 10.0
IMAGE_SET_X_RANGE = 100.0
def __init__(self, card_arrange):
self.card_arrange = card_arrange
def imageset_condition(self, design_object1: Dict,
design_object2: Dict) -> bool:
"""
Returns a condition boolean value for grouping image objects into
imagesets
@param design_object1: image object
@param design_object2: image object
@return: boolean value
"""
if design_object1.get("xmin") < design_object2.get("xmin"):
xmax = design_object1.get("xmax")
xmin = design_object2.get("xmin")
else:
xmax = design_object2.get("xmax")
xmin = design_object1.get("xmin")
ymin_diff = abs(
design_object1.get("ymin") - design_object2.get("ymin")
)
x_diff = abs(xmax - xmin)
return (ymin_diff <= self.IMAGE_SET_YMIN_RANGE
and x_diff <= self.IMAGE_SET_X_RANGE)
def group_image_objects(self, image_objects, body, objects, ymins=None,
is_column=None) -> [List, Optional[Tuple]]:
"""
Groups the image objects into imagesets which are in
closer ymin range.
@param image_objects: list of image objects
@param body: list card deisgn elements.
@param ymins: list of ymins of card design
elements
@param objects: list of all design objects
@param is_column: boolean value to check if an object is inside a
columnset or not
@return: List of remaining image objects after the grouping if the
grouping is done outside the columnset container
else returned list of remaining image objects along
with its coordinate values.
"""
# group the image objects based on ymin
groups = self.object_grouping(image_objects, self.imageset_condition)
delete_positions = []
design_object_coords = []
for group in groups:
group = [dict(t) for t in {tuple(d.items()) for d in group}]
# group = self.remove_duplicates(group)
if len(group) > 1:
group = sorted(group, key=lambda i: i["xmin"])
image_set = {
"type": "ImageSet",
"imageSize": "Auto",
"images": []
}
sizes = []
alignment = []
image_xmins = []
for ctr, design_object in enumerate(group):
index = objects.index(design_object)
if index not in delete_positions:
delete_positions.append(index)
sizes.append(design_object.get("size", "Auto"))
alignment.append(design_object.get(
"horizontal_alignment", "Left"))
image_xmins.append(design_object.get("xmin"))
self.card_arrange.append_objects(design_object,
image_set["images"])
image_set["images"] = [x for _, x in sorted(
zip(image_xmins,
image_set["images"]),
key=lambda x: x[0])]
# Assign the imageset's size and alignment property based on
# each image's alignment and size properties inside the imgaeset
image_set["imageSize"] = max(set(sizes), key=sizes.count)
preference_order = ["Left", "Center", "Right"]
if len(alignment) == len(list(set(alignment))):
alignment.sort(key=(preference_order + alignment).index)
image_set["horizontalAlignment"] = alignment[0]
else:
image_set["horizontalAlignment"] = max(set(alignment),
key=alignment.count)
image_set["coords"] = str(group[0].get("coords"))
body.append(image_set)
if ymins:
ymins.append(design_object.get("ymin"))
if is_column:
design_object_coords.append(group[0].get("xmin"))
design_object_coords.append(group[0].get("ymin"))
design_object_coords.append(group[0].get("xmax"))
design_object_coords.append(group[0].get("ymax"))
objects = [design_objects for ctr, design_objects in enumerate(objects)
if ctr not in delete_positions]
if is_column:
return objects, design_object_coords
else:
return objects
class ColumnsGrouping(GroupObjects):
"""
Groups the design objects into different columns of a columnset
"""
def __init__(self, card_arrange):
self.card_arrange = card_arrange
def horizontal_inclusive(self, object_one: Dict, object_two: Dict) -> bool:
"""
        Returns the horizontal inclusive condition
@param object_one: design object one
@param object_two: design object two
@return: the boolean value of the inclusive condition
"""
return (((object_one and object_two) and (
(object_one.get("xmin") <= object_two.get(
"xmin") <= object_one.get(
"xmax") and object_one.get(
"xmin") <= object_two.get(
"xmax") <= object_one.get(
"xmax"))
or (object_two.get("xmin") <= object_one.get(
"xmin") <= object_two.get(
"xmax") <= object_one.get("xmax") and
object_two.get(
"xmax") <= object_one.get(
"xmax")
) or (object_one.get(
"xmin") <= object_two.get(
"xmin") <= object_one.get(
"xmax") <= object_two.get(
"xmax") and object_two.get(
"xmax") >= object_one.get("xmin")
))
) or ((object_two and object_one) and
((object_two.get("xmin")
<= object_one.get("xmin")
<= object_two.get("xmax")
and object_two.get("xmin")
<= object_one.get("xmax")
<= object_two.get("xmax"))
or (object_one.get("xmin")
<= object_one.get("xmin")
and object_one.get("xmax")
<= object_two.get("xmax")
and object_two.get("xmin")
<= object_one.get("xmax")
<= object_two.get("xmax"))
or (object_two.get("xmin")
<= object_one.get("xmin")
<= object_two.get("xmax")
<= object_one.get("xmax")
and object_one.get("xmax")
>= object_two.get("xmin"))))
)
def vertical_inclusive(self, object_one: Dict, object_two: Dict) -> bool:
"""
Returns the vertical inclusive condition
@param object_one: design object one
@param object_two: design object two
@return: the boolean value of the inclusive condition
"""
return (
((object_one and object_two) and
((object_one.get("ymin")
<= object_two.get("ymin") <= object_one.get("ymax")
and object_one.get("ymin") <= object_two.get("ymax")
<= object_one.get("ymax"))
or (object_two.get("ymin") <= object_one.get(
"ymin") <= object_two.get(
"ymax") <= object_one.get("ymax")
and object_two.get("ymax") <= object_one.get("ymax"))
or (object_one.get("ymin") <= object_two.get("ymin")
<= object_one.get("ymax") <= object_two.get("ymax")
and object_two.get("ymax") >= object_one.get("ymin"))
))
or ((object_two and object_one)
and ((object_two.get("ymin") <= object_one.get("ymin")
<= object_two.get("ymax") and object_two.get("ymin")
<= object_one.get("ymax") <= object_two.get("ymax"))
or (object_one.get("ymin") <= object_one.get("ymin")
and object_one.get("ymax")
<= object_two.get("ymax")
and object_two.get("ymin")
<= object_one.get("ymax")
<= object_two.get("ymax"))
or (object_two.get("ymin") <= object_one.get("ymin")
<= object_two.get("ymax")
<= object_one.get("ymax")
and object_one.get("ymax")
>= object_two.get("ymin"))
))
)
def max_min_difference(self, design_object1: Dict,
design_object2: Dict, way: str) -> float:
"""
        Returns the edge-to-edge gap between the two design objects
        @param design_object1: design object one
        @param design_object2: design object two
        @param way: "x" for the xmax-xmin gap, otherwise the ymax-ymin gap
        @return: rounded gap along the chosen axis
"""
max = "ymax"
min = "ymin"
if way == "x":
max = "xmax"
min = "xmin"
if design_object1.get(min) < design_object2.get(min):
return round(abs(design_object2.get(min) - design_object1.get(max)))
else:
return round(abs(design_object1.get(min) - design_object2.get(max)))
def columns_condition(self, design_object1: Dict,
design_object2: Dict) -> bool:
"""
Returns a condition boolean value for grouping objects into
columnsets
@param design_object1: design object
@param design_object2: design object
@return: boolean value
"""
y_diff = self.max_min_difference(design_object1, design_object2,
way="y")
object_one = None
object_two = None
if (design_object1.get("object") == "image"
and design_object2.get("object") != "image"):
object_one = design_object1
object_two = design_object2
elif (design_object2.get("object") == "image"
and design_object1.get("object") != "image"):
object_one = design_object2
object_two = design_object1
elif (design_object2.get("object") == "image"
and design_object1.get("object") == "image"):
object_one = design_object1
object_two = design_object2
return (design_object1 != design_object2 and (
(abs(design_object1.get("ymin", 0)
- design_object2.get("ymin", 0))
<= config.COLUMNSET_GROUPING.get("ymin_difference", ""))
or self.vertical_inclusive(object_one, object_two)
or (y_diff <
config.COLUMNSET_GROUPING.get("ymax-ymin_difference", "")
and self.horizontal_inclusive(object_one, object_two)
)))
def columns_row_condition(self, design_object1: Dict,
design_object2: Dict) -> bool:
"""
Returns a condition boolean value for grouping columnset grouped
objects into different columns.
@param design_object1: design object
@param design_object2: design object
@return: boolean value
"""
extract_properites = CollectProperties()
x_diff = self.max_min_difference(design_object1, design_object2,
way="x")
point1 = (design_object1.get("xmin"), design_object1.get("ymin"),
design_object1.get("xmax"), design_object1.get("ymax"))
point2 = (design_object2.get("xmin"), design_object2.get("ymin"),
design_object2.get("xmax"), design_object2.get("ymax"))
if design_object1.get("ymin") < design_object2.get("ymin"):
object_one = design_object1
object_two = design_object2
else:
object_one = design_object2
object_two = design_object1
condition = (design_object1 != design_object2
and ((design_object1.get("object") == "image"
and design_object2.get("object") == "image"
and abs(design_object1.get("ymin")
- design_object2.get("ymin"))
<= config.COLUMNSET_GROUPING.get("ymin_difference")
and x_diff <= config.COLUMNSET_GROUPING.get(
"xmax-xmin_difference", ""))
or self.horizontal_inclusive(object_one, object_two)
)
)
intersection = extract_properites.find_iou(point1, point2,
columns_group=True)[0]
if intersection and point1 != point2:
condition = condition and (
intersection
and (
(object_one.get("xmin") <=
object_two.get("xmin") <= object_one.get("xmax")
and object_one.get("xmin") <=
object_two.get("xmax") <= object_one.get("xmax")
)
or (object_two.get("xmin") <=
object_one.get("xmin") <= object_two.get("xmax")
and object_two.get("xmin") <=
object_one.get("xmax") <= object_two.get("xmax")
)
)
)
return condition
class ChoicesetGrouping(GroupObjects):
"""
Groups the radiobutton objects of the adaptive card objects into a
    choiceset or individual radiobutton objects.
"""
# The design objects are grouped in choicesets based on 2 conditions:
# If the radiobuttons are within the range of 10px of ymax - ymin
    # If the radiobuttons are within the range of 30px of ymins.
CHOICESET_Y_RANGE = 10
CHOICESET_YMIN_RANGE = 30
def __init__(self, card_arrange):
self.card_arrange = card_arrange
def choiceset_condition(self, design_object1: Dict,
design_object2: Dict) -> bool:
"""
        Returns a condition boolean value for grouping radio button objects
        into a choiceset
        @param design_object1: radio button object
        @param design_object2: radio button object
@return: boolean value
"""
design_object1_ymin = float(design_object1.get("ymin"))
design_object2_ymin = float(design_object2.get("ymin"))
difference_in_ymin = abs(design_object1_ymin - design_object2_ymin)
if design_object1_ymin > design_object2_ymin:
difference_in_y = float(
design_object2.get("ymax")) - design_object1_ymin
else:
difference_in_y = float(
design_object1.get("ymax")) - design_object2_ymin
return (abs(difference_in_y) <= self.CHOICESET_Y_RANGE
and difference_in_ymin <= self.CHOICESET_YMIN_RANGE)
def group_choicesets(self, radiobutton_objects: Dict, body: List[Dict],
ymins=None) -> None:
"""
Groups the choice elements into choicesets based on
the closer ymin range
@param radiobutton_objects: list of individual choice
elements
        @param body: list of card design elements
        @param ymins: list of ymins of design elements
"""
groups = []
radio_buttons = []
if isinstance(radiobutton_objects, dict):
for key, values in radiobutton_objects.items():
radio_buttons.append(values)
radiobutton_objects = radio_buttons
if len(radiobutton_objects) == 1:
# radiobutton_objects = [radiobutton_objects]
groups = [radiobutton_objects]
if not groups:
groups = self.object_grouping(radiobutton_objects,
self.choiceset_condition)
for group in groups:
group = sorted(group, key=itemgetter("ymin"))
choice_set = {
"type": "Input.ChoiceSet",
"choices": [],
"style": "expanded"
}
alignment = []
for design_object in group:
self.card_arrange.append_objects(design_object,
choice_set["choices"])
alignment.append(design_object.get("horizontal_alignment",
"Left"))
preference_order = ["Left", "Center", "Right"]
if len(alignment) == len(list(set(alignment))):
alignment.sort(key=(preference_order + alignment).index)
choice_set["horizontalAlignment"] = alignment[0]
else:
choice_set["horizontalAlignment"] = max(set(alignment),
key=alignment.count)
body.append(choice_set)
if ymins is not None and len(group) > 0:
ymins.append(design_object.get("ymin"))
| [
"[email protected]"
] | |
fedf9a4adf68e18a4d492e204426b009e4c03540 | 299c07abf832ba8b0a4181c526f95d6f861c3623 | /pages/views.py | 483e3caba3bab343c52bb5dfe15734215146eb40 | [] | no_license | ananyajana/hello-world | 37640880b8df2b170a4d64a7893eced35cf07293 | c498ec70016e22978f2c3f0365d6a38522254b72 | refs/heads/master | 2023-08-14T22:52:25.634216 | 2020-05-25T11:38:28 | 2020-05-25T11:38:28 | 266,763,806 | 0 | 0 | null | 2021-09-22T19:09:50 | 2020-05-25T11:39:56 | Python | UTF-8 | Python | false | false | 166 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def homePageView(request):
return HttpResponse('Hello, World!')
| [
"[email protected]"
] | |
5504d599f5231dfb970d783217327010a3757c72 | e5e2b7da41fda915cb849f031a0223e2ac354066 | /sdk/python/pulumi_azure_native/netapp/v20201201/snapshot.py | 5dc6e64c2d7c5f24cdb196fcb956b80495f2cc6e | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | johnbirdau/pulumi-azure-native | b7d3bdddeb7c4b319a7e43a892ddc6e25e3bfb25 | d676cc331caa0694d8be99cb90b93fa231e3c705 | refs/heads/master | 2023-05-06T06:48:05.040357 | 2021-06-01T20:42:38 | 2021-06-01T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,959 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = ['SnapshotArgs', 'Snapshot']
@pulumi.input_type
class SnapshotArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
pool_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
volume_name: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
snapshot_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Snapshot resource.
:param pulumi.Input[str] account_name: The name of the NetApp account
:param pulumi.Input[str] pool_name: The name of the capacity pool
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] volume_name: The name of the volume
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[str] snapshot_name: The name of the mount target
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "pool_name", pool_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "volume_name", volume_name)
if location is not None:
pulumi.set(__self__, "location", location)
if snapshot_name is not None:
pulumi.set(__self__, "snapshot_name", snapshot_name)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
The name of the NetApp account
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="poolName")
def pool_name(self) -> pulumi.Input[str]:
"""
The name of the capacity pool
"""
return pulumi.get(self, "pool_name")
@pool_name.setter
def pool_name(self, value: pulumi.Input[str]):
pulumi.set(self, "pool_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="volumeName")
def volume_name(self) -> pulumi.Input[str]:
"""
The name of the volume
"""
return pulumi.get(self, "volume_name")
@volume_name.setter
def volume_name(self, value: pulumi.Input[str]):
pulumi.set(self, "volume_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="snapshotName")
def snapshot_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the mount target
"""
return pulumi.get(self, "snapshot_name")
@snapshot_name.setter
def snapshot_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "snapshot_name", value)
class Snapshot(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
pool_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
snapshot_name: Optional[pulumi.Input[str]] = None,
volume_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Snapshot of a Volume
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the NetApp account
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[str] pool_name: The name of the capacity pool
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] snapshot_name: The name of the mount target
:param pulumi.Input[str] volume_name: The name of the volume
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SnapshotArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Snapshot of a Volume
:param str resource_name: The name of the resource.
:param SnapshotArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SnapshotArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
pool_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
snapshot_name: Optional[pulumi.Input[str]] = None,
volume_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SnapshotArgs.__new__(SnapshotArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["location"] = location
if pool_name is None and not opts.urn:
raise TypeError("Missing required property 'pool_name'")
__props__.__dict__["pool_name"] = pool_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["snapshot_name"] = snapshot_name
if volume_name is None and not opts.urn:
raise TypeError("Missing required property 'volume_name'")
__props__.__dict__["volume_name"] = volume_name
__props__.__dict__["created"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["snapshot_id"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:netapp/v20201201:Snapshot"), pulumi.Alias(type_="azure-native:netapp:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20170815:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20170815:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20190501:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20190501:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20190601:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20190601:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20190701:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20190701:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20190801:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20190801:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20191001:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20191001:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20191101:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20191101:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200201:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200201:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200301:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200301:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200501:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200501:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200601:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200601:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200701:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200701:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200801:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200801:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20200901:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20200901:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20201101:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20201101:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20210201:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20210201:Snapshot"), pulumi.Alias(type_="azure-native:netapp/v20210401preview:Snapshot"), pulumi.Alias(type_="azure-nextgen:netapp/v20210401preview:Snapshot")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Snapshot, __self__).__init__(
'azure-native:netapp/v20201201:Snapshot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Snapshot':
"""
Get an existing Snapshot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = SnapshotArgs.__new__(SnapshotArgs)
__props__.__dict__["created"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["snapshot_id"] = None
__props__.__dict__["type"] = None
return Snapshot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def created(self) -> pulumi.Output[str]:
"""
The creation date of the snapshot
"""
return pulumi.get(self, "created")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Azure lifecycle management
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="snapshotId")
def snapshot_id(self) -> pulumi.Output[str]:
"""
UUID v4 used to identify the Snapshot
"""
return pulumi.get(self, "snapshot_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
| [
"[email protected]"
] | |
e1ca10f66fe6e4a01b92ace526335679d0427751 | 42f4238073a70d1494537f8c8b07835b531e73a9 | /benchmarks/beach/redist_beach_erosion_board_waves_3d_c0p1_n.py | e0d724264d63efa1c4516fe87fb96968f2ac296f | [] | no_license | erdc/proteus-mprans | bd99257af7b3bbe08386533faf072dba22e93a61 | f8f4d20bc870b361c64c8ca2ceb99f045b373323 | refs/heads/master | 2022-09-11T13:18:39.973962 | 2022-08-11T16:27:29 | 2022-08-11T16:27:29 | 2,303,947 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,767 | py | from proteus import *
from proteus.default_n import *
from redist_beach_erosion_board_waves_3d_p import *
from beach_erosion_board_waves_3d import *
if rdtimeIntegration == 'newton':
timeIntegration = NoIntegration
stepController = Newton_controller
elif rdtimeIntegration == 'tte':
timeIntegration = BackwardEuler_cfl
timeIntegration = PsiTCtte
elif rdtimeIntegration == 'osher-fmm':
timeIntegration = BackwardEuler_cfl
stepController = Osher_FMM_controller
runCFL=1.0
else:
timeIntegration = BackwardEuler_cfl
stepController = Osher_PsiTC_controller
#stepController = Osher_controller
runCFL=1.0
# timeIntegration = PsiTCtte
# stepController = PsiTCtte_controller
# rtol_res[0] = 0.0
# atol_res[0] = 0.1*L[0]/(nn-1.0)#10% of he
#runCFL=1.0
#DT=None
if spaceOrder == 1:
femSpaces = {0:C0_AffineLinearOnSimplexWithNodalBasis}
if spaceOrder == 2:
femSpaces = {0:C0_AffineQuadraticOnSimplexWithNodalBasis}
elementQuadrature = SimplexGaussQuadrature(nd,sloshbox_quad_order)
elementBoundaryQuadrature = SimplexGaussQuadrature(nd-1,sloshbox_quad_order)
subgridErrorType = HamiltonJacobi_ASGS
if LevelModelType == RDLS.LevelModel:#RDLSV2.OneLevelRDLSV2 and not RDLSV2.debugRDLS:
subgridErrorType = HamiltonJacobi_ASGS_opt
if rdtimeIntegration == 'newton':
subgridError = subgridErrorType(coefficients,nd,stabFlag='2',lag=False)
else:
subgridError = subgridErrorType(coefficients,nd,stabFlag='2',lag=True)
#subgridError = HamiltonJacobi_ASGS(coefficients,nd,lag=True)
shockCapturing = None
#shockCapturing = ResGrad_SC(coefficients,nd,shockCapturingFactor=0.9,lag=False)
if rdtimeIntegration == 'newton':
shockCapturing = ResGradQuad_SC(coefficients,nd,shockCapturingFactor=rd_shockCapturingFactor,lag=False)
else:
shockCapturing = ResGradQuad_SC(coefficients,nd,shockCapturingFactor=rd_shockCapturingFactor,lag=True)
massLumping = False
#multilevelNonlinearSolver = MultilevelEikonalSolver
#levelNonlinearSolver = UnstructuredFMMandFSWsolvers.FMMEikonalSolver
multilevelNonlinearSolver = NLNI
levelNonlinearSolver = Newton
if rdtimeIntegration != 'newton':
maxLineSearches = 0
nonlinearSmoother = NLGaussSeidel
fullNewtonFlag = True
#this needs to be set appropriately for pseudo-transient
tolFac = 0.0
nl_atol_res = 0.01*L[0]/nn
atol_res[0] = 1.0e-6 #for pseudo transient
rtol_res[0] = 0.0
numericalFluxType = DoNothing
maxNonlinearIts = 50 #1 for PTC
matrix = SparseMatrix
if usePETSc:
numericalFluxType = DoNothing
multilevelLinearSolver = PETSc
levelLinearSolver = PETSc
else:
multilevelLinearSolver = LU
levelLinearSolver = LU
linearSmoother = GaussSeidel
linTolFac = 0.001
conservativeFlux = None
| [
"[email protected]"
] | |
2c730e56bc9a5b4983cd6e076bc899c0964737a6 | 58e15fca047ece58bfa391a6efb88ec1b46997b2 | /venv/lib/python3.8/site-packages/selenium/webdriver/common/actions/wheel_input.py | 64bb6db22df1194f5b148b59bd4fabff2289bdc4 | [] | no_license | su2708/WebScraper | 6594f102336de0bb0ebc2c3feafdb4d87da52ad3 | a084afce4a8dacb4d4beb43924aa85a831c3b4ec | refs/heads/main | 2023-02-20T15:43:37.246448 | 2023-01-15T17:26:28 | 2023-01-15T17:26:28 | 323,025,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | /home/runner/.cache/pip/pool/f6/5c/a4/99012d79ffc30cf1b681f61f329c7ebd8c280d8c5fcc537385cf91bd63 | [
"[email protected]"
] | |
9dca95f0eadc9b7576cb73579313ffa2ab36aaa3 | 444670e6d73ae9d95c0bb0459c8e02423876d2fb | /pycharm/LoginSite/mylogin/migrations/0001_initial.py | 08c4cb3c5cfd13d3c86c5e92dc2a59b4d175f342 | [] | no_license | zhangxingxing12138/web-pycharm | c8b6822be95bfb904f81f772185fe9e17fc77fc3 | 5f212e6805b0734aa3c791830526a95b24a930f4 | refs/heads/master | 2020-04-04T18:03:45.458309 | 2018-11-08T12:03:51 | 2018-11-08T12:03:51 | 156,148,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-11-06 00:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128, unique=True)),
('password', models.CharField(max_length=256)),
('email', models.EmailField(max_length=254, unique=True)),
('sex', models.CharField(choices=[('male', '男'), ('female', '女')], default='男', max_length=32)),
('c_time', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ['-c_time'],
'verbose_name': '用户',
'verbose_name_plural': '用户',
},
),
]
| [
"[email protected]"
] | |
450e45abb2e6f78121b9289dfc49ce668ece839a | 5fa293d0ef6f3bdc4791d869cf503b107cc3a5fb | /soap_client/urls.py | 5e70df6a51ac6d70d04e1a6e01da2fd63ec1b6aa | [
"MIT"
] | permissive | alldevic/nav_info | 0779ab116dd7718ac1d63fecfbc2d47dd8863c22 | 32681d1cd3ad43472c8f7fb49922094c4045111c | refs/heads/master | 2023-08-27T00:23:57.233994 | 2021-11-05T15:24:48 | 2021-11-05T15:24:48 | 278,404,502 | 0 | 0 | MIT | 2021-11-05T15:24:49 | 2020-07-09T15:37:41 | Python | UTF-8 | Python | false | false | 317 | py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from soap_client import views
router = DefaultRouter()
router.register('raw', views.RawViewSet, basename='raw')
router.register('data', views.DataViewSet, basename='data')
urlpatterns = [
path('', include(router.urls)),
]
| [
"[email protected]"
] | |
1a71d1d48c8c1e7899c78ae5ffffd819170fff8b | 0c5fed6415f7a307d0885d7579969d4e8f0121c8 | /Assignements/MT17143_Assignment5&6/MT17143_Problem1.py | fb5a51bc4753c66cf95906fd1944be7a9261bf8c | [] | no_license | akshitasawhney3008/Machine-Learning | 4032dfb3efaa7fdeb794913bb30e39f7a2fece31 | bef1672ecb51e07b291349af9df219d2bfea8f2d | refs/heads/master | 2023-02-02T16:22:01.016505 | 2020-12-10T10:37:49 | 2020-12-10T10:37:49 | 320,232,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,121 | py | #MT17143 Akshita Sawhney
#Problem 1 RNA Sequencing
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.cluster import KMeans
from sklearn.cluster import MiniBatchKMeans
from sklearn.cluster import SpectralClustering
from sklearn.manifold import TSNE
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
import numpy as np
import math
montpick= open("Read_count_rattus.txt",'r')
matrix = []
read_file = montpick.readlines()
for line in read_file: #file is extracted in a 2D matrix
row = []
list_of_words = line.split()
for i in range(1, len(list_of_words)):
row.append(int(list_of_words[i]))
matrix.append(row)
#Normalization
trc = 0 # total read count is calculated
for l in matrix:
for el in l:
trc+=el
sum=0
count=0
# print(len(matrix[1]))
for i in range(len(matrix[0])): # Sum of each column is calculated
column_sum = 0
for l in matrix:
column_sum += l[i]
sum+=column_sum
sum=sum/len(matrix[0])  # mean column sum, i.e. the average library size
for l in matrix: #Each readcount value is divided by the total read count
for i in range(len(l)):
div = float(l[i]/trc)
l[i]=div
for l in matrix: #Each readcount value is then multiplied by the sum of columns
for i in range(len(l)):
l[i]= float(l[i] * sum)
#Log Transform
for l in matrix:
for i in range(len(l)):
l[i]=math.log(1+l[i],2)
# print(matrix)
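# Each entry is now log2(1 + normalized count); the +1 keeps zero counts
# finite and the log compresses the dynamic range across genes.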
# print("hi")
input_matrix = np.array(matrix)
# print(M)
#The actual data matrix is extracted from the phenodata which acts as the true data.
phenotype = []
phenodata = open("Phenotype.txt",'r')
lines= phenodata.readlines()
for l in lines:
phen = l.split()
phenotype.append(int(phen[0]))
# phenotype1 = phenotype[1:]
true_matrix= np.array(phenotype)
#Input Data is split into Train and Test set with test size to be 33%
X_train, X_test, y_train, y_test = train_test_split(np.transpose(input_matrix),true_matrix,test_size=0.33)
#Kmeans Clustering is performed
kmeans=KMeans(n_clusters=2, random_state= 0).fit(X_train)
kmean_prediction = kmeans.predict(X_test) #Test data is passed to check the results.
print(accuracy_score(y_test,kmean_prediction)*100) # Accuracy of the predicted output with true data is taken out.
X_train, X_test, y_train, y_test = train_test_split(np.transpose(input_matrix),true_matrix,test_size=0.33)
#MiniBatchKmeans clustering is performed
Minibatchkmeans = MiniBatchKMeans(n_clusters=2, random_state= 0).fit(X_train)
minibatchkmean_prediction = Minibatchkmeans.predict(X_test)
print(accuracy_score(y_test,minibatchkmean_prediction)*100)
#Principal Component Analysis is performed to reduce the input data to two dimensions.
pca = PCA(n_components=2).fit_transform(np.transpose(input_matrix))
# pca_fit = pca.fit(np.transpose(input_matrix))
y_trans = np.transpose(true_matrix)
plt.scatter(pca[:, 0], pca[:, 1], y_trans.shape[0], c = y_trans) #Scatter is used to visualize the graph
plt.show() | [
"[email protected]"
] | |
b3953b62fa3db6523934640dd6efa14908a3bbea | c5744c2fda48ae6a79c155c641fe98021a0cb7f3 | /PP4E/System/setenv.py | a03d7c04233f64c0efbc11ad24b5bc1eaace0f37 | [] | no_license | skinkie/Scripts | e0fd3d3f767612ade111f28bc7af3e1b25fc2947 | 80a1ba71ddf9a0c5ff33866832cb5c42aca0c0b1 | refs/heads/master | 2021-05-31T16:57:21.100919 | 2016-05-23T09:58:59 | 2016-05-23T09:58:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | #!/usr/bin/env python
# eg. 3-3
import os
print('setenv...', end=' ')
print(os.environ['USER'])
os.environ['USER'] = 'Brian'
os.system('python echoenv.py')
os.environ['USER'] = 'Arthur'
os.system('python echoenv.py')
os.environ['USER'] = input('?')
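# Child processes inherit os.environ, so each echoenv.py run should report
# the USER value assigned just before it (Brian, Arthur, then the typed value).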
print(os.popen('python3.5 echoenv.py').read()) | [
"[email protected]"
] | |
fb602658c47b01a30ff2c7ae2b51eec8b1d10712 | faf2b052e7f8cd79467ad34fbc173f3bf4b1a21d | /test/test_ws_equipment_list_result.py | 7230f7e81388c99a49ad58906a36598acb75af45 | [] | no_license | atbe/MSU-Scholar-Api-Client-Python | 5d39577ce07ab285f0df9ee58a1ed7ff8ab08d2a | 31b263e5ad848fc6593c4662fbf2828ab9e2594c | refs/heads/master | 2021-05-04T00:51:46.720474 | 2018-03-12T23:52:17 | 2018-03-12T23:52:17 | 120,354,270 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,853 | py | # coding: utf-8
"""
PURE API 510
This is the Pure Web Service. Listed below are all available endpoints, along with a short description.<br/>In order to use the Pure Web Service, you must enter an API key. These are generated in the Administrator tab of Pure, and issues with a given set of available endpoints.<br/>To enter your API key and begin your use, press the Authorize button to at the top of the page. You are then presented with two options for entering the API key: the first option is to use the API key in query format, and the second option is to use the API key in a header.<br/> For further documentation, see <a href=\"documentation/Default.htm\">API Documentation</a>.<br/>A new version of the API is released with each major version of Pure, and remains available for one year. This version is no longer available in Pure 5.14<br/>The old web service is deprecated, but still available <a href=\"../../../doc/\">here</a>, and it will no longer be available in Pure 5.13
OpenAPI spec version: 510
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import msu_scholars_api
from msu_scholars_api.rest import ApiException
from msu_scholars_api.models.ws_equipment_list_result import WSEquipmentListResult
class TestWSEquipmentListResult(unittest.TestCase):
""" WSEquipmentListResult unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testWSEquipmentListResult(self):
"""
Test WSEquipmentListResult
"""
# FIXME: construct object with mandatory attributes with example values
#model = msu_scholars_api.models.ws_equipment_list_result.WSEquipmentListResult()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
dc68438bc369293cbf262f10722059dbc20ee2e8 | 57094f0d09fd3e74eeb511e94400c3ec97051ad3 | /Quax_dev_archive/quax_misc/angular_momentum/tensor_approach/contracted/contracted_overlap.py | 1d41836be55f7685a172233f95c91a9581ffd5c8 | [] | no_license | adabbott/Research_Notes | cccba246e81065dc4a663703fe225fc1ebbf806b | 644394edff99dc6542e8ae6bd0ce8bcf158cff69 | refs/heads/master | 2023-05-12T20:26:58.938617 | 2021-06-02T17:15:35 | 2021-06-02T17:15:35 | 119,863,228 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,501 | py | import jax
import jax.numpy as np
import numpy as onp
from jax.config import config; config.update("jax_enable_x64", True)
np.set_printoptions(precision=10)
onp.set_printoptions(precision=10)
def double_factorial(n):
'''The double factorial function for small Python integer `n`.'''
return np.prod(np.arange(n, 1, -2))
@jax.jit
def odd_double_factorial(x): # this one is jittable, at roughly equal speed
n = (x + 1)/2
return 2**n * np.exp(jax.scipy.special.gammaln(n + 0.5)) / (np.pi**(0.5))
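# Identity used above: (2n-1)!! = 2**n * Gamma(n + 1/2) / sqrt(pi) with
# n = (x+1)/2, so an odd double factorial is expressed through gammaln and
# stays traceable under jit/grad.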
@jax.jit
def normalize(aa,ax,ay,az):
'''
Normalization constant for gaussian basis function.
aa : orbital exponent
ax : angular momentum component x
ay : angular momentum component y
az : angular momentum component z
'''
#f = np.sqrt(double_factorial(2*ax-1) * double_factorial(2*ay-1) * double_factorial(2*az-1))
f = np.sqrt(odd_double_factorial(2*ax-1) * odd_double_factorial(2*ay-1) * odd_double_factorial(2*az-1))
N = (2*aa/np.pi)**(3/4) * (4 * aa)**((ax+ay+az)/2) / f
return N
@jax.jit
def overlap_ss(A, B, alpha_bra, alpha_ket):
ss = ((np.pi / (alpha_bra + alpha_ket))**(3/2) * np.exp((-alpha_bra * alpha_ket * np.dot(A-B,A-B)) / (alpha_bra + alpha_ket)))
return ss
#@jax.jit
def contracted_normalize(exponents,coeff,ax,ay,az):
'''Normalization constant for a single contracted gaussian basis function'''
K = exponents.shape[0] # Degree of contraction K
L = ax + ay + az # Total angular momentum L
# all possible combinations of ci * cj
c_times_c = np.outer(coeff,coeff)
# all possible combinations of alphai + alphaj
a_plus_a = np.broadcast_to(exponents, (K,K)) + np.transpose(np.broadcast_to(exponents, (K,K)), (1,0))
prefactor = (np.pi**(1.5) * double_factorial(2*ax-1) * double_factorial(2*ay-1) * double_factorial(2*az-1)) / 2**L
#prefactor = (np.pi**(1.5) * odd_double_factorial(2*ax-1) * odd_double_factorial(2*ay-1) * odd_double_factorial(2*az-1)) / 2**L
sum_term = np.sum(c_times_c / (a_plus_a**(L + 1.5)))
return (prefactor * sum_term) ** -0.5
@jax.jit
def contracted_overlap_ss(A, B, alpha_bra, alpha_ket, c_bra, c_ket):
size = alpha_bra.shape[0]
AB = np.dot(A-B,A-B)
# rather than looping over all primitive combinations, vectorize by expanding data into arrays
# all possible combinations of c_bra * c_ket
c_times_c = np.outer(c_bra,c_ket)
# all possible combinations of alpha_bra * alpha_ket
a_times_a = np.outer(alpha_bra,alpha_ket)
# all possible combinations of alpha_bra + alpha_ket
a_plus_a = np.outer(alpha_bra, np.ones_like(alpha_ket)) + np.transpose(np.outer(alpha_ket, np.ones_like(alpha_bra)))
ss = np.sum((np.pi / a_plus_a)**(1.5) * np.exp(-a_times_a * AB / a_plus_a) * c_times_c)
return ss
geom = np.array([[0.0,0.0,-0.849220457955],
[0.0,0.0, 0.849220457955]])
charge = np.array([1.0,1.0])
A = np.array([0.0,0.0,-0.849220457955])
B = np.array([0.0,0.0, 0.849220457955])
# This is a basis function
exps = np.array([0.5,
0.5])
coeffs = np.array([1.00,
1.00])
# Bake the normalizations into the coefficients, like Psi4
primitive_norms = jax.vmap(normalize)(exps, np.array([0,0]), np.array([0,0]),np.array([0,0]))
ang_mom_x, ang_mom_y, ang_mom_z = np.zeros_like(exps), np.zeros_like(exps), np.zeros_like(exps)
# Use vmap to auto vectorize the primitive normalization function
primitive_norms = jax.vmap(normalize)(exps, ang_mom_x, ang_mom_y, ang_mom_z)
coeffs = coeffs * primitive_norms
contracted_norm = contracted_normalize(exps, coeffs, 0, 0, 0)
coeffs = coeffs * contracted_norm
@jax.jit
def overlap_ps_block(A, B, alpha_bra, alpha_ket):
oot_alpha_bra = 1 / (2 * alpha_bra)
return oot_alpha_bra * jax.jacrev(overlap_ss,0)(A,B,alpha_bra,alpha_ket)
@jax.jit
def overlap_pp_block(A, B, alpha_bra, alpha_ket):
# We are promoting the ket, so the factor is the ket exponent
oot_alpha_ket = 1 / (2 * alpha_ket)
    # No second term, ai is 0 since we are promoting the ket and there's no AM in the ket.
return oot_alpha_ket * (jax.jacfwd(overlap_ps_block, 1)(A,B,alpha_bra,alpha_ket))
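# Promotion rule behind the jacobians above: differentiating w.r.t. a center
# raises angular momentum. For the bra,
#   d/dA_i (a|b) = 2*alpha_bra*(a+1_i|b) - a_i*(a-1_i|b),
# so (a+1_i|b) = (1/(2*alpha_bra)) * (d/dA_i (a|b) + a_i*(a-1_i|b)),
# and the mirrored relation with alpha_ket and B holds for the ket.
# In the (p|s) and (p|p) blocks the lower term vanishes because the
# promoted index starts at zero angular momentum.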
# Try (p|p): a two-primitive contracted p-type basis function
exp1 = np.array([0.5,
0.4])
exp2 = np.array([0.5,
0.4])
#coeffs1 = np.array([1.00,
# 1.00])
#coeffs2 = np.array([1.00,
# 1.00])
#
#N1 = contracted_normalize(exp1, coeffs1, 1, 0, 0)
#N2 = contracted_normalize(exp2, coeffs2, 0, 0, 0)
full_c = 0.5993114751532237 * 0.4534350390443813
primitive_1 = overlap_pp_block(A,B, 0.5, 0.4)
primitive_2 = overlap_pp_block(A,B, 0.5, 0.4)
print(full_c * primitive_1)
print(full_c * primitive_2)
print(primitive_1 + primitive_2)
full_c = 0.30081673988809127 * 0.2275959260774826
print(full_c * (primitive_1 + primitive_2))
#c = 0.15905414575341015
#c = 0.22671751952219066
#print(c * primitive_1 + c * primitive_2)
#print("EHHHH")
# Compute all unnormalized primitives
test_func = jax.vmap(overlap_pp_block, in_axes=(None,None,0,0))
test = test_func(A, B, exp1, exp2)
#print('og')
#print(test)
#print('sum')
#print(test[0] + test[1])
#print('sum, double, * c**2')
#c = .22671751952219066
#print( 2 * (test[0] + test[1]) * c**2)
#c1 = 0.30081673988809127
#c2 = 0.2275959260774826
#print((c1 * c1 * test[0] + c2 * c2 * test[1]))
#print((c1 * test[0] + c2 * test[1]))
#print(( test[0] + test[1]) * c1 * c2)
#erd_c1 = 0.21103860739153443
#erd_c2 = 0.15967039369300745
#print((erd_c1 * test[0] + erd_c2 * test[1])) #* c1 * c2)
#print('sum, 1/2alpha, * c**2')
#print( 1/(2*exp1[0]) * (test[0] + test[1]) * c**2)
#print(test * 0.29965573757661185)
#print(test * 0.29965573757661185 * 0.5)
#jprint(test * 0.29965573757661185**2 )
#primitive_norm = normalize(0.5, 1,0,0)
#print(primitive_norm)
#print(contracted_normalize(exp1,primitive_norm, 1,0,0))
coeffs1 = np.array([0.30081673988809127,
0.2275959260774826])
coeffs2 = np.array([0.21238156276178832,
0.17965292907913089])
# Now contract
#print( test[0] * coeffs1[0] + test[1] * coeffs1[1])
#print( test[1] * coeffs1[0] + test[0] * coeffs1[1])
#print(test * coeffs1[0] + test * coeffs2[1])
#
#print(0.29965573757661185 * (test[0] + test[1]))
#print(0.29965573757661185**2 * (test[0] + test[1]))
#print(coeffs1[0] * coeffs[1] test[0] + test[1]))
#TODO temp
#coeffs1 = np.array([0.30081673988809127,
# 0.2275959260774826])
#coeffs2 = np.array([0.21238156276178832,
# 0.17965292907913089])
#
## This is the correct (px|s)
#N1 = contracted_normalize(exp1, coeffs1, 1, 0, 0)
#N2 = contracted_normalize(exp2, coeffs2, 0, 0, 0)
#test = contracted_overlap_ps_block(A,B,exp1,exp2,coeffs1,coeffs2)
#print(test)
#print(test * N1 * N2)
#vectorized_overlap_ps_block = jax.vmap(overlap_ps_block, in_axes=(None,None, 0, 0))
#c_o = vectorized_overlap_ps_block(A, B, exps, exps)
#print(c_o)
#print(c_o * 0.5993114751532237 * 0.4237772081237576) # Coef's from Psi4
#print(overlap_pp_block(A,B,0.5,0.5))
#vectorized_overlap_pp_block = jax.vmap(overlap_pp_block, in_axes=(None,None, 0, 0))
#c_o = vectorized_overlap_pp_block(A, B, exps, exps)
#print(c_o)
#print(coeffs)
#coeffs = np.tile(coeffs,3).reshape(2,3)
#print(c_o * coeffs)
#print("Raw normalization constant")
#print(tmp_N)
#print("normalization constant times coefficients")
#print(tmp_N * coeffs)
#print("Raw overlap")
#print(c_o)
#print("normalized overlap")
#print(tmp_N * tmp_N * c_o)
#print(tmp_N * c_o)
s_N = 0.4237772081237576
p_N = 0.5993114751532237
d_N = 0.489335770373359
## (s|s)
#print(s_N * s_N * overlap_ss(A,B,alpha_bra,alpha_ket)) # YUP
## (p|s)
#print(p_N * s_N * overlap_ps_block(A,B,alpha_bra,alpha_ket)) # YUP
## (p|p)
#print(p_N * p_N * overlap_pp_block(A,B,alpha_bra,alpha_ket)) # YUP
## (d|s)
#print(d_N * s_N * overlap_ds_block(A,B,alpha_bra,alpha_ket)) # YUP
## (d|p)
#print(d_N * p_N * overlap_dp_block(A,B,alpha_bra,alpha_ket).reshape(6,3)) # YUP
## (d|d)
#print(d_N * d_N * overlap_dd_block(A,B,alpha_bra,alpha_ket))
#print('hard coded')
#print(overlap_ps_block(A,B,alpha_bra,alpha_ket))
#print('hard coded')
#print(overlap_pp_block(A,B,alpha_bra,alpha_ket))
#print('hard coded')
#print(overlap_ds_block(A,B,alpha_bra,alpha_ket))
#overlap_dp_block(A,B,alpha_bra,alpha_ket)
#dd_block = overlap_dd_block(A,B,alpha_bra,alpha_ket)
#print(dd_block * 0.489335770373359)
#for i in range(1000):
# overlap_pp_block(A,B,alpha_bra,alpha_ket)
@jax.jit
def overlap_ps_block(A, B, alpha_bra, alpha_ket):
oot_alpha_bra = 1 / (2 * alpha_bra)
return oot_alpha_bra * jax.jacrev(overlap_ss,0)(A,B,alpha_bra,alpha_ket)
@jax.jit
def overlap_sp_block(A, B, alpha_bra, alpha_ket): # not really needed is it?
oot_alpha_bra = 1 / (2 * alpha_bra)
return oot_alpha_bra * jax.jacrev(overlap_ss,1)(A,B,alpha_bra,alpha_ket)
@jax.jit
def overlap_pp_block(A, B, alpha_bra, alpha_ket):
# We are promoting the ket, so the factor is the ket exponent
oot_alpha_ket = 1 / (2 * alpha_ket)
# No second term, ai is 0 since we are promoting the ket and theres no AM in the ket.
return oot_alpha_ket * (jax.jacfwd(overlap_ps_block, 1)(A,B,alpha_bra,alpha_ket))
#@jax.jit
#def overlap_ds_block(A,B,alpha_bra,alpha_ket):
# # We are promoting the bra a second time, factor is bra exponent
# oot_alpha_bra = 1 / (2 * alpha_bra)
# # # This is of shape (3,3) all dij combos symmetric matrix # Thus a_i factor has to be 3x3 identity, so that only
# return oot_alpha_bra * (jax.jacfwd(overlap_ps_block, 0)(A,B,alpha_bra,alpha_ket) + np.eye(3) * overlap_ss(A,B,alpha_bra,alpha_ket))
@jax.jit
def overlap_ds_block(A,B,alpha_bra,alpha_ket):
'''
Returns a 1x6 array:
(dxx,s) (dxy,s) (dxz,s) (dyy,s) (dyz,s) (dzz,s)
'''
# We are promoting the bra a second time, factor is bra exponent
oot_alpha_bra = 1 / (2 * alpha_bra)
    # The jacfwd term is a (3,3) symmetric matrix of all d_ij combos. The a_i factor must be the 3x3
    # identity so that only the diagonal (dxx, dyy, dzz) integrals pick up the extra (s|s) contribution.
result = oot_alpha_bra * (jax.jacfwd(overlap_ps_block, 0)(A,B,alpha_bra,alpha_ket) + np.eye(3) * overlap_ss(A,B,alpha_bra,alpha_ket))
# This result is a 3x3 array containing all (dxx,s) (dxy,s) (dyx,s), only need upper or lower triangle
# Return upper triangle ((dxx, dxy, dxz, dyy, dyz, dzz) | s) as a vector
iu = np.triu_indices(3)
return result[iu]
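# Illustrative call (exponents arbitrary): overlap_ds_block(A, B, 0.5, 0.4) returns the six
# unique (d|s) integrals in the order (dxx, dxy, dxz, dyy, dyz, dzz), per the docstring above.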
@jax.jit
def overlap_dp_block(A,B,alpha_bra,alpha_ket):
'''
Returns a 1x18 array:
(dxx,px) (dxx,py) (dxx,pz) (dxy,px) (dxy,py) (dxy,pz) (dxz,px) (dxz,py) (dxz,pz) (dyy,px) (dyy,py) (dyy,pz) (dyz,px) (dyz,py) (dyz,pz) (dzz,px) (dzz,py) (dzz,pz)
If called directly, should reshape into a 6x3 block!
(dxx,px) (dxx,py) (dxx,pz)
(dxy,px) (dxy,py) (dxy,pz)
(dxz,px) (dxz,py) (dxz,pz)
(dyy,px) (dyy,py) (dyy,pz)
(dyz,px) (dyz,py) (dyz,pz)
(dzz,px) (dzz,py) (dzz,pz)
'''
oot_alpha_ket = 1 / (2 * alpha_ket) # use ket, since we are promoting ket from s-->p
# This is a 18x1 array of d by p functions. Could also use overlap_pp_block instead, i think?
return np.ravel(oot_alpha_ket * jax.jacfwd(overlap_ds_block, 1)(A,B,alpha_bra,alpha_ket))
@jax.jit
def overlap_dd_block(A,B,alpha_bra,alpha_ket):
'''
Returns a 6x6 array:
(dxx,dxx) (dxx,dxy) (dxx,dxz) (dxx,dyy) (dxx,dyz) (dxx,dzz)
(dxy,dxx) (dxy,dxy) (dxy,dxz) (dxy,dyy) (dxy,dyz) (dxy,dzz)
(dxz,dxx) (dxz,dxy) (dxz,dxz) (dxz,dyy) (dxz,dyz) (dxz,dzz)
(dyy,dxx) (dyy,dxy) (dyy,dxz) (dyy,dyy) (dyy,dyz) (dyy,dzz)
(dyz,dxx) (dyz,dxy) (dyz,dxz) (dyz,dyy) (dyz,dyz) (dyz,dzz)
(dzz,dxx) (dzz,dxy) (dzz,dxz) (dzz,dyy) (dzz,dyz) (dzz,dzz)
'''
oot_alpha_ket = 1 / (2 * alpha_ket) # use ket, since we are promoting ket from p-->d
    # The jacfwd (first) term is an 18x3 array; the a_i coeffs and the second term line up as follows:
# (dxx,px) --> (dxx,dxx) (dxx, dxy), (dxx, dxz) 1, 0, 0 (dxx|s) (dxx|s) (dxx|s)
# (dxx,py) --> (dxx,dyx) (dxx, dyy), (dxx, dyz) 0, 1, 0 (dxx|s) (dxx|s) (dxx|s)
# (dxx,pz) --> (dxx,dzx) (dxx, dzy), (dxx, dzz) 0, 0, 1 (dxx|s) (dxx|s) (dxx|s)
# (dxy,px) --> (dxy,dxx) (dxy, dxy), (dxy, dxz) 1, 0, 0 (dxy|s) (dxy|s) (dxy|s)
# (dxy,py) --> (dxy,dyx) (dxy, dyy), (dxy, dyz) 0, 1, 0 (dxy|s) (dxy|s) (dxy|s)
# (dxy,pz) --> (dxy,dzx) (dxy, dzy), (dxy, dzz) 0, 0, 1 (dxy|s) (dxy|s) (dxy|s)
# .... ...
# (dzz,px) --> (dzz,dxx) (dzz, dxy), (dzz, dxz) 1, 0, 0 (dzz|s) (dzz|s) (dzz|s)
# (dzz,py) --> (dzz,dyx) (dzz, dyy), (dzz, dyz) 0, 1, 0 (dzz|s) (dzz|s) (dzz|s)
# (dzz,pz) --> (dzz,dzx) (dzz, dzy), (dzz, dzz) 0, 0, 1 (dzz|s) (dzz|s) (dzz|s)
first_term = jax.jacfwd(overlap_dp_block, 1)(A,B,alpha_bra,alpha_ket)
factor = np.tile(np.eye(3),(6,1))
tmp_second_term = overlap_ds_block(A,B,alpha_bra,alpha_ket)
second_term = factor * np.repeat(tmp_second_term, 9).reshape(18,3)
result = oot_alpha_ket * (first_term + second_term)
# result is of same signature as jacfwd (first) term above
# It contains duplicates in each 3x3 sub-array (upper and lower triangle are equal)
# reshape and grab out just upper triangle as a vector, reshape into matrix
iu1,iu2 = np.triu_indices(3)
result = result.reshape(6,3,3)[:,iu1,iu2].reshape(6,6)
return result
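# Illustrative call: overlap_dd_block(A, B, 0.5, 0.5) yields the symmetric (6,6) block in the
# (dxx, dxy, dxz, dyy, dyz, dzz) ordering documented above.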
| [
"[email protected]"
] | |
945d9be3f8c30181dec6d9d90930e60f95885cd4 | b36d169e1353752486441636255d435568ca307d | /spikeforest/forestview/spikeforest_view_launchers.py | 799752c978e88f723fc1bfd9313fe29c641c09d9 | [
"Apache-2.0"
] | permissive | alexmorley/spikeforest2 | 5d211595f744ed755eea9b9376e31ed6f9a4da12 | 859fc8b843cc7b547ab83d3e0a43bb17230d09b1 | refs/heads/master | 2020-04-23T03:40:08.076097 | 2019-06-12T08:11:06 | 2019-06-12T08:11:06 | 170,885,069 | 0 | 0 | null | 2019-02-15T15:21:47 | 2019-02-15T15:21:46 | null | UTF-8 | Python | false | false | 11,809 | py | from .spikeforest_views.currentstateview import CurrentStateView
from .spikeforest_views.recordingtableview import RecordingTableView, RecordingSelectComponent
from .spikeforest_views.aggregatedsortingresultstableview import AggregatedSortingResultsTableView
from .recording_views.electrodegeometryview import ElectrodeGeometryView
from .recording_views.timeseriesview import TimeseriesView
from .recording_views.templatesview import TemplatesView
from .recording_views.recordingsummaryview import RecordingSummaryView
from .recording_views.unitstableview import UnitsTableView
from .recording_views.sortingresultstableview import SortingResultsTableView, SortingResultSelectComponent
from .recording_views.sortingresultdetailview import SortingResultDetailView
from .recording_views.featurespaceview import FeatureSpaceView
from .recording_views.clusterview import ClusterView
import vdomr as vd
from mountaintools import client as mt
import json
def get_spikeforest_view_launchers(context):
launchers = []
groups = []
ret = dict(
groups=groups,
launchers=launchers
)
# General
groups.append(dict(name='general', label=''))
launchers.append(dict(
group='general', name='recording-table', label='Recording table',
view_class=RecordingTableView,
context=context, opts=dict(),
enabled=True
))
launchers.append(dict(
group='general', name='current-state', label='Current state',
view_class=CurrentStateView,
context=context, opts=dict(),
enabled=True
))
# MEDIUM TODO: this should be a component rather than a launcher
launchers.append(dict(
group='general', name='recording-select',
component_class=RecordingSelectComponent,
context=context, opts=dict(),
enabled=True
))
recording_context = context.recordingContext(context.currentRecordingId())
# Aggregated sorting results
if context.hasAggregatedSortingResults():
groups.append(dict(name='aggregated_sorting_results', label='Aggregated results'))
launchers.append(dict(
group='aggregated_sorting_results', name='aggregated-results-table', label='Results table',
view_class=AggregatedSortingResultsTableView,
context=context, opts=dict(),
always_open_new=False,
enabled=True
))
# Recording
if recording_context:
groups.append(dict(name='recording', label='Recording', sublabel=context.currentRecordingId()))
launchers.append(dict(
group='recording', name='recording-summary', label='Recording summary',
view_class=RecordingSummaryView,
context=recording_context, opts=dict(),
always_open_new=True,
enabled=(recording_context is not None)
))
launchers.append(dict(
group='recording', name='electrode-geometry', label='Electrode geometry',
view_class=ElectrodeGeometryView,
context=recording_context, opts=dict(),
enabled=(recording_context is not None)
))
launchers.append(dict(
group='recording', name='timeseries', label='Timeseries',
view_class=TimeseriesView,
context=recording_context, opts=dict(),
always_open_new=True,
enabled=(recording_context is not None)
))
if recording_context.hasIntraRecording():
launchers.append(dict(
group='recording', name='intra-timeseries', label='Intra-timeseries',
view_class=TimeseriesView,
                view_class=TimeseriesView,
                context=recording_context.intraRecordingContext(), opts=dict(),
always_open_new=True,
enabled=(recording_context is not None)
))
# True sorting
if recording_context and recording_context.trueSortingContext():
true_sorting_context = recording_context.trueSortingContext()
groups.append(dict(name='true-sorting', label='True sorting'))
launchers.append(dict(
group='true-sorting', name='true-templates', label='Templates',
view_class=TemplatesView,
context=true_sorting_context, opts=dict(),
always_open_new=True,
enabled=(true_sorting_context is not None)
))
launchers.append(dict(
group='true-sorting', name='true-units-info', label='Units info',
view_class=UnitsTableView,
context=true_sorting_context, opts=dict(),
always_open_new=True,
enabled=(true_sorting_context is not None)
))
launchers.append(dict(
group='true-sorting', name='feature-space', label='Feature space',
view_class=FeatureSpaceView,
context=true_sorting_context, opts=dict(),
always_open_new=True,
enabled=(len(true_sorting_context.selectedUnitIds()) > 0)
))
launchers.append(dict(
group='true-sorting', name='clusters', label='Clusters',
view_class=ClusterView,
context=true_sorting_context, opts=dict(),
always_open_new=True,
enabled=(len(true_sorting_context.selectedUnitIds()) > 0)
))
        groups.append(dict(name='unit', label='Unit'))
launchers.append(dict(
group='true-sorting', name='test', label='Test',
view_class=TemplatesView,
context=true_sorting_context, opts=dict(),
always_open_new=True,
enabled=(true_sorting_context.currentUnitId() is not None)
))
# Sorting results
if recording_context and (len(recording_context.sortingResultNames()) > 0):
groups.append(dict(name='sorting-results', label='Sorting results'))
launchers.append(dict(
group='sorting-results', name='sorting-results-table', label='Sorting results table',
view_class=SortingResultsTableView,
context=recording_context, opts=dict(),
always_open_new=True,
enabled=(len(recording_context.sortingResultNames()) > 0)
))
launchers.append(dict(
group='sorting-results', name='sorting-result-select',
component_class=SortingResultSelectComponent,
context=recording_context, opts=dict(),
always_open_new=True,
enabled=(len(recording_context.sortingResultNames()) > 0)
))
# Sorting result
if recording_context and recording_context.currentSortingResult():
srname = recording_context.currentSortingResult()
sorting_result_context = recording_context.sortingResultContext(srname)
groups.append(dict(name='sorting-result', label='Sorting result', sublabel=srname))
launchers.append(dict(
group='sorting-result', name='sorting-result-details', label='Details',
view_class=SortingResultDetailView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context is not None)
))
launchers.append(dict(
group='sorting-result', name='templates', label='Templates',
view_class=TemplatesView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context is not None)
))
launchers.append(dict(
group='sorting-result', name='units-info', label='Units info',
view_class=UnitsTableView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context is not None)
))
launchers.append(dict(
group='sorting-result', name='feature-space', label='Feature space',
view_class=FeatureSpaceView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(len(sorting_result_context.selectedUnitIds()) > 0)
))
launchers.append(dict(
group='sorting-result', name='clusters', label='Clusters',
view_class=ClusterView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(len(sorting_result_context.selectedUnitIds()) > 0)
))
launchers.append(dict(
group='sorting-result', name='console-out', label='Console output',
view_class=ConsoleOutView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context.consoleOutputPath() is not None)
))
launchers.append(dict(
group='sorting-result', name='exec-stats', label='Execution stats',
view_class=ExecutionStatsView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context.executionStats() is not None)
))
launchers.append(dict(
group='sorting-result', name='comparison-with-truth', label='Comparison with truth',
view_class=ComparisonWithTruthView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context.comparisonWithTruthPath() is not None)
))
        groups.append(dict(name='unit', label='Unit'))
launchers.append(dict(
group='sorting-result', name='test', label='Test',
view_class=TemplatesView,
context=sorting_result_context, opts=dict(),
always_open_new=True,
enabled=(sorting_result_context.currentUnitId() is not None)
))
return ret
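# Sketch of how a caller might consume the result (keys as constructed above):
#   info = get_spikeforest_view_launchers(context)
#   enabled_launchers = [l for l in info['launchers'] if l.get('enabled')]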
class ConsoleOutView(vd.Component):
def __init__(self, *, context, opts=None):
vd.Component.__init__(self)
self._context = context
self._size = (100, 100)
if not context.consoleOutputPath():
self._text = 'no console output found'
else:
self._text = mt.loadText(path=context.consoleOutputPath()) or 'unable to load console output'
def setSize(self, size):
if self._size != size:
self._size = size
def size(self):
return self._size
def tabLabel(self):
return 'Console out'
def render(self):
return vd.components.ScrollArea(vd.pre(self._text), height=self._size[1])
class ExecutionStatsView(vd.Component):
def __init__(self, *, context, opts=None):
vd.Component.__init__(self)
self._context = context
self._size = (100, 100)
self._stats = context.executionStats()
def setSize(self, size):
if self._size != size:
self._size = size
def size(self):
return self._size
def tabLabel(self):
return 'Exec stats'
def render(self):
if not self._stats:
return vd.div('No stats found')
return vd.div(vd.pre(json.dumps(self._stats, indent=4)))
class ComparisonWithTruthView(vd.Component):
def __init__(self, *, context, opts=None):
vd.Component.__init__(self)
self._context = context
self._size = (100, 100)
if not context.comparisonWithTruthPath():
self._object = None
else:
self._object = mt.loadObject(path=context.comparisonWithTruthPath())
def setSize(self, size):
if self._size != size:
self._size = size
def size(self):
return self._size
def tabLabel(self):
return 'Comparison with truth'
def render(self):
if not self._object:
return vd.div('Unable to load comparison data.')
return vd.components.ScrollArea(vd.pre(json.dumps(self._object, indent=4)), height=self._size[1])
| [
"[email protected]"
] | |
607f0c745c7df74bf1cbfc3ebac73ac5b92debb3 | 8d03310627f1f625eddda8f4a3e680892872e0ec | /batemaneq/__init__.py | 09ee7bcfa329b2d98875fd9beb5ea50bbdbf1f40 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | Rolleroo/batemaneq | 4da15e4bff32484d27ea9dc2b3338edc4956b0df | bd8c24d1f77ccb166b3210d81d9468f7789813ad | refs/heads/master | 2021-02-05T12:43:40.639427 | 2020-02-23T20:47:48 | 2020-02-23T20:47:48 | 243,781,711 | 1 | 0 | BSD-2-Clause | 2020-02-28T14:31:36 | 2020-02-28T14:31:36 | null | UTF-8 | Python | false | false | 356 | py | # -*- coding: utf-8 -*-
"""
batemaneq provides a Python package for evaluating the Bateman equation
"""
from __future__ import absolute_import
from ._release import __version__
from .bateman import bateman_parent, bateman_full
from ._bateman_double import bateman_parent as bateman_parent_arr
from ._bateman_double import bateman_full as bateman_full_arr
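# Illustrative call (argument order assumed from the bateman module: decay constants, then time):
# bateman_parent([0.1, 0.05], 1.0)  # -> chain concentrations at t = 1.0 for a unit parent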
| [
"[email protected]"
] | |
d1be16162f4ac7c7277b10428e050b2deff850ea | ef18d99eff01a708dddfc2cbf77f68bb1d8aa889 | /python/415.add-strings.py | 0d3f611c808e58d327f44045f8c4f9a31bc6f054 | [
"MIT"
] | permissive | fengbaoheng/leetcode | 53d5b6f92f7958d551e6297f77c4edfc042a4d58 | e37535a06b0b4cb23de9a2cfa357eb689b1c06fb | refs/heads/master | 2023-07-23T11:33:42.938862 | 2023-07-08T08:43:46 | 2023-07-08T08:43:46 | 131,551,048 | 1 | 0 | MIT | 2022-11-16T02:53:49 | 2018-04-30T03:13:55 | Java | UTF-8 | Python | false | false | 1,582 | py | #
# @lc app=leetcode.cn id=415 lang=python3
#
# [415] Add Strings
#
class Solution:
    # Split the strings into lists/arrays and add them digit by digit.
    # Converting the whole strings to numbers directly could exceed the maximum integer range.
def addStrings(self, num1: str, num2: str) -> str:
short_num, long_num = (num1, num2) if len(num1) < len(num2) else (num2, num1)
short_length = len(short_num)
long_length = len(long_num)
if short_length == 0:
return long_num
elif long_length == 0:
return short_num
        # Convert to lists and reverse them
short_num = list(short_num)
short_num.reverse()
long_num = list(long_num)
long_num.reverse()
carry = 0
num = []
        # Add digit by digit
for i in range(short_length):
s = int(short_num[i]) + int(long_num[i]) + carry
if s >= 10:
carry = 1
s -= 10
else:
carry = 0
num.append(s)
        # Process the remaining digits of the longer number
for i in range(short_length, long_length):
s = int(long_num[i]) + carry
if s >= 10:
carry = 1
num.append(s - 10)
else:
carry = 0
num.append(s)
num.extend(long_num[i + 1 :])
break
        # Carry a 1 into the highest digit
if carry == 1:
num.append(1)
num.reverse()
return "".join(map(str, num))
if __name__ == "__main__":
print(Solution().addStrings("1234", "9999"))
| [
"[email protected]"
] | |
ff82dba0faaadec9068bbc9b3ccc625a721573a6 | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/file/formats/gzip/GZipFileSystemFactory.pyi | 65e1831d93c9e7d26029af1620bb52a08cc18eb9 | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-03-25T08:25:16.842142 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,302 | pyi | from typing import List
import ghidra.formats.gfilesystem
import ghidra.formats.gfilesystem.factory
import ghidra.util.task
import java.io
import java.lang
class GZipFileSystemFactory(object, ghidra.formats.gfilesystem.factory.GFileSystemFactoryWithFile, ghidra.formats.gfilesystem.factory.GFileSystemProbeBytesOnly):
MAX_BYTESREQUIRED: int = 65536
PROBE_BYTES_REQUIRED: int = 2
def __init__(self): ...
def create(self, __a0: ghidra.formats.gfilesystem.FSRL, __a1: ghidra.formats.gfilesystem.FSRLRoot, __a2: java.io.File, __a3: ghidra.formats.gfilesystem.FileSystemService, __a4: ghidra.util.task.TaskMonitor) -> ghidra.formats.gfilesystem.GFileSystem: ...
def equals(self, __a0: object) -> bool: ...
def getBytesRequired(self) -> int: ...
def getClass(self) -> java.lang.Class: ...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def probeStartBytes(self, __a0: ghidra.formats.gfilesystem.FSRL, __a1: List[int]) -> bool: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
@property
def bytesRequired(self) -> int: ...
| [
"[email protected]"
] | |
1fa034f767ef9f88cf6992b4ac2982972c7b0b5f | ca61296e18ae834628b6a4199bbd5a9379bdeff3 | /worker/models.py | 0cd8b2b7d891bed8050b7ab22c805edc0417230a | [] | no_license | shashank-sharma/gdrs | 8979361a21a01097ca9f5a9e969c55c8548fedfa | b0cb17eade5049b5175dc78eb93b0385b72ac61a | refs/heads/master | 2020-03-29T22:41:18.934031 | 2019-01-22T15:31:31 | 2019-01-22T15:31:31 | 150,435,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py | from django.db import models
from accounts.models import User
# Create your models here.
class Driver(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
driving_licence_number = models.CharField(max_length=20)
expiry_date = models.DateField()
working = models.BooleanField(default=False)
class Cab(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
driver = models.ForeignKey(Driver, on_delete=models.CASCADE)
license_plate = models.CharField(max_length=20)
car_model_id = models.CharField(max_length=20)
manufacturing_id = models.CharField(max_length=20)
active = models.BooleanField(default=True)
class CarModel(models.Model):
cab = models.ForeignKey(Cab, on_delete=models.CASCADE)
model_name = models.CharField(max_length=80)
model_description = models.CharField(max_length=100)
class Shift(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
driver = models.ForeignKey(Driver, on_delete=models.CASCADE)
shift_start = models.DateField()
shift_end = models.DateField()
login_time = models.DateField()
logout_time = models.DateField()
| [
"[email protected]"
] | |
7aa49d03b00df1969a441a334cfa985a4fe57e98 | e87d793b3a5facc6e54e0263fbd67703e1fbb382 | /duckietown-world-venv/lib/python3.6/site-packages/geometry/manifolds/tests/embedding_test.py | c6435354a8abc351527207b355f8785768e2ff0f | [] | no_license | llingg/behaviour-benchmarking | a860bbe709309e13f3e1133d916944882199a40f | 85bbf1a9c2c628ba74480fe7abac3804d6afdac4 | refs/heads/v1 | 2022-10-06T08:21:29.068329 | 2020-06-11T07:02:46 | 2020-06-11T07:02:46 | 259,622,704 | 0 | 0 | null | 2020-06-02T17:52:46 | 2020-04-28T11:52:08 | C++ | UTF-8 | Python | false | false | 1,989 | py | # coding=utf-8
from geometry.manifolds import (SO3, SO2, R1, R2, R3, SE2, SE3, S2, S1, T1, T2,
T3, so2, so3, se2, se3, Tran3, Tran2, Tran1, tran2, tran1, tran3)
from nose.plugins.attrib import attr
def check_embed_relation_cond(A, B):
check_embed_relation_cond.description = 'Checking %s < %s' % (A, B)
msg = None
if not A.embeddable_in(B):
msg = '%s is not embeddable in %s' % (A, B)
if not B.can_represent(A):
msg = '%s cannot represent %s' % (B, A)
if msg:
raise Exception('%s;\n %s: %s\n %s: %s' %
(msg, A, A.relations_descriptions(),
B, B.relations_descriptions()))
def check_embed_relation(A, B):
check_embed_relation_cond(A, B)
points = list(A.interesting_points())
if not points:
msg = ('Cannot test because manifold %s does '
'not have interesting points' % A)
raise Exception(msg)
for a1 in points:
A.belongs(a1)
b = A.embed_in(B, a1)
B.belongs(b)
a2 = A.project_from(B, b)
A.belongs(a2)
a3 = B.project_to(A, b)
A.belongs(a3)
A.assert_close(a1, a2)
A.assert_close(a1, a3)
@attr('embed')
def test_embed_relations():
couples = []
def add(A, B):
couples.append((A, B))
add(R1, R2)
add(R2, R3)
add(R1, R3)
add(SO2, SO3)
add(SO2, SE3)
add(SO2, SE2)
add(SO3, SE3)
add(so3, se3)
add(so2, se2)
add(so2, se3)
add(S1, S2)
add(R1, SE2)
add(R2, SE2)
add(R1, SE3)
add(R2, SE3)
add(R3, SE3)
add(Tran1, SE2)
add(Tran2, SE2)
add(Tran1, SE3)
add(Tran2, SE3)
add(Tran3, SE3)
add(T1, T2)
add(T2, T3)
add(T1, T3)
add(T1, R1)
add(T2, R2)
add(T3, R3)
add(T3, SE3)
add(S1, SE3)
add(S2, SE3)
add(tran1, se3)
add(tran2, se3)
add(tran3, se3)
add(T1, S1)
for A, B in couples:
check_embed_relation(A, B)
| [
"[email protected]"
] | |
d37a305a988762a88462d72268ef5b9960e21900 | f7e9bf6fa18a41b52994b1f16fd55c4d69f33b56 | /plugins/embed_picasaweb_image/embed_picasaweb_image.py | 02b13d5d2a5d91850ad78e166be1f0e6b52a1e0a | [
"Unlicense",
"LicenseRef-scancode-public-domain"
] | permissive | siongui/userpages | 1716f2d24e52b514ea8534027cec9707783d0d47 | 494b95e61715a49dce6615103a5b19fa05f276f1 | refs/heads/master | 2023-07-01T12:52:04.813216 | 2023-06-12T16:31:25 | 2023-06-12T16:31:25 | 13,944,800 | 80 | 39 | Unlicense | 2023-08-18T03:51:40 | 2013-10-29T02:39:13 | Go | UTF-8 | Python | false | false | 1,564 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
# Creating reStructuredText Directives
# @see http://docutils.sourceforge.net/docs/howto/rst-directives.html
from docutils.parsers.rst import directives, Directive
from docutils import nodes
class embed_picasaweb_image(Directive):
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = { 'album_name' : directives.unchanged,
'css_class' : directives.unchanged,
'description' : directives.unchanged,
'album_url' : directives.uri,
'image_url' : directives.uri,
}
has_content = False
def run(self):
url = directives.uri(self.arguments[0])
album_name = self.options.get('album_name', None)
album_url = self.options.get('album_url', None)
image_url = self.options.get('image_url', None)
css_class = self.options.get('css_class', None)
description = self.options.get('description', u'')
if album_name and album_url:
html = u'<div class="{}"><a href="{}"><image src="{}"></a><div>{}</div><div class="album">From Album: <a href="{}">{}</a></div></div>'.format(
css_class, image_url, url, description, album_url, album_name)
else:
html = u'<div class="{}"><a href="{}"><image src="{}"></a><div>{}</div></div>'.format(
css_class, image_url, url, description)
return [nodes.raw('', html, format='html')]
def register():
directives.register_directive('embed_picasaweb_image', embed_picasaweb_image)
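# Hypothetical usage in an .rst source (option names from option_spec above; URLs are placeholders):
#
# .. embed_picasaweb_image:: https://example.com/photo.jpg
#    :album_name: Holiday album
#    :album_url: https://example.com/album
#    :image_url: https://example.com/album/photo
#    :css_class: picasa-image
#    :description: A captioned photo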
| [
"[email protected]"
] | |
a83aef36ff14f7c63007b1cf8d651c30aeb8ef94 | 573f85b19a687b103bf0e70b48f1eab0fbed792a | /certbot/AliDNSCertbot.py | 716f19879cb1900387314795c22cde75a263459f | [
"Apache-2.0"
] | permissive | calllivecn/dockerbuild | 7e240326743aaf88b7adc3637c8c643a8c4b7c41 | e2c2c315677d2510f806e3dfa3fec062c58c7134 | refs/heads/master | 2023-07-23T03:27:17.378620 | 2023-03-13T04:19:54 | 2023-03-13T04:19:54 | 128,035,897 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,289 | py | #!/usr/bin/env python3
# coding=utf-8
# date 2019-11-20 16:34:16
# author calllivecn <[email protected]>
import os
import sys
import time
import base64
import hashlib
import hmac
import logging
#import configparser
import urllib
from urllib import request
from urllib import parse
# pip install alibabacloud_alidns20150109==2.0.2
from alibabacloud_alidns20150109.client import Client as Alidns20150109Client
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_alidns20150109 import models as alidns_20150109_models
def getlogger(level=logging.INFO):
logger = logging.getLogger("logger")
formatter = logging.Formatter("%(asctime)s %(filename)s:%(funcName)s:%(lineno)d %(levelname)s: %(message)s", datefmt="%Y-%m-%d-%H:%M:%S")
consoleHandler = logging.StreamHandler(stream=sys.stdout)
#logger.setLevel(logging.DEBUG)
consoleHandler.setFormatter(formatter)
# consoleHandler.setLevel(logging.DEBUG)
logger.addHandler(consoleHandler)
logger.setLevel(level)
return logger
logger = getlogger()
ALI_DDNS_URL = 'alidns.cn-zhangjiakou.aliyuncs.com'
LetsEncryptSubDomain = '_acme-challenge'
class AliDDNS:
def __init__(self, access_key_id, access_key_secret):
self.access_key_id = access_key_id
self.access_key_secret = access_key_secret
"""
使用AK&SK初始化账号Client
@param access_key_id:
@param access_key_secret:
@return: Client
@throws Exception
"""
config = open_api_models.Config(
# 您的AccessKey ID,
access_key_id=self.access_key_id,
# 您的AccessKey Secret,
access_key_secret=self.access_key_secret
)
# 访问的域名
config.endpoint = ALI_DDNS_URL
self.client = Alidns20150109Client(config)
def addDomainRecord(self, domain_name, rr, typ, value):
"""
参数:
domain_name='calllive.cc',
type='AAAA',
rr='route'
value='240e:3b5:3013:f760:6edd:c591:41db:7a5d',
return:
{
"RequestId": "69698E87-A897-5FFA-B578-1001D5052D75",
"RecordId": "751818936343988224"
}
"""
add_domain_record_request = alidns_20150109_models.AddDomainRecordRequest(
domain_name=domain_name,
type=typ,
value=value,
rr=rr
)
# 复制代码运行请自行打印 API 的返回值
response = self.client.add_domain_record(add_domain_record_request)
return response.body.to_map()
def updateDonameRecord(self, record_id, rr, typ, value):
"""
参数:
record_id='751812982741233664',
rr='route',
type='AAAA',
value='240e:3b5:3013:f760:2292:83ab:872:2'
return:
{
"RequestId": "A997E4E6-C6BF-5A2B-85AE-01BE6E3AC1BE",
"RecordId": "751812982741233664"
}
"""
update_domain_record_request = alidns_20150109_models.UpdateDomainRecordRequest(
record_id=record_id,
rr=rr,
type=typ,
value=value
)
# 复制代码运行请自行打印 API 的返回值
response = self.client.update_domain_record(update_domain_record_request)
return response.body.to_map()
def describe_sub_domain(self, sub_domain, typ):
"""
return:
{
"TotalCount": 1,
"RequestId": "5AA5CC8A-4675-5B92-898A-5FBCC742E975",
"PageSize": 20,
"DomainRecords": {
"Record": [
{
"RR": "route",
"Line": "default",
"Status": "ENABLE",
"Locked": false,
"Type": "AAAA",
"DomainName": "calllive.cc",
"Value": "240e:3b5:3013:f760:7942:d2cd:5cc4:2aa1",
"RecordId": "751945591127363584",
"TTL": 600,
"Weight": 1
}
]
},
"PageNumber": 1
}
"""
describe_sub_domain_records_request = alidns_20150109_models.DescribeSubDomainRecordsRequest(
sub_domain=sub_domain,
type=typ
)
# 复制代码运行请自行打印 API 的返回值
response = self.client.describe_sub_domain_records(describe_sub_domain_records_request)
# logger.debug(f"response type: {type(response)}")
# logger.debug(f"response dir(): {dir(response)}")
# logger.debug(f"response to_map(): {response.to_map()}")
# logger.debug(f"response body: {response.body.to_map()}")
# logger.debug(f"response.body type: {type(response.body)}")
# jsondata = UtilClient.to_jsonstring(TeaCore.to_map(response))
return response.body.to_map()
    def describeDomainRecord(self, domain_name, rrkey_word, typ):
"""
domain_name='baidu.com',
rrkey_word='ditu',
typ='AAAA'
return:
{
"TotalCount": 1,
"RequestId": "06A55865-42D5-5453-B7D3-ECA434200584",
"PageSize": 20,
"DomainRecords": {
"Record": [
{
"RR": "route",
"Line": "default",
"Status": "ENABLE",
"Locked": false,
"Type": "AAAA",
"DomainName": "calllive.cc",
"Value": "240e:3b5:3013:f760:6edd:c591:41db:7a5d",
"RecordId": "751812982741233664",
"TTL": 600,
"Weight": 1
}
]
},
"PageNumber": 1
}
"""
describe_domain_records_request = alidns_20150109_models.DescribeDomainRecordsRequest(
domain_name=domain_name,
rrkey_word=rrkey_word,
type=typ
)
# 复制代码运行请自行打印 API 的返回值
response = self.client.describe_domain_records(describe_domain_records_request)
return response.body.to_map()
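# Illustrative wrapper usage (credentials are placeholders):
#   ddns = AliDDNS('<access_key_id>', '<access_key_secret>')
#   ddns.describe_sub_domain('route.example.com', 'AAAA')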
class AliyunDns:
    __endpoint = 'https://alidns.aliyuncs.com'
    __letsencryptSubDomain = LetsEncryptSubDomain
    __appid = ''
    __appsecret = ''
    __logger = logging.getLogger("logger")
def __init__(self, appid, appsecret):
self.__appid = appid
self.__appsecret = appsecret
def __getSignatureNonce(self):
return str(int(round(time.time() * 1000)))
def __percentEncode(self, s):
res = parse.quote_plus(s.encode('utf8'), '')
res = res.replace('+', '%20')
res = res.replace('*', '%2A')
res = res.replace('%7E', '~')
#res = res.replace('+', '%20')
#res = res.replace('\'', '%27')
#res = res.replace('\"', '%22')
#res = res.replace('*', '%2A')
#res = res.replace('%7E', '~')
return res
def __signature(self, params):
sortedParams = sorted(params.items(), key=lambda params: params[0])
query = ''
for k, v in sortedParams:
query += '&' + self.__percentEncode(k) + '=' + self.__percentEncode(v)
self.__logger.debug("参数编码串:{}".format(query))
stringToSign = 'GET&%2F&' + self.__percentEncode(query[1:])
self.__logger.debug("签名串:{}".format(stringToSign))
try:
h = hmac.new((self.__appsecret + "&").encode("utf-8"), stringToSign.encode("utf-8"), hashlib.sha1)
except Exception as e:
self.__logger.error("签名出错...")
self.__logger.error(e)
signature = base64.encodebytes(h.digest()).strip()
return signature
def __request(self, params):
commonParams = {
'Format': 'JSON',
'Version': '2015-01-09',
'SignatureMethod': 'HMAC-SHA1',
'SignatureNonce': self.__getSignatureNonce(),
'SignatureVersion': '1.0',
'AccessKeyId': self.__appid,
'Timestamp': time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
}
# merge all params
finalParams = commonParams.copy()
finalParams.update(params)
self.__logger.debug("finalParams: {}".format(finalParams))
# signature
finalParams['Signature'] = self.__signature(finalParams)
self.__logger.info('Signature: {}'.format(finalParams['Signature']))
# get final url
url = "{}/?{}".format(self.__endpoint, parse.urlencode(finalParams))
# print(url)
req = request.Request(url)
self.__logger.debug(req.full_url)
self.__logger.debug(req.get_method())
try:
f = request.urlopen(req)
response = f.read()
self.__logger.info(response.decode('utf-8'))
except request.HTTPError as e:
self.__logger.info(e.read().strip().decode('utf-8'))
raise SystemExit(e)
def addDomainRecord(self, domain, rr, value):
params = {
'Action': 'AddDomainRecord',
'DomainName': domain,
'RR': rr,
'Type': 'TXT',
'Value': value
}
self.__request(params)
def deleteSubDomainRecord(self, domain, rr):
params = {
'Action': 'DeleteSubDomainRecords',
'DomainName': domain,
'RR': rr,
'Type': 'TXT'
}
self.__request(params)
def addLetsencryptDomainRecord(self, domain, value):
self.addDomainRecord(domain, self.__letsencryptSubDomain, value)
def deleteLetsencryptDomainRecord(self, domain):
self.deleteSubDomainRecord(domain, self.__letsencryptSubDomain)
def toString(self):
print('AliyunDns[appid=' + self.__appid + ', appsecret=' + self.__appsecret+']')
def auth(aliyunDns):
domain = os.environ.get('CERTBOT_DOMAIN')
value = os.environ.get('CERTBOT_VALIDATION')
if domain is None:
raise Exception('Environment variable CERTBOT_DOMAIN is empty.')
if value is None:
raise Exception('Environment variable CERTBOT_VALIDATION is empty.')
try:
logger.info('Start setting DNS')
logger.info('Domain:' + domain)
logger.info('Value:' + value)
# aliyunDns.toString()
# add letsencrypt domain record
aliyunDns.addLetsencryptDomainRecord(domain, value)
logger.debug("addDomainRecord()")
# wait for completion
logger.info('sleep 10 secs')
time.sleep(10)
logger.info('Success.')
logger.info('DNS setting end!')
except urllib.error.HTTPError as e:
logger.error(e)
sys.exit(1)
except Exception as e:
logger.error(e)
sys.exit(1)
def cleanup(aliyunDns):
domain = os.environ.get('CERTBOT_DOMAIN')
if domain is None:
raise Exception('Environment variable CERTBOT_DOMAIN is empty.')
try:
logger.info('Start to clean up')
logger.info('Domain:' + domain)
# aliyunDns.toString()
# delete letsencrypt domain record
aliyunDns.deleteLetsencryptDomainRecord(domain)
logger.info('Success.')
logger.info('Clean up end!')
except Exception as e:
logger.error(e)
sys.exit(1)
Usage="""\
Usage: {} <auth|cleanup> <appid> <secretkey>
And: set environment CERTBOT_DOMAIN CERTBOT_VALIDATION
""".format(sys.argv[0])
def main():
if len(sys.argv) == 1:
print(Usage)
sys.exit(1)
if len(sys.argv) == 4:
if "auth" == sys.argv[1] or "cleanup" == sys.argv[1]:
appid = sys.argv[2]
secretkey = sys.argv[3]
else:
logger.error(Usage)
sys.exit(1)
else:
logger.error("Usage: {} <auth|cleanup> <appid> <secretkey>".format(sys.argv[0]))
sys.exit(1)
if sys.argv[1] == "auth":
auth(AliyunDns(appid, secretkey))
elif sys.argv[1] == "cleanup":
cleanup(AliyunDns(appid, secretkey))
else:
logger.error(Usage)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
018b478deaa34ef7036f428aa0a5ce8e3ee99722 | 7f3112bd1cb6d5831370f01db1bf4ef7b9d6aee6 | /selenium/test_search_in_python_org_search.py | 43a2016183284cf053b611255f753820858169f3 | [] | no_license | insta-code1/Python-Unittests | f8a2138ae457756d8897594eaa2745a40f908a7e | 84d62edce5b929b1822d4d7a92c7edf3003ddf07 | refs/heads/master | 2020-12-25T14:59:08.705048 | 2016-09-04T12:11:22 | 2016-09-04T12:11:22 | 67,342,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 641 | py | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class PythonOrgSearch(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
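        # NOTE: assumes Firefox and a compatible driver (geckodriver for Selenium 3+) are on PATH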
def test_search_in_python_org(self):
driver = self.driver
driver.get("http://www.python.org")
self.assertIn("Python", driver.title)
elem = driver.find_element_by_name("q")
elem.send_keys("pycon")
elem.send_keys(Keys.RETURN)
assert "No results found." not in driver.page_source
def tearDown(self):
self.driver.close()
if __name__ == "__main__":
unittest.main() | [
"[email protected]"
] | |
d0684e191884794bcca60c9a003d3a736017998e | f8ece22d9e9e12e2cbca56d72a6b2728ba9a275a | /polyaxon/experiments/utils.py | 50329e5e6fe312b3cb5120c878e85833117c63a9 | [
"MIT"
] | permissive | pparan/polyaxon | 8c8912f9ba724e007357efcaefeab86fec2d5630 | 423199721e90431209b00c0f76caa6b4f9aa4b24 | refs/heads/master | 2021-04-15T07:15:19.701268 | 2018-03-21T11:59:12 | 2018-03-21T11:59:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from experiments.models import Experiment
def is_experiment_still_running(experiment_id=None, experiment_uuid=None):
if not any([experiment_id, experiment_uuid]) or all([experiment_id, experiment_uuid]):
raise ValueError('`is_experiment_still_running` function expects an experiment id or uuid.')
try:
if experiment_uuid:
experiment = Experiment.objects.get(uuid=experiment_uuid)
else:
experiment = Experiment.objects.get(id=experiment_id)
except Experiment.DoesNotExist:
return False
if not experiment.is_running:
return False
return True
| [
"[email protected]"
] | |
f4ee36d85f337be493ffa614eb246403c3fd37ca | 2bd4392a0929bf294df65bf45338d62e22474a25 | /expenses/utils.py | 07e29fb03ca4ec007e93ca6c77e29ab631a28c23 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | DjangoGworls/django-expenses | c5c7825017884be1bd53d5d19ee15acfb7bafbbd | 60f2c20c21a9f01d7efa169b501e3beb361795d1 | refs/heads/master | 2023-01-30T01:20:45.723489 | 2020-11-07T11:13:03 | 2020-11-07T11:13:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,995 | py | # Django-Expenses
# Copyright © 2018-2020, Chris Warrick.
# All rights reserved.
# See /LICENSE for licensing information.
"""Assorted Expenses utilities."""
import babel.numbers
import datetime
import decimal
import iso8601
import itertools
import typing
from django.utils import timezone
from django.conf import settings
from django.utils.translation import get_language
def format_money(amount: typing.Union[int, float, decimal.Decimal]) -> str:
"""Format an amount of money for display."""
if amount is None:
amount = 0
return babel.numbers.format_currency(
amount, settings.EXPENSES_CURRENCY_CODE, locale=settings.EXPENSES_CURRENCY_LOCALE
)
def today_date() -> datetime.date:
"""Get today’s date."""
return timezone.now().date()
def revchron(qs):
"""Sort expenses in reverse-chronological order."""
return qs.order_by("-date", "-date_added")
def round_money(amount: decimal.Decimal) -> decimal.Decimal:
"""Round money in a way appropriate for money."""
return amount.quantize(decimal.Decimal(".01"), rounding=decimal.ROUND_HALF_UP)
def dict_overwrite(destdict: dict, destkey, srcdict: dict, srckey=None) -> None:
"""Override a dict key with one taken from another dict."""
destdict[destkey] = srcdict.get(srckey or destkey, destdict[destkey])
def serialize_date(date: datetime.date) -> str:
"""Serialize a datetime."""
return date.isoformat()
def serialize_dt(dt: datetime.datetime) -> str:
"""Serialize a datetime."""
return dt.isoformat()
def serialize_decimal(amount: decimal.Decimal) -> str:
"""Serialize a decimal value."""
return str(amount)
def parse_date(date_str: str) -> datetime.date:
"""Parse an ISO 8601 date."""
return iso8601.parse_date(date_str).date()
def parse_dt(dt_str: str) -> datetime.datetime:
"""Parse an ISO 8601 datetime."""
return iso8601.parse_date(dt_str)
def parse_decimal(amount_str: str) -> decimal.Decimal:
"""Parse a decimal object."""
return decimal.Decimal(amount_str)
def parse_amount_input(amount_str: str) -> typing.Optional[decimal.Decimal]:
"""Parse an amount in a human-forgiving way."""
try:
return decimal.Decimal(amount_str)
except decimal.InvalidOperation:
try:
return decimal.Decimal(amount_str.replace(",", "."))
        except (decimal.InvalidOperation, ValueError):
return None
def get_babel_locale() -> str:
"""Get a babel-friendly locale name."""
lang, _, region = get_language().partition("-")
if not region:
region = lang.upper()
return f"{lang}_{region.upper()}"
T = typing.TypeVar("T")
def peek(iterable: typing.Iterable[T]) -> (T, typing.Iterable[T]):
"""Peek at the first row of an iterable.
Returns (first row, iterable with first row)."""
iterator = iter(iterable)
try:
first_row = next(iterator)
except StopIteration:
return None, None
return first_row, itertools.chain([first_row], iterator)
| [
"[email protected]"
] | |
9fbba12d321ad7bcae325cc7b8e8bc3d77faa827 | b557781831f6345f36f5d35b9c5fa6cbdb4c4815 | /billing/yup/views.py | eb97a8947f82f58dad204478f718bf8e1651efe5 | [] | no_license | komsihon/Project1 | 5c067bcc2f299a28163eccf27716ed092e070b78 | e32c481ad358c2a8af52d95a9bbc2f9faebfd703 | refs/heads/master | 2021-06-03T23:52:21.555310 | 2021-01-13T10:53:24 | 2021-01-13T10:53:24 | 98,784,648 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,108 | py | import json
import traceback
import requests
from django.conf import settings
from django.db import transaction
from django.http import HttpResponse
from django.http.response import HttpResponseRedirect
from django.shortcuts import render
from django.template.defaultfilters import slugify
from django.utils.module_loading import import_by_path
from ikwen.core.utils import get_service_instance
from ikwen.billing.models import PaymentMean, MoMoTransaction
import logging
logger = logging.getLogger('ikwen')
YUP = 'yup'
UNKNOWN_PHONE = '<Unknown>'
CURRENCY = "XAF"
def init_yup_web_payment(request, *args, **kwargs):
api_url = getattr(settings, 'YUP_API_URL', 'https://33027.tagpay.fr/online/online.php')
yup = json.loads(PaymentMean.objects.get(slug=YUP).credentials)
phone = UNKNOWN_PHONE
service = get_service_instance()
request.session['phone'] = phone
amount = int(request.session['amount'])
model_name = request.session['model_name']
object_id = request.session['object_id']
if request.user.is_authenticated():
username = request.user.username
language = request.user.language
else:
username = None
language = 'en'
# Request a session id
try:
params = {'merchantid': yup['merchant_id']}
session_id_request = requests.get(api_url, params=params, verify=False)
    except requests.exceptions.HTTPError as errh:
        logger.error("YUP: Http Error: %s", errh)
        return HttpResponseRedirect(request.session['cancel_url'])
    except requests.exceptions.ConnectionError as errc:
        logger.error("YUP: Error Connecting: %s", errc)
        return HttpResponseRedirect(request.session['cancel_url'])
    except requests.exceptions.Timeout as errt:
        logger.error("YUP: Timeout Error: %s", errt)
        return HttpResponseRedirect(request.session['cancel_url'])
    except requests.exceptions.RequestException as err:
        logger.error("YUP: Unexpected request error: %s", err)
        return HttpResponseRedirect(request.session['cancel_url'])
session_id_resp_message = session_id_request.text
if session_id_resp_message[:2] == "NO":
logger.debug("YUP: Unable to provide a session with %s as Merchand ID" % (yup['merchant_id']))
logger.debug("YUP: SERVER ERR TEXT is : %s" % session_id_resp_message)
return HttpResponse("Error, YUP: Unable to provide a session with %s as Merchand ID; Please check and restart" % (yup['merchant_id']))
else:
logger.debug("YUP: Session ID OK; ")
session_id = session_id_resp_message.replace('OK:', '')
payments_conf = getattr(settings, 'PAYMENTS', None)
if payments_conf:
conf = request.session['payment_conf']
path = payments_conf[conf]['after']
else:
path = getattr(settings, 'MOMO_AFTER_CASH_OUT')
with transaction.atomic(using='wallets'):
try:
momo_tx = MoMoTransaction.objects.using('wallets').get(object_id=object_id)
except MoMoTransaction.DoesNotExist:
momo_tx = MoMoTransaction.objects.using('wallets').create(service_id=service.id, type=MoMoTransaction.CASH_OUT,
phone=phone, amount=amount, model=model_name,
object_id=object_id, wallet=YUP, username=username,
callback=path)
except MoMoTransaction.MultipleObjectsReturned:
momo_tx = MoMoTransaction.objects.using('wallets').filter(object_id=object_id)[0]
request.session['tx_id'] = momo_tx.id
accept_url = request.session['return_url']
# accept_url += '/%d' % momo_tx.id
company_name = slugify(service.config.company_name).replace('-', ' ')
logger.debug("YUP: Initiating paymentof %dF with %s as Merchand ID" % (amount, yup['merchant_id']))
context = {
'api_url': api_url,
'sessionid': session_id,
'merchantid': yup['merchant_id'],
'amount': amount,
'currency': CURRENCY,
'purchaseref': object_id,
'phone': phone,
'brand': company_name,
'description': '',
'declineurl': request.session['cancel_url'],
'cancelurl': request.session['cancel_url'],
'accepturl': accept_url,
'text': '',
'language': language
}
return render(request, 'billing/yup/do_redirect.html', context)
def yup_process_notification(request, *args, **kwargs):
logger.debug("YUP: New incoming notification %s" % request.META['REQUEST_URI'])
amount = request.GET['amount']
object_id = request.GET['purchaseref']
paymentref = request.GET['paymentref']
error_text = request.GET.get('error')
status = request.GET['status']
try:
tx = MoMoTransaction.objects.using('wallets').get(object_id=object_id)
except:
logger.error("YUP: Failure while querying transaction status", exc_info=True)
return HttpResponse("OK")
logger.debug("YUP: Successful payment of %dF from %s" % (tx.amount, tx.username))
if status == "OK":
path = tx.callback
momo_after_checkout = import_by_path(path)
with transaction.atomic(using='wallets'):
try:
with transaction.atomic():
MoMoTransaction.objects.using('wallets').filter(object_id=object_id) \
.update(processor_tx_id=paymentref, message='OK', is_running=False,
status=MoMoTransaction.SUCCESS)
except:
logger.error("YUP: Could not mark transaction as Successful. User: %s, Amt: %d" % (tx.username, tx.amount), exc_info=True)
else:
try:
momo_after_checkout(request, transaction=tx)
except:
MoMoTransaction.objects.using('wallets').filter(object_id=object_id) \
.update(message=traceback.format_exc())
logger.error("YUP: Error while running callback. User: %s, Amt: %d" % (tx.username, tx.amount), exc_info=True)
elif error_text != 'AUTHENTICATION':
with transaction.atomic(using='wallets'):
try:
if "CANCEL" in error_text:
logger.debug("YUP: transaction canceled. User: %s, Amt: %d " % (tx.username, tx.amount))
MoMoTransaction.objects.using('wallets').filter(object_id=object_id) \
.update(message=error_text, is_running=False, status=MoMoTransaction.DROPPED)
else:
logger.debug("YUP: transaction failed. User: %s, Amt: %d " % (tx.username, tx.amount))
MoMoTransaction.objects.using('wallets').filter(object_id=object_id) \
.update(message=error_text, is_running=False, status=MoMoTransaction.FAILURE)
except:
logger.error("YUP: Could not mark transaction as Failed or Canceled. User: %s, Amt: %s" % (tx.username, tx.amount), exc_info=True)
return HttpResponse('OK')
| [
"[email protected]"
] | |
9e17efeaae7712f632dfc951b8c4faccf09300ea | 3a85089c2498ff04d1b9bce17a4b8bf6cf2380c9 | /EventFilter/Cosmics/python/__init__.py | 8de5e10f583a8354f7bdce130bf75b64b564ba0f | [] | no_license | sextonkennedy/cmssw-ib | c2e85b5ffa1269505597025e55db4ffee896a6c3 | e04f4c26752e0775bd3cffd3a936b288ee7b0268 | HEAD | 2016-09-01T20:09:33.163593 | 2013-04-26T12:05:17 | 2013-04-29T16:40:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | #Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/EventFilter/Cosmics/',1)[0])+'/cfipython/slc6_amd64_gcc480/EventFilter/Cosmics')
| [
"[email protected]"
] | |
5eee522f1c71624e222b152f905e6ca8a07c2df5 | b06bceb8fdc24e0c890fb2201c535cb660a94f86 | /pretrain_module/mbart_deprecated.py | ac4b7e85270d93661fd4ae3d1554b48ad9738826 | [
"MIT"
] | permissive | quanpn90/NMTGMinor | 7f294b40763b3f586d34ef4985799b851052f2ed | 5e1e424d0d9c2135a456e372a2ea9ee49de5bd2c | refs/heads/master | 2023-08-22T14:53:31.420276 | 2023-08-21T08:26:49 | 2023-08-21T08:26:49 | 116,663,163 | 92 | 39 | NOASSERTION | 2023-07-31T15:07:35 | 2018-01-08T10:33:56 | HTML | UTF-8 | Python | false | false | 8,832 | py | from .modeling_bart import MBartAttention
class MBartCrossAttentionSlow(MBartAttention):
def convert_fast_attention(self):
pass
def forward(
self,
hidden_states: torch.Tensor,
key_value_states: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
output_attentions: bool = False,
lang=None, atb=None,
incremental=False, incremental_cache=None, **kwargs
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
"""Input shape: Batch x Time x Channel"""
# if key_value_states are provided this layer is used as a cross-attention layer
# for the decoder
# is_cross_attention = key_value_states is not None
assert key_value_states is not None
bsz, tgt_len, embed_dim = hidden_states.size()
# get query proj
query_states = self.q_proj(hidden_states) * self.scaling
if incremental and ('c_k' in incremental_cache and 'c_v' in incremental_cache):
# these are stored
key_states = incremental_cache['c_k']
value_states = incremental_cache['c_v']
else:
key_states = self.k_proj(key_value_states)
value_states = self.v_proj(key_value_states)
if incremental:
incremental_cache['c_k'] = key_states
incremental_cache['c_v'] = value_states
# reshape into B x H x T x D ?
key_states = self._shape(key_states, -1, bsz)
value_states = self._shape(value_states, -1, bsz)
proj_shape = (bsz * self.num_heads, -1, self.head_dim)
query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
key_states = key_states.view(*proj_shape)
value_states = value_states.view(*proj_shape)
src_len = key_states.size(1)
attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
raise ValueError(
f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"
)
if attention_mask is not None:
if attention_mask.size() != (bsz, 1, tgt_len, src_len):
raise ValueError(
f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
)
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
attn_weights = nn.functional.softmax(attn_weights, dim=-1)
if output_attentions:
# this operation is a bit awkward, but it's required to
# make sure that attn_weights keeps its gradient.
# In order to do so, attn_weights have to be reshaped
# twice and have to be reused in the following
attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
else:
attn_weights_reshaped = None
attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)
attn_output = torch.bmm(attn_probs, value_states)
if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
raise ValueError(
f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"
)
attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
attn_output = attn_output.transpose(1, 2)
attn_output = attn_output.reshape(bsz, tgt_len, embed_dim)
attn_output = self.out_proj(attn_output)
return attn_output, attn_weights_reshaped, incremental_cache
class MBartAutoRegressiveSelfAttentionSLow(MBartAttention):
def convert_fast_attention(self):
pass
def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
def forward(
self,
hidden_states: torch.Tensor,
key_value_states: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
layer_head_mask: Optional[torch.Tensor] = None,
output_attentions: bool = False,
incremental=False, incremental_cache=None, **kwargs
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
"""Input shape: Batch x Time x Channel"""
# if key_value_states are provided this layer is used as a cross-attention layer
# for the decoder
# is_cross_attention = key_value_states is not None
assert key_value_states is None
bsz, tgt_len, embed_dim = hidden_states.size()
# get query proj
query_states = self.q_proj(hidden_states) * self.scaling
key_states = self.k_proj(hidden_states)
value_states = self.v_proj(hidden_states)
if incremental:
if 'k' in incremental_cache and 'v' in incremental_cache:
key_states = torch.cat([incremental_cache['k'], key_states], dim=1) # time first
value_states = torch.cat([incremental_cache['v'], value_states], dim=1) # time first
incremental_cache['k'] = key_states
incremental_cache['v'] = value_states
else:
incremental_cache['k'] = key_states
incremental_cache['v'] = value_states
# reshape into B x H x T x D ?
key_states = self._shape(key_states, -1, bsz)
value_states = self._shape(value_states, -1, bsz)
proj_shape = (bsz * self.num_heads, -1, self.head_dim)
query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
key_states = key_states.view(*proj_shape)
value_states = value_states.view(*proj_shape)
src_len = key_states.size(1)
attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
raise ValueError(
f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"
)
if attention_mask is not None:
if attention_mask.size() != (bsz, 1, tgt_len, src_len):
raise ValueError(
f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
)
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
attn_weights = nn.functional.softmax(attn_weights, dim=-1)
if layer_head_mask is not None:
if layer_head_mask.size() != (self.num_heads,):
raise ValueError(
f"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}"
)
attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if output_attentions:
# this operation is a bit awkward, but it's required to
# make sure that attn_weights keeps its gradient.
# In order to do so, attn_weights have to be reshaped
# twice and have to be reused in the following
attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
else:
attn_weights_reshaped = None
attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)
attn_output = torch.bmm(attn_probs, value_states)
if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
raise ValueError(
f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"
)
attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
attn_output = attn_output.transpose(1, 2)
attn_output = attn_output.reshape(bsz, tgt_len, embed_dim)
attn_output = self.out_proj(attn_output)
return attn_output, attn_weights_reshaped, incremental_cache | [
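# Minimal usage sketch (constructor signature assumed from HF's MBartAttention base class):
#   attn = MBartAutoRegressiveSelfAttentionSLow(embed_dim=1024, num_heads=16, dropout=0.1, is_decoder=True)
#   out, _, cache = attn(hidden_states, incremental=True, incremental_cache={})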
"[email protected]"
] | |
6cc0b40552a7b84b67654c5343748b10becaba83 | 8997a0bf1e3b6efe5dd9d5f307e1459f15501f5a | /qbittorrent_examples/common.py | ddc95e8e8fe8667135cad88bfda306fb07fca849 | [
"CC-BY-4.0"
] | permissive | stepik/SimplePyScripts | 01092eb1b2c1c33756427abb2debbd0c0abf533f | 3259d88cb58b650549080d6f63b15910ae7e4779 | refs/heads/master | 2023-05-15T17:35:55.743164 | 2021-06-11T22:59:07 | 2021-06-11T22:59:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import sys
from typing import List, Dict
from pathlib import Path
# pip install tabulate
from tabulate import tabulate
# pip install python-qbittorrent
from qbittorrent import Client
from config import IP_HOST, USER, PASSWORD
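# Make the repo root importable so the sibling human_byte_size module resolves.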
sys.path.append(str(Path(__file__).resolve().parent.parent))
from human_byte_size import sizeof_fmt
def print_table(rows: List[List[str]], headers: List[str], show_index=True):
if show_index:
show_index = range(1, len(rows) + 1)
text = tabulate(rows, headers=headers, tablefmt="grid", showindex=show_index)
print(text)
def print_files_table(files: List[Dict]):
rows = [(file['name'], sizeof_fmt(file['size'])) for file in sorted(files, key=lambda x: x['name'])]
headers = ['#', 'File Name', 'Size']
print_table(rows, headers)
def print_torrents(torrents: List[Dict]):
total_size = 0
for i, torrent in enumerate(torrents, 1):
torrent_size = torrent['total_size']
total_size += torrent_size
print(f"{i:3}. {torrent['name']} ({sizeof_fmt(torrent_size)})")
print()
print(f'Total torrents: {len(torrents)}, total size: {sizeof_fmt(total_size)} ({total_size} bytes)')
def get_client() -> Client:
client = Client(IP_HOST)
client.login(USER, PASSWORD)
return client
| [
"[email protected]"
] | |
c589a73bdb953c385df8a3734ad9b98afacc6e90 | 72839718a4b47b1babd4ad895ecd503a0a0e14d2 | /stembot/executor/ticket.py | fff953e6803693a1039328f71b3155b6a80807ac | [
"MIT"
] | permissive | phnomcobra/stembot-python | 6fb0d9a2874fc1bb8b8e5cf69e9f4d39c38dba5a | 497dd782556d62eeb9e9301f9de37332d93207d7 | refs/heads/master | 2021-06-17T10:56:33.148454 | 2021-02-23T00:58:00 | 2021-02-23T00:58:00 | 174,921,135 | 0 | 0 | MIT | 2021-02-07T03:48:14 | 2019-03-11T03:44:21 | Python | UTF-8 | Python | false | false | 13,314 | py | #!/usr/bin/python3
ASYNC_TICKET_TIMEOUT = 3600
SYNC_TICKET_TIMEOUT = 15
import traceback
from base64 import b64encode, b64decode
from time import time, sleep
from threading import Thread, Timer
from stembot.dao.ramdocument import Collection as RAMCollection
from stembot.dao.document import Collection as SQLCollection
from stembot.adapter.agent import MPIClient
from stembot.model.peer import create_peer
from stembot.model.peer import delete_peer
from stembot.model.peer import delete_peers
from stembot.model.peer import get_peers
from stembot.model.peer import get_routes
from stembot.model import kvstore
from stembot.adapter.python import interpret
from stembot.adapter.file import create_file_handle
from stembot.adapter.file import close_file_handle
from stembot.adapter.file import file_handle_read
from stembot.adapter.file import file_handle_write
from stembot.adapter.file import file_handle_seek
from stembot.adapter.file import file_handle_tell
from stembot.adapter.file import file_handle_truncate
from stembot.adapter.process import create_process_handle
from stembot.adapter.process import process_handle_status
from stembot.adapter.process import process_handle_kill
from stembot.adapter.process import process_handle_terminate
from stembot.adapter.process import process_handle_wait
from stembot.adapter.process import process_handle_recv
from stembot.adapter.process import process_handle_send
from stembot.adapter.process import close_process_handle
from stembot.executor.cascade import create_cascade_request
from stembot.executor.cascade import create_anonymous_cascade_request
from stembot.executor.cascade import get_cascade_responses
from stembot.executor.cascade import pop_cascade_responses
from stembot.executor.cascade import wait_on_cascade_responses
from stembot.executor.counters import increment as ctr_increment
from stembot.executor.counters import get_all as ctr_get_all
from stembot.executor.timers import register_timer
def create_ticket(request):
ctr_increment('tickets created')
tickets = RAMCollection('tickets')
ticket = tickets.get_object()
ticket.object['src'] = kvstore.get(name='agtuuid')
if 'dest' in request:
ticket.object['dest'] = request['dest']
else:
ticket.object['dest'] = kvstore.get(name='agtuuid')
ticket.object['timestamp'] = time()
ticket.object['request'] = request
ticket.object['response'] = None
ticket.set()
message = {}
message['type'] = 'ticket request'
message['src'] = ticket.object['src']
message['request'] = ticket.object['request']
message['dest'] = ticket.object['dest']
message['tckuuid'] = ticket.object['objuuid']
return message
def process_ticket(message):
ctr_increment('tickets processed')
message['type'] = 'ticket response'
message['src'], message['dest'] = message['dest'], message['src']
request = message['request']
response = {}
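    # Dispatch on the request type; each branch below fills in `response`.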
try:
if request['type'] == 'discover peer':
if 'ttl' in request:
ttl = request['ttl']
else:
ttl = None
if 'polling' in request:
polling = request['polling']
else:
            polling = False
create_peer(
MPIClient(
request['url'],
kvstore.get(name='secret_digest')
).send_json({'type': 'create info event'})['dest'],
url=request['url'],
ttl=ttl,
polling=polling
)
response = request
elif request['type'] == 'create peer':
if 'url' in request:
url = request['url']
else:
url = None
if 'ttl' in request:
ttl = request['ttl']
else:
ttl = None
if 'polling' in request:
polling = request['polling']
else:
polling = False
create_peer(
request['agtuuid'],
url=url,
ttl=ttl,
polling=polling
)
response = request
elif request['type'] == 'delete peers':
delete_peers()
response = request
elif request['type'] == 'delete peer':
delete_peer(request['agtuuid'])
response = request
elif request['type'] == 'get peers':
response = get_peers()
elif request['type'] == 'get routes':
response = get_routes()
elif request['type'] == 'get counters':
response = ctr_get_all()
elif request['type'] == 'file handle open':
response['fhduuid'] = create_file_handle(
request['filename'],
request['mode']
)
response['type'] = request['type']
elif request['type'] == 'file handle close':
close_file_handle(request['fhduuid'])
response = request
elif request['type'] == 'file handle read':
if 'size' in request:
response['b64data'] = b64encode(
file_handle_read(
request['fhduuid'],
request['size']
)
).decode()
else:
response['b64data'] = b64encode(
file_handle_read(
request['fhduuid']
)
).decode()
response['type'] = request['type']
elif request['type'] == 'file handle write':
file_handle_write(
request['fhduuid'],
b64decode(request['b64data'])
)
response = request
elif request['type'] == 'file handle truncate':
file_handle_truncate(request['fhduuid'], request['size'])
response = request
elif request['type'] == 'file handle seek':
file_handle_seek(request['fhduuid'], request['position'])
response = request
elif request['type'] == 'file handle tell':
response['position'] = file_handle_tell(request['fhduuid'])
response['type'] = request['type']
elif request['type'] == 'process handle create':
response['phduuid'] = create_process_handle(request['command'])
response['type'] = request['type']
elif request['type'] == 'process handle status':
response['status'] = process_handle_status(request['phduuid'])
elif request['type'] == 'process handle kill':
process_handle_kill(request['phduuid'])
response = request
elif request['type'] == 'process handle terminate':
process_handle_terminate(request['phduuid'])
response = request
elif request['type'] == 'process handle wait':
process_handle_wait(request['phduuid'])
response = request
elif request['type'] == 'process handle close':
close_process_handle(request['phduuid'])
response = request
elif request['type'] == 'process handle send':
process_handle_send(request['phduuid'], b64decode(request['b64data']))
response = request
elif request['type'] == 'process handle recv':
stdout, stderr = process_handle_recv(request['phduuid'])
response['stdout b64data'] = b64encode(stdout).decode()
response['stderr b64data'] = b64encode(stderr).decode()
response['type'] = request['type']
elif request['type'] == 'create cascade async':
response = create_cascade_request(request)
elif request['type'] == 'create cascade anon':
create_anonymous_cascade_request(request)
response = request
elif request['type'] == 'create cascade sync':
if 'timeout' in request:
response = wait_on_cascade_responses(
create_cascade_request(request)['cscuuid'],
request['timeout']
)
else:
response = wait_on_cascade_responses(
create_cascade_request(request)['cscuuid']
)
elif request['type'] == 'get cascade responses':
response = get_cascade_responses(request['cscuuid'])
elif request['type'] == 'pull cascade responses':
response = pop_cascade_responses(request['cscuuid'])
elif request['type'] == 'delete collection':
SQLCollection(request['name']).destroy()
response = request
elif request['type'] == 'rename collection':
SQLCollection(request['name']).rename(request['new name'])
response = request
elif request['type'] == 'create collection attribute':
SQLCollection(request['name']).create_attribute(
request['attribute'],
request['path']
)
response = request
elif request['type'] == 'delete collection attribute':
SQLCollection(request['name']).delete_attribute(request['attribute'])
response = request
elif request['type'] == 'find collection objects':
response = []
for temp in SQLCollection(request['name']).find(**request['query']):
response.append(temp.object)
elif request['type'] == 'find collection object uuids':
response = SQLCollection(request['name']).find_objuuids(**request['query'])
elif request['type'] == 'get collection object':
if 'objuuid' in request:
response = SQLCollection(request['name']).get_object(request['objuuid']).object
else:
response = SQLCollection(request['name']).get_object().object
elif request['type'] == 'set collection object':
response = request
c = SQLCollection(request['name'])
o = c.get_object(request['object']['objuuid'])
o.object = request['object']
o.set()
elif request['type'] == 'delete collection object':
response = request
SQLCollection(request['name']).get_object(request['objuuid']).destroy()
elif request['type'] == 'list collection object uuids':
response = SQLCollection(request['name']).list_objuuids()
elif request['type'] == 'ping':
response = request
elif request['type'] == 'execute python':
response['status'], response['stdout'], response['stderr'] = interpret(request['body'])
else:
raise Exception('Unknown request type!')
except:
response['exception'] = traceback.format_exc()
message['response'] = response
return message
def service_ticket(message):
ctr_increment('tickets serviced')
tickets = RAMCollection('tickets')
ticket = tickets.get_object(message['tckuuid'])
ticket.object['response'] = message['response']
ticket.set()
def wait_on_ticket_response(tckuuid, timeout=None):
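    # Poll once per second until the ticket receives a response or the timeout
    # elapses; on expiry the ticket is destroyed and an exception is raised.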
tickets = RAMCollection('tickets')
if timeout == None:
timeout = SYNC_TICKET_TIMEOUT
while True:
ticket = tickets.get_object(tckuuid)
if time() - ticket.object['timestamp'] > timeout:
ticket.destroy()
raise Exception('Ticket timeout period reached!')
if ticket.object['response'] != None:
response = ticket.object['response']
ticket.destroy()
break
sleep(1.0)
return response
def get_ticket_response(tckuuid):
tickets = RAMCollection('tickets')
ticket = tickets.get_object(tckuuid)
response = ticket.object['response']
return response
def delete_ticket(tckuuid):
RAMCollection('tickets').get_object(tckuuid).destroy()
def worker():
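    # Reaper: periodically purge tickets that have exceeded the async timeout.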
tickets = RAMCollection('tickets')
for objuuid in tickets.list_objuuids():
ticket = tickets.get_object(objuuid)
try:
if time() - ticket.object['timestamp'] > ASYNC_TICKET_TIMEOUT:
ticket.destroy()
ctr_increment('tickets expired')
except:
ticket.destroy()
register_timer(
name='ticket_worker',
target=worker,
timeout=ASYNC_TICKET_TIMEOUT
).start()
Thread(target=worker).start()
| [
"[email protected]"
] | |
69192c6ab4ee2b552ad6a32cd7ad4ec54844ebd7 | a4e187eb26c926a72ee260d3eb4f07a57eb31af0 | /src/aceinna/devices/openrtk/lan_provider.py | 8ea2bf70d39ddc9359b6154897157531eb45a6e2 | [
"Apache-2.0"
] | permissive | BrunoScaglione/python-openimu | 2cab6386a65dba3676b152ba4ed07e3579e47aa4 | 5653fad05b735a26c44e46c4ee023137e621e58e | refs/heads/master | 2023-06-04T08:36:30.982960 | 2021-05-07T09:09:12 | 2021-05-07T09:09:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,756 | py | import os
import time
import json
import datetime
import threading
import math
import re
from ..widgets import (
NTRIPClient, LanDataLogger, LanDebugDataLogger, LanRTCMDataLogger
)
from ...framework.utils import (
helper, resource
)
from ...framework.context import APP_CONTEXT
from ..base.provider_base import OpenDeviceBase
from ..configs.openrtk_predefine import (
APP_STR, get_openrtk_products, get_configuratin_file_mapping
)
from ..decorator import with_device_message
from ..parsers.open_field_parser import encode_value
from ...framework.utils.print import print_yellow
class Provider(OpenDeviceBase):
'''
OpenRTK LAN provider
'''
def __init__(self, communicator, *args):
super(Provider, self).__init__(communicator)
self.type = 'RTK'
self.server_update_rate = 100
self.sky_data = []
self.pS_data = []
self.app_config_folder = ''
self.device_info = None
self.app_info = None
self.parameters = None
self.setting_folder_path = None
self.data_folder = None
self.debug_serial_port = None
self.rtcm_serial_port = None
self.user_logf = None
self.debug_logf = None
self.rtcm_logf = None
self.debug_c_f = None
self.enable_data_log = False
self.is_app_matched = False
self.ntrip_client_enable = False
self.nmea_buffer = []
self.nmea_sync = 0
self.prepare_folders()
self.ntripClient = None
self.connected = True
self.rtk_log_file_name = ''
def prepare_folders(self):
'''
Prepare folders for data storage and configuration
'''
executor_path = resource.get_executor_path()
setting_folder_name = 'setting'
data_folder_path = os.path.join(executor_path, 'data')
if not os.path.isdir(data_folder_path):
os.makedirs(data_folder_path)
self.data_folder = data_folder_path
# copy contents of app_config under executor path
self.setting_folder_path = os.path.join(
executor_path, setting_folder_name, 'openrtk')
all_products = get_openrtk_products()
config_file_mapping = get_configuratin_file_mapping()
for product in all_products:
product_folder = os.path.join(self.setting_folder_path, product)
if not os.path.isdir(product_folder):
os.makedirs(product_folder)
for app_name in all_products[product]:
app_name_path = os.path.join(product_folder, app_name)
app_name_config_path = os.path.join(
app_name_path, config_file_mapping[product])
if not os.path.isfile(app_name_config_path):
if not os.path.isdir(app_name_path):
os.makedirs(app_name_path)
app_config_content = resource.get_content_from_bundle(
setting_folder_name, os.path.join(product, app_name, config_file_mapping[product]))
if app_config_content is None:
continue
with open(app_name_config_path, "wb") as code:
code.write(app_config_content)
def bind_device_info(self, device_access, device_info, app_info):
self._build_device_info(device_info)
self._build_app_info(app_info)
self.connected = True
self._device_info_string = '# Connected {0} with LAN #\n\rDevice: {1} \n\rFirmware: {2}'\
.format('OpenRTK', device_info, app_info)
return self._device_info_string
def _build_device_info(self, text):
'''
Build device info
'''
split_text = text.split(' ')
sn = split_text[4]
# remove the prefix of SN
if sn.find('SN:') == 0:
sn = sn[3:]
self.device_info = {
'name': split_text[0],
'imu': split_text[1],
'pn': split_text[2],
'firmware_version': split_text[3],
'sn': sn
}
def _build_app_info(self, text):
'''
Build app info
'''
app_version = text
split_text = app_version.split(' ')
app_name = next(
(item for item in APP_STR if item in split_text), None)
if not app_name:
app_name = 'RTK_INS'
self.is_app_matched = False
else:
self.is_app_matched = True
self.app_info = {
'app_name': app_name,
'version': text
}
def load_properties(self):
# Load config from user working path
local_config_file_path = os.path.join(os.getcwd(), 'openrtk.json')
if os.path.isfile(local_config_file_path):
with open(local_config_file_path) as json_data:
self.properties = json.load(json_data)
return
# Load the openimu.json based on its app
product_name = self.device_info['name']
app_name = self.app_info['app_name']
app_file_path = os.path.join(
self.setting_folder_path, product_name, app_name, 'openrtk.json')
with open(app_file_path) as json_data:
self.properties = json.load(json_data)
if not self.is_app_matched:
print_yellow(
'Failed to extract app version information from unit.' +
'\nThe supported application list is {0}.'.format(APP_STR) +
                '\nTo keep running, use INS configuration as default.' +
'\nYou can choose to place your json file under execution path if it is an unknown application.')
def ntrip_client_thread(self):
self.ntripClient = NTRIPClient(self.properties, self.communicator)
self.ntripClient.run()
def after_setup(self):
set_user_para = self.cli_options and self.cli_options.set_user_para
self.ntrip_client_enable = self.cli_options and self.cli_options.ntrip_client
# with_raw_log = self.cli_options and self.cli_options.with_raw_log
if set_user_para:
result = self.set_params(
self.properties["initial"]["userParameters"])
##print('set user para {0}'.format(result))
if result['packetType'] == 'success':
self.save_config()
if self.ntrip_client_enable:
t = threading.Thread(target=self.ntrip_client_thread)
t.start()
try:
if self.data_folder is not None:
dir_time = time.strftime("%Y%m%d_%H%M%S", time.localtime())
file_time = time.strftime(
"%Y_%m_%d_%H_%M_%S", time.localtime())
file_name = self.data_folder + '/' + 'openrtk_log_' + dir_time
os.mkdir(file_name)
self.rtk_log_file_name = file_name
self.user_logf = open(
file_name + '/' + 'user_' + file_time + '.bin', "wb")
self.debug_logf = open(
file_name + '/' + 'debug_' + file_time + '.bin', "wb")
self.rtcm_logf = open(
file_name + '/' + 'rtcm_' + file_time + '.bin', "wb")
# start a thread to log data
threading.Thread(target=self.thread_data_log).start()
threading.Thread(target=self.thread_debug_data_log).start()
threading.Thread(target=self.thread_rtcm_data_log).start()
self.save_device_info()
except Exception as e:
print(e)
return False
def nmea_checksum(self, data):
data = data.replace("\r", "").replace("\n", "").replace("$", "")
nmeadata, cksum = re.split('\*', data)
calc_cksum = 0
for s in nmeadata:
calc_cksum ^= ord(s)
return int(cksum, 16), calc_cksum
def on_read_raw(self, data):
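        # Byte-wise scan: accumulate characters from '$' through CRLF into a
        # candidate NMEA sentence, then verify its checksum below.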
for bytedata in data:
if bytedata == 0x24:
self.nmea_buffer = []
self.nmea_sync = 0
self.nmea_buffer.append(chr(bytedata))
else:
self.nmea_buffer.append(chr(bytedata))
if self.nmea_sync == 0:
if bytedata == 0x0D:
self.nmea_sync = 1
elif self.nmea_sync == 1:
if bytedata == 0x0A:
try:
str_nmea = ''.join(self.nmea_buffer)
cksum, calc_cksum = self.nmea_checksum(
str_nmea)
if cksum == calc_cksum:
if str_nmea.find("$GPGGA") != -1:
if self.ntrip_client_enable and self.ntripClient != None:
self.ntripClient.send(str_nmea)
print(str_nmea, end='')
# else:
# print("nmea checksum wrong {0} {1}".format(cksum, calc_cksum))
except Exception as e:
# print('NMEA fault:{0}'.format(e))
pass
self.nmea_buffer = []
self.nmea_sync = 0
# if self.user_logf is not None:
# self.user_logf.write(data)
def thread_data_log(self, *args, **kwargs):
self.lan_data_logger = LanDataLogger(
self.properties, self.communicator, self.user_logf)
self.lan_data_logger.run()
def thread_debug_data_log(self, *args, **kwargs):
self.lan_debug_data_logger = LanDebugDataLogger(
self.properties, self.communicator, self.debug_logf)
self.lan_debug_data_logger.run()
def thread_rtcm_data_log(self, *args, **kwargs):
self.lan_rtcm_data_logger = LanRTCMDataLogger(
self.properties, self.communicator, self.rtcm_logf)
self.lan_rtcm_data_logger.run()
def on_receive_output_packet(self, packet_type, data, error=None):
'''
Listener for getting output packet
'''
# $GPGGA,080319.00,3130.4858508,N,12024.0998832,E,4,25,0.5,12.459,M,0.000,M,2.0,*46
if packet_type == 'gN':
if self.ntrip_client_enable:
# $GPGGA
gpgga = '$GPGGA'
# time
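            # (GPS time leads UTC by 18 leap seconds, hence the offset below)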
timeOfWeek = float(data['GPS_TimeofWeek']) - 18
dsec = int(timeOfWeek)
msec = timeOfWeek - dsec
sec = dsec % 86400
hour = int(sec / 3600)
minute = int(sec % 3600 / 60)
second = sec % 60
gga_time = format(hour*10000 + minute*100 +
second + msec, '09.2f')
gpgga = gpgga + ',' + gga_time
# latitude
latitude = float(data['latitude']) * 180 / 2147483648.0
if latitude >= 0:
latflag = 'N'
else:
latflag = 'S'
latitude = math.fabs(latitude)
lat_d = int(latitude)
lat_m = (latitude-lat_d) * 60
lat_dm = format(lat_d*100 + lat_m, '012.7f')
gpgga = gpgga + ',' + lat_dm + ',' + latflag
# longitude
longitude = float(data['longitude']) * 180 / 2147483648.0
if longitude >= 0:
lonflag = 'E'
else:
lonflag = 'W'
longitude = math.fabs(longitude)
lon_d = int(longitude)
lon_m = (longitude-lon_d) * 60
lon_dm = format(lon_d*100 + lon_m, '013.7f')
gpgga = gpgga + ',' + lon_dm + ',' + lonflag
# positionMode
gpgga = gpgga + ',' + str(data['positionMode'])
# svs
gpgga = gpgga + ',' + str(data['numberOfSVs'])
# hop
gpgga = gpgga + ',' + format(float(data['hdop']), '03.1f')
# height
gpgga = gpgga + ',' + \
format(float(data['height']), '06.3f') + ',M'
#
gpgga = gpgga + ',0.000,M'
# diffage
gpgga = gpgga + ',' + \
format(float(data['diffage']), '03.1f') + ','
# ckm
checksum = 0
for i in range(1, len(gpgga)):
checksum = checksum ^ ord(gpgga[i])
            # format as two uppercase hex digits, as the NMEA spec requires
            str_checksum = format(checksum, '02X')
            gpgga = gpgga + '*' + str_checksum + '\r\n'
print(gpgga)
if self.ntripClient != None:
self.ntripClient.send(gpgga)
return
elif packet_type == 'pS':
try:
if data['latitude'] != 0.0 and data['longitude'] != 0.0:
if self.pS_data:
if self.pS_data['GPS_Week'] == data['GPS_Week']:
if data['GPS_TimeofWeek'] - self.pS_data['GPS_TimeofWeek'] >= 0.2:
self.add_output_packet('pos', data)
self.pS_data = data
if data['insStatus'] >= 3 and data['insStatus'] <= 5:
ins_status = 'INS_INACTIVE'
if data['insStatus'] == 3:
ins_status = 'INS_SOLUTION_GOOD'
elif data['insStatus'] == 4:
ins_status = 'INS_SOLUTION_FREE'
elif data['insStatus'] == 5:
ins_status = 'INS_ALIGNMENT_COMPLETE'
ins_pos_type = 'INS_INVALID'
if data['insPositionType'] == 1:
ins_pos_type = 'INS_SPP'
elif data['insPositionType'] == 4:
ins_pos_type = 'INS_RTKFIXED'
elif data['insPositionType'] == 5:
ins_pos_type = 'INS_RTKFLOAT'
inspva = '#INSPVA,%s,%10.2f, %s, %s,%12.8f,%13.8f,%8.3f,%9.3f,%9.3f,%9.3f,%9.3f,%9.3f,%9.3f' %\
(data['GPS_Week'], data['GPS_TimeofWeek'], ins_status, ins_pos_type,
data['latitude'], data['longitude'], data['height'],
data['velocityNorth'], data['velocityEast'], data['velocityUp'],
data['roll'], data['pitch'], data['heading'])
print(inspva)
else:
self.add_output_packet('pos', data)
self.pS_data = data
else:
self.add_output_packet('pos', data)
self.pS_data = data
except Exception as e:
# print(e)
pass
elif packet_type == 'sK':
if self.sky_data:
if self.sky_data[0]['timeOfWeek'] == data[0]['timeOfWeek']:
self.sky_data.extend(data)
else:
self.add_output_packet('skyview', self.sky_data)
self.add_output_packet('snr', self.sky_data)
self.sky_data = []
self.sky_data.extend(data)
else:
self.sky_data.extend(data)
else:
output_packet_config = next(
(x for x in self.properties['userMessages']['outputPackets']
if x['name'] == packet_type), None)
if output_packet_config and output_packet_config.__contains__('from') \
and output_packet_config['from'] == 'imu':
self.add_output_packet('imu', data)
def do_write_firmware(self, firmware_content):
raise Exception('It is not supported by connecting device with LAN')
# rules = [
# InternalCombineAppParseRule('rtk', 'rtk_start:', 4),
# InternalCombineAppParseRule('sdk', 'sdk_start:', 4),
# ]
# parsed_content = firmware_content_parser(firmware_content, rules)
# user_port_num, port_name = self.build_connected_serial_port_info()
# sdk_port = port_name + str(int(user_port_num) + 3)
# sdk_uart = serial.Serial(sdk_port, 115200, timeout=0.1)
# if not sdk_uart.isOpen():
# raise Exception('Cannot open SDK upgrade port')
# upgrade_center = UpgradeCenter()
# upgrade_center.register(
# FirmwareUpgradeWorker(self.communicator, parsed_content['rtk']))
# upgrade_center.register(
# SDKUpgradeWorker(sdk_uart, parsed_content['sdk']))
# upgrade_center.on('progress', self.handle_upgrade_process)
# upgrade_center.on('error', self.handle_upgrade_error)
# upgrade_center.on('finish', self.handle_upgrade_complete)
# upgrade_center.start()
def get_device_connection_info(self):
return {
'modelName': self.device_info['name'],
'deviceType': self.type,
'serialNumber': self.device_info['sn'],
'partNumber': self.device_info['pn'],
'firmware': self.device_info['firmware_version']
}
def get_operation_status(self):
if self.is_logging:
return 'LOGGING'
return 'IDLE'
def save_device_info(self):
if not self.rtk_log_file_name or not self._device_info_string:
return
local_time = time.localtime()
formatted_file_time = time.strftime("%Y_%m_%d_%H_%M_%S", local_time)
file_path = os.path.join(
self.rtk_log_file_name,
'device_info_{0}.txt'.format(formatted_file_time)
)
with open(file_path, 'w') as outfile:
outfile.write(self._device_info_string)
# command list
def server_status(self, *args): # pylint: disable=invalid-name
'''
Get server connection status
'''
return {
'packetType': 'ping',
'data': {'status': '1'}
}
def get_device_info(self, *args): # pylint: disable=invalid-name
'''
Get device information
'''
return {
'packetType': 'deviceInfo',
'data': [
{'name': 'Product Name', 'value': self.device_info['name']},
{'name': 'IMU', 'value': self.device_info['imu']},
{'name': 'PN', 'value': self.device_info['pn']},
{'name': 'Firmware Version',
'value': self.device_info['firmware_version']},
{'name': 'SN', 'value': self.device_info['sn']},
{'name': 'App Version', 'value': self.app_info['version']}
]
}
def get_log_info(self):
'''
Build information for log
'''
return {
"type": self.type,
"model": self.device_info['name'],
"logInfo": {
"pn": self.device_info['pn'],
"sn": self.device_info['sn'],
"rtkProperties": json.dumps(self.properties)
}
}
def get_conf(self, *args): # pylint: disable=unused-argument
'''
Get json configuration
'''
return {
'packetType': 'conf',
'data': {
'outputs': self.properties['userMessages']['outputPackets'],
'inputParams': self.properties['userConfiguration']
}
}
@with_device_message
def get_params(self, *args): # pylint: disable=unused-argument
'''
Get all parameters
'''
has_error = False
parameter_values = []
if self.app_info['app_name'] == 'INS':
conf_parameters = self.properties['userConfiguration']
conf_parameters_len = len(conf_parameters)-1
step = 10
for i in range(2, conf_parameters_len, step):
start_byte = i
end_byte = i+step-1 if i+step < conf_parameters_len else conf_parameters_len
command_line = helper.build_packet(
'gB', [start_byte, end_byte])
result = yield self._message_center.build(command=command_line, timeout=2)
if result['error']:
has_error = True
break
parameter_values.extend(result['data'])
else:
command_line = helper.build_input_packet('gA')
result = yield self._message_center.build(command=command_line, timeout=3)
if result['error']:
has_error = True
parameter_values = result['data']
if not has_error:
self.parameters = parameter_values
yield {
'packetType': 'inputParams',
'data': parameter_values
}
yield {
'packetType': 'error',
'data': 'No Response'
}
@with_device_message
def get_param(self, params, *args): # pylint: disable=unused-argument
'''
Update paramter value
'''
command_line = helper.build_input_packet(
'gP', properties=self.properties, param=params['paramId'])
# self.communicator.write(command_line)
# result = self.get_input_result('gP', timeout=1)
result = yield self._message_center.build(command=command_line)
data = result['data']
error = result['error']
if error:
yield {
'packetType': 'error',
'data': 'No Response'
}
if data:
self.parameters = data
yield {
'packetType': 'inputParam',
'data': data
}
yield {
'packetType': 'error',
'data': 'No Response'
}
@with_device_message
def set_params(self, params, *args): # pylint: disable=unused-argument
'''
Update paramters value
'''
input_parameters = self.properties['userConfiguration']
grouped_parameters = {}
for parameter in params:
exist_parameter = next(
(x for x in input_parameters if x['paramId'] == parameter['paramId']), None)
if exist_parameter:
has_group = grouped_parameters.__contains__(
exist_parameter['category'])
if not has_group:
grouped_parameters[exist_parameter['category']] = []
current_group = grouped_parameters[exist_parameter['category']]
current_group.append(
{'paramId': parameter['paramId'], 'value': parameter['value'], 'type': exist_parameter['type']})
for group in grouped_parameters.values():
message_bytes = []
for parameter in group:
message_bytes.extend(
encode_value('int8', parameter['paramId'])
)
message_bytes.extend(
encode_value(parameter['type'], parameter['value'])
)
# print('parameter type {0}, value {1}'.format(
# parameter['type'], parameter['value']))
# result = self.set_param(parameter)
command_line = helper.build_packet(
'uB', message_bytes)
# for s in command_line:
# print(hex(s))
result = yield self._message_center.build(command=command_line)
packet_type = result['packet_type']
data = result['data']
if packet_type == 'error':
yield {
'packetType': 'error',
'data': {
'error': data
}
}
break
if data > 0:
yield {
'packetType': 'error',
'data': {
'error': data
}
}
break
yield {
'packetType': 'success',
'data': {
'error': 0
}
}
@with_device_message
def set_param(self, params, *args): # pylint: disable=unused-argument
'''
Update paramter value
'''
command_line = helper.build_input_packet(
'uP', properties=self.properties, param=params['paramId'], value=params['value'])
# self.communicator.write(command_line)
# result = self.get_input_result('uP', timeout=1)
result = yield self._message_center.build(command=command_line)
error = result['error']
data = result['data']
if error:
yield {
'packetType': 'error',
'data': {
'error': data
}
}
yield {
'packetType': 'success',
'data': {
'error': data
}
}
@with_device_message
def save_config(self, *args): # pylint: disable=unused-argument
'''
Save configuration
'''
command_line = helper.build_input_packet('sC')
# self.communicator.write(command_line)
# result = self.get_input_result('sC', timeout=2)
result = yield self._message_center.build(command=command_line, timeout=2)
data = result['data']
error = result['error']
if data:
yield {
'packetType': 'success',
'data': error
}
yield {
'packetType': 'success',
'data': error
}
@with_device_message
def reset_params(self, params, *args): # pylint: disable=unused-argument
'''
Reset params to default
'''
command_line = helper.build_input_packet('rD')
result = yield self._message_center.build(command=command_line, timeout=2)
error = result['error']
data = result['data']
if error:
yield {
'packetType': 'error',
'data': {
'error': error
}
}
yield {
'packetType': 'success',
'data': data
}
def upgrade_framework(self, params, *args): # pylint: disable=unused-argument
'''
Upgrade framework
'''
file = ''
if isinstance(params, str):
file = params
if isinstance(params, dict):
file = params['file']
# start a thread to do upgrade
if not self.is_upgrading:
self.is_upgrading = True
self._message_center.pause()
if self._logger is not None:
self._logger.stop_user_log()
thread = threading.Thread(
target=self.thread_do_upgrade_framework, args=(file,))
thread.start()
print("Upgrade OpenRTK firmware started at:[{0}].".format(
datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
return {
'packetType': 'success'
}
| [
"[email protected]"
] | |
4d066a1f3af37064dc6990b14a9a2e2baf54dc92 | 8f70b40ef1c657ee14accfe6e2f8b1ebb1bebb7e | /employeeform/migrations/0004_auto_20191206_1630.py | f84f3d3d85c39b082cf3985e9977f625ffe70444 | [] | no_license | TejashviVerma/School_ERP | e3d6f1aabe92167c2b55c0b1682dde505bb04edd | 11406da8b1d8701b7ea55f75c76f1cbf44a72c53 | refs/heads/master | 2023-08-03T15:10:11.481306 | 2020-09-13T18:02:40 | 2020-09-13T18:02:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | # Generated by Django 2.2.5 on 2019-12-06 11:00
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('employeeform', '0003_auto_20191206_1619'),
]
operations = [
migrations.RenameField(
model_name='employeedocuments',
old_name='idProof',
new_name='IdProof',
),
]
| [
"[email protected]"
] | |
482a9c23b8b78c4c068f2a92b69400934aa9d8fd | 5f06ea565f6d0d555a0034de591c1948b925a7e7 | /blog/views.py | 1cae3ad2d74213e99b7c23fb9a3da2f424d190bb | [] | no_license | cement-hools/blog_by_molchanov | 82ef3385080320b74a1cd9c4c21446d8f0ae60e4 | da0a4c2c083c5c1da0d720a631ae1253792b32be | refs/heads/main | 2023-03-30T08:51:41.100697 | 2021-03-28T02:09:49 | 2021-03-28T02:09:49 | 350,162,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,702 | py | from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.paginator import Paginator
from django.db.models import Q
from django.shortcuts import render
from django.views.generic import View
from blog.forms import TagForm, PostForm
from blog.models import Post, Tag
from blog.utils import (ObjectDetailMixin, ObjectCreateMixin,
ObjectUpdateMixin, ObjectDelete)
OBJ_IN_PAGE = 3
def posts_list(request):
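    # An optional ?search= parameter filters posts by title or body.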
search_query = request.GET.get('search')
if search_query:
posts = Post.objects.filter(
Q(title__icontains=search_query) |
Q(body__icontains=search_query)
)
else:
posts = Post.objects.all()
paginator = Paginator(posts, OBJ_IN_PAGE)
page_number = request.GET.get('page', 1)
page = paginator.get_page(page_number)
is_paginated = page.has_other_pages()
if page.has_previous():
prev_url = f'?page={page.previous_page_number()}'
else:
prev_url = ''
if page.has_next():
next_url = f'?page={page.next_page_number()}'
else:
next_url = ''
context = {
'page_object': page,
'is_paginated': is_paginated,
'next_url': next_url,
'prev_url': prev_url,
}
return render(request, 'blog/index.html', context)
class PostDetail(ObjectDetailMixin, View):
model = Post
template = 'blog/post_detail.html'
class PostCreate(LoginRequiredMixin, ObjectCreateMixin, View):
model_form = PostForm
template = 'blog/post_create_form.html'
raise_exception = True
class PostUpdate(LoginRequiredMixin, ObjectUpdateMixin, View):
model = Post
model_form = PostForm
template = 'blog/post_update_form.html'
raise_exception = True
class PostDelete(LoginRequiredMixin, ObjectDelete, View):
model = Post
template = 'blog/post_delete_form.html'
redirect_url = 'posts_list_url'
raise_exception = True
class TagDetail(ObjectDetailMixin, View):
model = Tag
template = 'blog/tag_detail.html'
class TagCreate(LoginRequiredMixin, ObjectCreateMixin, View):
model_form = TagForm
template = 'blog/tag_create_form.html'
raise_exception = True
class TagUpdate(LoginRequiredMixin, ObjectUpdateMixin, View):
model = Tag
model_form = TagForm
template = 'blog/tag_update_form.html'
raise_exception = True
class TagDelete(LoginRequiredMixin, ObjectDelete, View):
model = Tag
template = 'blog/tag_delete_form.html'
redirect_url = 'tags_list_url'
raise_exception = True
def tags_list(request):
tags = Tag.objects.all()
context = {
'tags': tags,
}
return render(request, 'blog/tags_list.html', context)
| [
"[email protected]"
] | |
65da08b0f3c75f793eca363ec016e0441370c495 | a47ac7c64cb6bb1f181eadff8e4b24735c19080a | /PythonStudy/9-Tkinter/4-Entry.py | fc6d9a973f75667cf9bcbae7cca69b495df559b5 | [
"MIT"
] | permissive | CoderTitan/PythonDemo | 6dcc88496b181df959a9d43b963fe43a6e4cb032 | feb5ef8be91451b4622764027ac684972c64f2e0 | refs/heads/master | 2020-03-09T09:15:28.299827 | 2018-08-21T03:43:25 | 2018-08-21T03:43:25 | 128,708,650 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,321 | py | # Main window
import tkinter
# Validate the entered text
def varileText():
text = entry4.get()
if text != '1':
        print('Correct')
        return True
    print('Wrong')
    return False
#
def testInvaild():
    print('invalidcommand was called')
return True
# Create the main window
window = tkinter.Tk()
# Set the window title
window.title('Titanjun')
# Set the window size
window.geometry('400x400')
button = tkinter.Button(window, text='Titan', bg='#ff4040')
button.pack()
'''
Entry widget
Used to display and collect simple text content
'''
vari = tkinter.Variable()
entry = tkinter.Entry(window, textvariable=vari)
entry.pack()
# Set the value
vari.set('very good')
# Get the value
print(vari.get())
print(entry.get())
# Read-only entry field
vari2 = tkinter.Variable()
entry2 = tkinter.Entry(window, textvariable=vari2, state='disabled')
entry2.pack()
# Set the value
vari2.set('very bad')
print(vari2.get())
# Password-style entry: whatever is typed is displayed masked
vari3 = tkinter.Variable()
entry3 = tkinter.Entry(window, textvariable=vari3, show='@', bg='red', fg='white')
entry3.pack()
# Validate whether the entered content meets the requirement
vari4 = tkinter.Variable()
entry4 = tkinter.Entry(window, textvariable=vari4, validate='key', validatecommand=varileText, invalidcommand=testInvaild)
entry4.pack()
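# Note: with validate='key' the validatecommand runs on every keystroke; a
# False return rejects the edit and triggers the invalidcommand callback.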
# Enter the main event loop
window.mainloop()
| [
"[email protected]"
] | |
c7224b78c1a6f736145512b1515152716e084fb0 | 7a63ce94e1806a959c9c445c2e0bae95afb760c8 | /tests/user/test_update_credentials.py | 0525fc1882db4236ea941f77e653a698474a366a | [
"MIT"
] | permissive | pklauke/pycamunda | 20b54ceb4a40e836148e84912afd04d78d6ba0ec | 3faac4037212df139d415ee1a54a6594ae5e9ac5 | refs/heads/master | 2023-08-18T10:23:30.503737 | 2022-04-17T18:34:40 | 2022-04-17T18:34:40 | 240,333,835 | 40 | 16 | MIT | 2023-09-12T13:29:08 | 2020-02-13T18:37:25 | Python | UTF-8 | Python | false | false | 2,128 | py | # -*- coding: utf-8 -*-
import unittest.mock
import pytest
import pycamunda.base
import pycamunda.user
import pycamunda.resource
from tests.mock import raise_requests_exception_mock, not_ok_response_mock
def test_update_credentials_params(engine_url, update_credentials_input):
update_credentials = pycamunda.user.UpdateCredentials(
url=engine_url, **update_credentials_input
)
assert update_credentials.url == engine_url + '/user/janedoe/credentials'
assert update_credentials.query_parameters() == {}
assert update_credentials.body_parameters() == {
'password': 'password',
'authenticatedUserPassword': 'password'
}
@unittest.mock.patch('requests.Session.request')
def test_update_credentials_calls_requests(mock, engine_url, update_credentials_input):
update_credentials = pycamunda.user.UpdateCredentials(
url=engine_url, **update_credentials_input
)
update_credentials()
assert mock.called
assert mock.call_args[1]['method'].upper() == 'PUT'
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_update_credentials_raises_pycamunda_exception(engine_url, update_credentials_input):
update_credentials = pycamunda.user.UpdateCredentials(
url=engine_url, **update_credentials_input
)
with pytest.raises(pycamunda.PyCamundaException):
update_credentials()
@unittest.mock.patch('requests.Session.request', not_ok_response_mock)
@unittest.mock.patch('pycamunda.base._raise_for_status')
def test_update_credentials_raises_for_status(mock, engine_url, update_credentials_input):
update_credentials = pycamunda.user.UpdateCredentials(
url=engine_url, **update_credentials_input
)
update_credentials()
assert mock.called
@unittest.mock.patch('requests.Session.request', unittest.mock.MagicMock())
def test_update_credentials_returns_none(engine_url, update_credentials_input):
update_credentials = pycamunda.user.UpdateCredentials(
url=engine_url, **update_credentials_input
)
result = update_credentials()
assert result is None
| [
"[email protected]"
] | |
9edb6fb910255cf29713ca49bd8f2e57d1186ea7 | a5aa3e80fe2e97cc9de3d42be873fdf468a68968 | /a10_openstack_lib/resources/a10_scaling_group.py | 312cd817ddd0ea9bc95f2784cacf72012e30ba03 | [
"Apache-2.0"
] | permissive | Cedev/a10-openstack-lib | 60911420f781db99f9d7456be5c4c707985c3c2d | 23c6a5ae2cfaeb5bb950e96be3a79c3b0e014247 | refs/heads/master | 2020-04-05T22:53:54.765410 | 2016-06-07T23:02:01 | 2016-06-07T23:02:01 | 61,076,970 | 0 | 0 | null | 2016-06-13T23:41:50 | 2016-06-13T23:41:49 | Python | UTF-8 | Python | false | false | 12,633 | py | # Copyright (C) 2016 A10 Networks Inc. All rights reserved.
EXTENSION = 'a10-scaling-group'
SERVICE = "A10_SCALING_GROUP"
SCALING_GROUPS = 'a10_scaling_groups'
SCALING_GROUP = 'a10_scaling_group'
SCALING_GROUP_WORKERS = 'a10_scaling_group_workers'
SCALING_GROUP_WORKER = 'a10_scaling_group_worker'
SCALING_POLICIES = 'a10_scaling_policies'
SCALING_POLICY = 'a10_scaling_policy'
SCALING_ALARMS = 'a10_scaling_alarms'
SCALING_ALARM = 'a10_scaling_alarm'
SCALING_ACTIONS = 'a10_scaling_actions'
SCALING_ACTION = 'a10_scaling_action'
ALARM_UNITS = ['count', 'percentage', 'bytes']
ALARM_AGGREGATIONS = ['avg', 'min', 'max', 'sum']
ALARM_MEASUREMENTS = ['connections', 'memory', 'cpu', 'interface']
ALARM_OPERATORS = ['>=', '>', '<=', '<']
ALARM_PERIOD_UNITS = ['minute', 'hour', 'day']
ACTIONS = ['scale-in', 'scale-out']
RESOURCE_ATTRIBUTE_MAP = {
SCALING_GROUPS: {
'id': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:uuid': None
},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'required_by_policy': True,
'is_visible': True
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': ''
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': '',
},
'scaling_policy_id': {
'allow_post': True,
'allow_put': True,
'validate': {
'a10_type:nullable': {
'type:uuid': None,
'a10_type:reference': SCALING_POLICY
}
},
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
}
},
SCALING_GROUP_WORKERS: {
'id': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:uuid': None
},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'required_by_policy': True,
'is_visible': True
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': ''
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': '',
},
'scaling_group_id': {
'allow_post': True,
'allow_put': False,
'validate': {
'type:uuid': None,
'a10_type:reference': SCALING_GROUP
},
'is_visible': True
},
'host': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True
},
'username': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True
},
'password': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': False
},
'api_version': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:values': ['2.1', '3.0']
},
'is_visible': True
},
'protocol': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:values': ['http', 'https']
},
'convert_to': lambda attr: convert_to_lower,
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
},
'port': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:range': [0, 65535]
},
'convert_to': lambda attr: attr.convert_to_int,
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
},
'nova_instance_id': {
'allow_post': False,
'allow_put': False,
'validate': {
'type:uuid': None
},
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
}
},
SCALING_POLICIES: {
'id': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:uuid': None
},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'required_by_policy': True,
'is_visible': True
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': ''
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': '',
},
'cooldown': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:non_negative': None
},
'convert_to': lambda attr: attr.convert_to_int,
'is_visible': True,
'default': 300,
},
'min_instances': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:non_negative': None
},
'convert_to': lambda attr: attr.convert_to_int,
'is_visible': True,
'default': 1,
},
'max_instances': {
'allow_post': True,
'allow_put': True,
'validate': {
'a10_type:nullable': {
'type:non_negative': None
}
},
'convert_to': lambda attr: convert_nullable(attr.convert_to_int),
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
},
'reactions': {
'allow_post': True,
'allow_put': True,
'convert_list_to': lambda attr: attr.convert_kvp_list_to_dict,
'is_visible': True,
'default': lambda attr: attr.ATTR_NOT_SPECIFIED
}
},
SCALING_ALARMS: {
'id': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:uuid': None
},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'required_by_policy': True,
'is_visible': True
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': ''
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': '',
},
'aggregation': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['avg', 'min', 'max', 'sum']
},
'is_visible': True,
'convert_to': lambda attr: convert_to_lower,
'default': 'avg'
},
'measurement': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['connections', 'memory', 'cpu', 'interface']
},
'convert_to': lambda attr: convert_to_lower,
'is_visible': True
},
'operator': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['>=', '>', '<=', '<']
},
'is_visible': True
},
'threshold': {
'allow_post': True,
'allow_put': True,
'validate': {
'a10_type:float': None
},
'convert_to': lambda attr: convert_to_float,
'is_visible': True
},
'unit': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['count', 'percentage', 'bytes']
},
'convert_to': lambda attr: convert_to_lower,
'is_visible': True
},
'period': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:non_negative': None
},
'convert_to': lambda attr: attr.convert_to_int,
'is_visible': True,
},
'period_unit': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['minute', 'hour', 'day']
},
'convert_to': lambda attr: convert_to_lower,
'is_visible': True
}
},
SCALING_ACTIONS: {
'id': {
'allow_post': False,
'allow_put': True,
'validate': {
'type:uuid': None
},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'required_by_policy': True,
'is_visible': True
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': ''
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:string': None
},
'is_visible': True,
'default': '',
},
'action': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:values': ['scale-in', 'scale-out']
},
'convert_to': lambda attr: convert_to_lower,
'is_visible': True
},
'amount': {
'allow_post': True,
'allow_put': True,
'validate': {
'type:non_negative': None
},
'convert_to': lambda attr: attr.convert_to_int,
'is_visible': True,
},
}
}
def convert_to_lower(input):
try:
return input.lower()
except AttributeError:
return input
def convert_to_float(input):
try:
return float(input)
except ValueError:
return input
def convert_nullable(convert_value):
def f(input):
if input is not None:
return convert_value(input)
return None
return f
def validate_float(data, options):
if not isinstance(data, float):
return "'%s' is not a number" % input
def validate_reference(data, options):
"""Referential integrity is enforced by the data model"""
return None
def validate_nullable(validators):
def f(data, options):
if data is not None:
for rule in options:
value_validator = validators[rule]
reason = value_validator(data, options[rule])
if reason:
return reason
return f
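# Registry mapping custom 'a10_type' rules to validator factories; each factory
# takes the framework's validator table and returns the actual check function.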
VALIDATORS = {
'a10_type:float': lambda validators: validate_float,
'a10_type:reference': lambda validators: validate_reference,
'a10_type:nullable': validate_nullable
}
| [
"[email protected]"
] | |
b2741fa2aa47d2ca507a4a587d78662b490be852 | b47f2e3f3298388b1bcab3213bef42682985135e | /experiments/jacobi-2d/tmp_files/4634.py | 598e8470565aa941811dde2f95b33c4baece406f | [
"BSD-2-Clause"
] | permissive | LoopTilingBenchmark/benchmark | 29cc9f845d323431e3d40e878cbfc6d1aad1f260 | 52a3d2e70216552a498fd91de02a2fa9cb62122c | refs/heads/master | 2020-09-25T09:45:31.299046 | 2019-12-04T23:25:06 | 2019-12-04T23:25:06 | 225,975,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/jacobi-2d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/jacobi-2d/tmp_files/4634.c')
procedure('kernel_jacobi_2d')
loop(0)
known(' n > 2 ')
tile(0,2,16,2)
tile(0,4,64,4)
tile(1,2,16,2)
tile(1,4,64,4)
| [
"[email protected]"
] | |
b85e6af344facb6e0df6e9ed8dff20da26f7144a | 10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94 | /Python/merge-strings-alternately.py | 107572aa3949742adfc4813ca836790e9dbcd7cc | [
"MIT"
] | permissive | kamyu104/LeetCode-Solutions | f54822059405ef4df737d2e9898b024f051fd525 | 4dc4e6642dc92f1983c13564cc0fd99917cab358 | refs/heads/master | 2023-09-02T13:48:26.830566 | 2023-08-28T10:11:12 | 2023-08-28T10:11:12 | 152,631,182 | 4,549 | 1,651 | MIT | 2023-05-31T06:10:33 | 2018-10-11T17:38:35 | C++ | UTF-8 | Python | false | false | 471 | py | # Time: O(m + n)
# Space: O(1)
class Solution(object):
def mergeAlternately(self, word1, word2):
"""
:type word1: str
:type word2: str
:rtype: str
"""
result = []
i = 0
while i < len(word1) or i < len(word2):
if i < len(word1):
result.append(word1[i])
if i < len(word2):
result.append(word2[i])
i += 1
return "".join(result)
| [
"[email protected]"
] | |
d3d2478915380b6f8d4f5778c5babd647003d786 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-dataartsstudio/huaweicloudsdkdataartsstudio/v1/model/show_instance_result_response.py | 89a066b6d19712691fb0599b6d0fc736ad86c3d5 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,168 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowInstanceResultResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'count': 'int',
'resources': 'list[SubInstanceResult]'
}
attribute_map = {
'count': 'count',
'resources': 'resources'
}
def __init__(self, count=None, resources=None):
"""ShowInstanceResultResponse
The model defined in huaweicloud sdk
        :param count: Total count
:type count: int
:param resources: resources
:type resources: list[:class:`huaweicloudsdkdataartsstudio.v1.SubInstanceResult`]
"""
super(ShowInstanceResultResponse, self).__init__()
self._count = None
self._resources = None
self.discriminator = None
if count is not None:
self.count = count
if resources is not None:
self.resources = resources
@property
def count(self):
"""Gets the count of this ShowInstanceResultResponse.
        Total count
:return: The count of this ShowInstanceResultResponse.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this ShowInstanceResultResponse.
        Total count
:param count: The count of this ShowInstanceResultResponse.
:type count: int
"""
self._count = count
@property
def resources(self):
"""Gets the resources of this ShowInstanceResultResponse.
resources
:return: The resources of this ShowInstanceResultResponse.
:rtype: list[:class:`huaweicloudsdkdataartsstudio.v1.SubInstanceResult`]
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this ShowInstanceResultResponse.
resources
:param resources: The resources of this ShowInstanceResultResponse.
:type resources: list[:class:`huaweicloudsdkdataartsstudio.v1.SubInstanceResult`]
"""
self._resources = resources
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowInstanceResultResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
990724591460f6a3454e06b1a3df500f07f90241 | 58ee1dc37b57e0b4f06cf383c6a9e0654f490150 | /python-tflearn-git/lilac.py | 5214cab05f245eed7f14892fa3df205c85351b16 | [] | no_license | MikeyBaldinger/arch4edu | f3af87ef3a8d4cd78fde7e0ef75658c17dbe8c06 | c1775bf7fe0ffc87f3c8b4109fb1e8acde12a430 | refs/heads/master | 2022-12-23T16:40:55.513537 | 2020-09-28T21:00:59 | 2020-09-28T21:00:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | #!/usr/bin/env python3
from lilaclib import *
maintainers = [{'github': 'petronny', 'email': 'Jingbei Li <[email protected]>'}]
update_on = [{'aur': None}, {'github': 'tflearn/tflearn'}, {'alias': 'python'}]
build_prefix = 'extra-x86_64'
pre_build = aur_pre_build
post_build = aur_post_build
if __name__ == '__main__':
single_main(build_prefix)
| [
"[email protected]"
] | |
54073a0a96169761ca6e309c1f572aa135b71df0 | 682319f56c17e949bab0d6e418838d33977dd760 | /RP/search_element.py | 6bddc659f268253cf4d1a9296c7704a8a0a4f81b | [] | no_license | DilipBDabahde/PythonExample | 8eb70773a783b1f4b6cf6d7fbd2dc1302af8aa1b | 669762a8d9ee81ce79416d74a4b6af1e2fb63865 | refs/heads/master | 2020-08-23T01:05:44.788080 | 2020-07-25T21:59:52 | 2020-07-25T21:59:52 | 216,511,985 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | '''
Write a program which accepts N numbers from the user and stores them in a list. Accept another number from the user and
return the frequency of that number in the list.
input: Num of elements: 12
input Elements: 5 8 6 8 5 9 3 7 2 21 1 5
Element to search = 5
output: Freq of search element is: 3
'''
def search_Element(arr, iNo):
    if len(arr) == 0:
return -1;
    icnt = 0; # icnt counts how many times the searched element occurs in the list
for i in range(0, len(arr)):
if arr[i] == iNo:
icnt = icnt + 1;
return icnt;
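# Example: search_Element([5, 8, 6, 8, 5], 5) returns 2.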
def main():
arr_list = list(); # arr_list is object of list class , this object is used to add elements in it
size = input("Enter list size: ");
size = int(size); # type conversion of size variable str to int
print("Enter elements for list");
for i in range(0, size):
no = input("Enter element: ");
no = int(no); # type conversion
arr_list.append(no); # appending element to list class object
#now our list is created using loop iteration
print("Created list is: ",arr_list);
search_var = input("Enter number to search its freq:");
search_var = int(search_var);
    result = search_Element(arr_list, search_var);
    if result > 0 :
        print("Freq of given variable in list is: ",result);
    elif result == 0:
        print("Element not found in list");
else:
print("Invalid input");
if __name__ == "__main__":
main();
| [
"[email protected]"
] | |
34e6d9bd427d80013aeb40dfba6f4734f2d186e4 | e6bc1f55371786dad70313eb468a3ccf6000edaf | /Datasets/py-if-else/Correct/076.py | 2c07f238adcfd70b429c52cda3509dc1a5eb15ba | [] | no_license | prateksha/Source-Code-Similarity-Measurement | 9da92e3b22c372ed6ea54d8b6ab2c5921e8c41c0 | fb371b837917794d260a219a1ca09c46a5b15962 | refs/heads/master | 2023-01-04T07:49:25.138827 | 2020-10-25T14:43:57 | 2020-10-25T14:43:57 | 285,744,963 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | #!/bin/python3
import sys
N = int(input().strip())
if(N%2==0) :
if (N<5 or N>20):
print('Not Weird')
else :
print ("Weird")
else :
print ("Weird") | [
"[email protected]"
] | |
850985fddff858e55bfd488b48ba7aff47e39da6 | fbf73800e27f66960f677a284c2771e66708973b | /subreview_lib/classicreviewdecisionpage.py | dfbc360e28ba50b5a16d59e1c83ece7bce6d2c65 | [
"MIT"
] | permissive | allankellynet/mimas | 94140a341693d4729b3cdf5ea94ef2f7e550aad6 | 10025d43bba9e84f502a266760786842e7158a05 | refs/heads/master | 2022-05-30T21:35:06.083902 | 2020-02-27T14:04:27 | 2020-02-27T14:04:27 | 235,146,506 | 0 | 0 | MIT | 2022-05-25T04:56:13 | 2020-01-20T16:30:39 | Python | UTF-8 | Python | false | false | 3,568 | py | #-----------------------------------------------------
# Mimas: conference submission and review system
# (c) Allan Kelly 2016-2020 http://www.allankelly.net
# Licensed under MIT License, see LICENSE file
# -----------------------------------------------------
# System imports
import logging
# Google imports
from google.appengine.ext import ndb
# Local imports
import roundreviews
import basehandler
from submission_lib import submissionrecord
class ClassicReviewDecisionPage(basehandler.BaseHandler):
def make_page(self, crrt_conf):
review_round = int(self.request.get("round"))
tracks = crrt_conf.mapped_track_obects()
crrt_track = self.request.get("track", default_value=tracks.keys()[0])
submissions = self.sorted_submissions(crrt_conf, crrt_track, review_round)
template_values = {
'crrt_conf': crrt_conf,
"track_objects": tracks,
"crrt_track": crrt_track,
"submissions": submissions,
"submissions_len": len(submissions),
"decisions": submissionrecord.get_decision_summary(crrt_conf.key, crrt_track, review_round),
"decision_maker": crrt_conf.user_rights().has_decision_right_for_round(
self.get_crrt_user().email(), review_round),
"review_round": review_round,
"track_slots": crrt_conf.mapped_track_obects()[crrt_track].slots,
}
self.write_page('subreview_lib/classicreviewdecisionpage.html', template_values)
def sorted_submissions(self, crrt_conf, crrt_track, review_round):
submissions = submissionrecord.retrieve_conference_submissions_by_track_and_round(
crrt_conf.key, crrt_track, review_round)
        if self.request.params.has_key("mean"):
            sorted_subs = submissionrecord.sort_submissions_by_mean_high_to_low(submissions, review_round)
        else:
            sorted_subs = submissionrecord.sort_submissions_by_total_high_to_low(submissions, review_round)
        return sorted_subs  # local renamed so the builtin sorted() is not shadowed
def get(self):
if not (self.session.has_key("crrt_conference")):
logging.debug("Conference key session variable missing")
return
crrt_conf = ndb.Key(urlsafe=self.session["crrt_conference"]).get()
self.make_page(crrt_conf)
def submit_decisions(self, review_round):
if not (self.session.has_key("crrt_conference")):
logging.debug("Conference key session variable missing")
return
roundreviews.submit_decisions(
ndb.Key(urlsafe=self.session["crrt_conference"]),
self.request.get("tracklist"),
review_round,
self.request)
def decline_no_decisions(self, review_round):
self.submit_decisions(review_round)
roundreviews.mass_track_change(
ndb.Key(urlsafe=self.session["crrt_conference"]),
self.request.get("tracklist"),
review_round,
"No decision",
"Decline")
def post(self):
review_round = int(self.request.get("review_round"))
if self.request.get("SubmitDecision"):
self.submit_decisions(review_round)
if self.request.get("DeclineNoDecisions"):
self.decline_no_decisions(review_round)
self.redirect("/classic_review_decisions?track=" +
self.request.get("tracklist") +
"&round=" + str(review_round))
| [
"[email protected]"
] | |
9ba77a93b34b31c7c184176444d94a568deb7688 | e4d4149a717d08979953983fa78fea46df63d13d | /Week5/Day1/XP.py | 2c661c7abdf2c8897ce0f26c7fbce353061e2d6f | [] | no_license | fayblash/DI_Bootcamp | 72fd75497a2484d19c779775c49e4306e602d10f | a4e8f62e338df5d5671fd088afa575ea2e290837 | refs/heads/main | 2023-05-05T20:55:31.513558 | 2021-05-27T06:48:40 | 2021-05-27T06:48:40 | 354,818,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,863 | py | # # Exercise 1: Cats
# # Instructions
# # Using this class
# class Cat:
# def __init__(self, name, age):
# self.name = name
# self.age = age
# def oldest_cat(cat_list):
# oldest_current=cat_list[0]
# for cat in cat_list:
# if cat.age>oldest_current.age:
# oldest_current=cat
# return oldest_current
# # Instantiate three Cat objects using the code provided above.
# c1=Cat("Roxy",3)
# c2=Cat("Meow",2)
# c3=Cat("Fluffy",4)
# # Outside of the class, create a function that finds the oldest cat and returns the cat.
# all_cats=[c1,c2,c3]
# oldest=oldest_cat(all_cats)
# print(f"{oldest.name} is the oldest cat and she is {oldest.age} years old.")
# # Print the following string: “The oldest cat is <cat_name>, and is <cat_age> years old.”. Use the function previously created.
# # Exercise 2 : Dogs
# # Instructions
# # Create a class called Dog.
# class Dog:
# def __init__(self,name,height):
# self.name=name
# self.height=height
# def bark(self):
# print(f"{self.name} goes woof!")
# # In this class, create an __init__ method that takes two parameters : name and height. This function instantiates two attributes, which values are the parameters.
# # Create a method called bark that prints the following string “<dog_name> goes woof!”.
# # Create a method called jump that prints the following string “<dog_name> jumps <x> cm high!”. x is the height*2.
# def jump(self):
# print(f"{self.name} jumps {self.height*2} cm")
# # Outside of the class, create an object called davids_dog. His dog’s name is “Rex” and his height is 50cm.
# davids_dog=Dog("Rex",50)
# print(davids_dog.name)
# print(davids_dog.height)
# davids_dog.bark()
# davids_dog.jump()
# # Print the details of his dog (ie. name and height) and call the methods bark and jump.
# # Create an object called sarahs_dog. Her dog’s name is “Teacup” and his height is 20cm.
# sarahs_dog=Dog("Teacup",20)
# print(sarahs_dog.name)
# print(sarahs_dog.height)
# sarahs_dog.bark()
# sarahs_dog.jump()
# # Print the details of her dog (ie. name and height) and call the methods bark and jump.
# # Create an if statement outside of the class to check which dog is bigger. Print the name of the bigger dog.
# if sarahs_dog.height>davids_dog.height:
# print(f"{sarahs_dog.name} is bigger.")
# else:
# print(f"{davids_dog.name} is bigger.")
# # Exercise 3 : Who’s The Song Producer?
# # Instructions
# # Define a class called Song, it will show the lyrics of a song.
# class Song:
# def __init__(self,lyrics):
# self.lyrics=lyrics
# # Its __init__() method should have two arguments: self and lyrics (a list).
# # Inside your class create a method called sing_me_a_song that prints each element of lyrics on its own line.
# def sing_me_a_song(self):
# for lyric in self.lyrics:
# print(lyric)
# # Create an object, for example:
# stairway= Song(["There’s a lady who's sure","all that glitters is gold", "and she’s buying a stairway to heaven"])
# # Then, call the sing_me_a_song method. The output should be:
# stairway.sing_me_a_song()
# # There’s a lady who's sure
# # all that glitters is gold
# # and she’s buying a stairway to heaven
# Exercise 4 : Afternoon At The Zoo
# Instructions
# Create a class called Zoo.
class Zoo:
def __init__(self,zoo_name):
self.zoo_name=zoo_name
self.animals=[]
self.list_animals=[]
# In this class create a method __init__ that takes one parameter: zoo_name.
# It instantiates two attributes: animals (an empty list) and name (name of the zoo).
# Create a method called add_animal that takes one parameter new_animal. This method adds the new_animal to the animals list as long as it isn’t already in the list.
def add_animal(self,new_animal):
if new_animal not in self.animals:
self.animals.append(new_animal)
# Create a method called get_animals that prints all the animals of the zoo.
def get_animals(self):
print(self.animals)
# Create a method called sell_animal that takes one parameter animal_sold. This method removes the animal from the list and of course the animal needs to exist in the list.
def sell_animal(self,animal_sold):
if animal_sold in self.animals:
self.animals.remove(animal_sold)
# Create a method called sort_animals that sorts the animals alphabetically and groups them together based on their first letter.
# Example
    def sort_animals(self):
        self.animals = sorted(self.animals)
        temp_list = [self.animals[0]]
        for i in range(1, len(self.animals)):
            if self.animals[i][0] == temp_list[-1][0]:
                temp_list.append(self.animals[i])
            else:
                self.list_animals.append(temp_list)
                temp_list = [self.animals[i]]
        self.list_animals.append(temp_list)  # flush the final letter group
        return {i + 1: group for i, group in enumerate(self.list_animals)}
def get_groups(self):
for i in self.list_animals:
print(i)
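# The same first-letter grouping can be written with itertools.groupby; shown here
# only as a comparison sketch, not as part of the exercise solution above:
# from itertools import groupby
# grouped = [list(g) for _, g in groupby(sorted(animals), key=lambda name: name[0])]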
fays_zoo=Zoo("fay")
fays_zoo.add_animal("Bear")
fays_zoo.add_animal("Ape")
fays_zoo.add_animal("Cat")
fays_zoo.add_animal("Emu")
fays_zoo.add_animal("Cougar")
fays_zoo.add_animal("Eel")
fays_zoo.add_animal("Baboon")
fays_zoo.get_animals()
print(fays_zoo.sort_animals())
fays_zoo.get_groups()
# {
# 1: "Ape",
# 2: ["Baboon", "Bear"],
# 3: ['Cat', 'Cougar'],
# 4: ['Eel', 'Emu']
# }
# Create a method called get_groups that prints the animal/animals inside each group.
#
# Create an object called ramat_gan_safari and call all the methods.
# Tip: The zookeeper is the one who will use this class.
# Example
# Which animal should we add to the zoo --> Giraffe
| [
"[email protected]"
] | |
d3a3564a7a3dfa3476aed8c37bc0eefe96e862bd | b54f9fb585648e4fe0b8ca727f42c97a6c1486fd | /variability/varModels.py | 5006e96121da25ab9e80c656b99ff8bf870894bf | [] | no_license | tribeiro/SMAPS | 46a36ab3fd74e35d97d9b43d5d80d88d9581b9da | b1e8dd9444e7fcbc7a82ab30941bab224b5ae600 | refs/heads/master | 2021-01-22T22:35:09.228649 | 2014-05-08T11:19:47 | 2014-05-08T11:19:47 | 19,570,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,107 | py |
import numpy as np
###################################################################################################
def ecbinary(time,period,ecduration,depth):
'''
Simulate eclipsing binary.
'''
phase = time / period
cycle = np.ceil(phase)
phase = phase - cycle
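	# folded phase now lies in (-1, 0]; the mask below flags points within
	# ecduration of phase 0, i.e. the points in eclipse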
mask = np.bitwise_and(phase > -ecduration, phase < ecduration)
flux = np.zeros_like(time)+1.0
flux[mask] -= depth
return flux
###################################################################################################
def pulsating(time,period,amplitude):
'''
Simulate pulsating star.
'''
return np.sin(2*np.pi*time/period)
###################################################################################################
def transient(time,t0,amplitude,duration):
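	'''
	Simulate transient event (a Gaussian burst starting at t0).
	'''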
flux = np.zeros_like(time)
mask = time > t0
flux[mask] += amplitude * np.exp(- ((time[mask]-t0) / duration)**2.)
return flux
###################################################################################################
###################################################################################################
if __name__ == '__main__':
import pylab as py
tt = np.arange(10,40,0.1)
#tobs = np.loadtxt( '/Users/tiago/Documents/JPAS/variables/filtersObservations.txt',
# delimiter=',',unpack=True,usecols=(1,))
mag0 = 16
ectobs = np.array([17.0413348326,17.0480014993,26.1886086683,30.3348673002])+np.random.random(1)[0]*10-5
ectobs.sort()
ecflx = mag0-ecbinary(tt,2,0.1,1.5)
ecobs = mag0-ecbinary(ectobs,2,0.1,1.5)
ecerr = np.random.exponential(0.1,len(ectobs)) * (-1)**np.random.randint(0,2,len(ectobs))
pltobs = np.array([17.0413348326,17.0480014993,26.1886086683,30.3348673002])+np.random.random(1)[0]*10-5
pltobs.sort()
plflx = mag0-pulsating(tt,2,0.5)
plobs = mag0-pulsating(pltobs,2,0.5)
plerr = np.random.exponential(0.1,len(pltobs)) * (-1)**np.random.randint(0,2,len(pltobs))
trtobs = np.array([17.0413348326,17.0480014993,26.1886086683,30.3348673002])+np.random.random(1)[0]*10-5
trtobs.sort()
trflx = mag0-transient(tt,20,1.0,10)+transient(tt,600,10.0,40)
trobs = mag0-transient(trtobs,20,1.0,10)+transient(trtobs,600,10.0,40)
trerr = np.random.exponential(0.1,len(trtobs)) * (-1)**np.random.randint(0,2,len(trtobs))
py.figure(1,figsize=(8,4))
########################
ax1 = py.subplot(311)
py.plot(tt,ecflx,'-')
py.errorbar(ectobs,ecobs+ecerr,0.1,fmt='o')
py.ylim(17.499,14.5)
ax2 = py.subplot(312)
py.plot(tt,plflx,'-')
py.errorbar(pltobs,plobs+plerr,0.1,fmt='o')
py.ylim(17.5,14.5)
ax3 = py.subplot(313)
py.plot(tt,trflx,'-')
py.errorbar(trtobs,trobs+trerr,0.1,fmt='o')
py.ylim(17.5,14.501)
########################
py.setp(ax1.get_xticklabels(),visible=False)
py.setp(ax2.get_xticklabels(),visible=False)
ax3.set_xlabel('Time (days)')
ax2.set_ylabel('Magnitude')
py.subplots_adjust(hspace=0,wspace=0,bottom=0.13,top=0.93)
#py.savefig('/Users/tiago/Dropbox/Apps/TeX Writer (1)/fig/jpas_variability_fig01.pdf')
py.show()
################################################################################################### | [
"[email protected]"
] | |
5694f828530a430b4aca5569f67e50d0baf88575 | aff694b019806db8f8cd66fd205f9049351bb10c | /bin/wheel | e54d9f83eb92ea97085a22f82f854bd08e745464 | [] | no_license | mikilabarda/my-first-blog | 3885d08f87e9c3f05da7000b9e60d29f3895efd3 | 7e1476fa75e6db95bfe8685ad43a233777166071 | refs/heads/master | 2021-05-30T19:25:38.022284 | 2016-03-20T05:31:16 | 2016-03-20T05:31:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | #!/Users/Miki/Desktop/env/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
e3399daf37f287b2f7c0b62e55f30e6611bf5d97 | 0f89043a9e7caac53bc76cd359d704d5cfaef3db | /main/migrations/0044_remove_tag_resources.py | eaef56cf0970beb2e07945c8e6a10d9b814acaf4 | [] | no_license | sirodoht/knowhub | f704d987f6c800717c2dba7b811d05b0d85801fd | 4c242a9f1bc14a11fbf799119b19d79c4201ba2d | refs/heads/master | 2022-03-05T15:28:55.539951 | 2019-11-18T18:33:42 | 2019-11-18T18:33:42 | 134,064,221 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | # Generated by Django 2.1 on 2018-08-27 13:24
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("main", "0043_auto_20180826_0050")]
operations = [migrations.RemoveField(model_name="tag", name="resources")]
| [
"[email protected]"
] | |
29b9663bb72f21946ffdb20c501c498e7c0cfee6 | f2e09eea7c995df2cac15f16ae5eeb79b6fc748c | /odmltools/info.py | cda1f635ddc615fb0e6c0b291916d4bb2d05c164 | [
"BSD-2-Clause"
] | permissive | mpsonntag/odmltools | 676d829212ababd3ea3eb3396f25d0df8f3a4373 | 87e67fc737fbad2bd9866d529d47abbc2b7115d1 | refs/heads/master | 2021-07-13T07:54:23.214505 | 2021-06-21T18:11:19 | 2021-06-21T18:11:19 | 221,953,387 | 0 | 0 | null | 2019-11-15T15:40:14 | 2019-11-15T15:40:14 | null | UTF-8 | Python | false | false | 357 | py | import os
import json
INSTALL_PATH = os.path.dirname(__file__)
with open(os.path.join(INSTALL_PATH, "info.json")) as infofile:
infodict = json.load(infofile)
VERSION = infodict["VERSION"]
AUTHOR = infodict["AUTHOR"]
COPYRIGHT = infodict["COPYRIGHT"]
CONTACT = infodict["CONTACT"]
HOMEPAGE = infodict["HOMEPAGE"]
CLASSIFIERS = infodict["CLASSIFIERS"]
| [
"[email protected]"
] | |
7acf5941940c678da4795277f2ddd08749ad98a3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03192/s975847643.py | 0b87008f474274d7ec53b07ee4ec58d374c6d871 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | n = int(input())
li = []
while n > 0:
li.append(n%10)
n //= 10
li.reverse()
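# li now holds n's decimal digits most-significant first
# (the order does not affect the digit count below)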
ans = 0
for i in range(len(li)):
if li[i] == 2:
ans += 1
print(ans) | [
"[email protected]"
] | |
a9fe63f7d3ec967b0984566e83707772eedadfb5 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/765188917950a2d371982a81fa142747ea65f14a-<binned_statistic_2d>-bug.py | 37399820742f3945f3f54302e29aeac36416da57 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,928 | py | def binned_statistic_2d(x, y, values, statistic='mean', bins=10, range=None, expand_binnumbers=False):
    "\n    Compute a bidimensional binned statistic for one or more sets of data.\n\n    This is a generalization of a histogram2d function. A histogram divides\n    the space into bins, and returns the count of the number of points in\n    each bin. This function allows the computation of the sum, mean, median,\n    or other statistic of the values (or set of values) within each bin.\n\n    Parameters\n    ----------\n    x : (N,) array_like\n        A sequence of values to be binned along the first dimension.\n    y : (N,) array_like\n        A sequence of values to be binned along the second dimension.\n    values : (N,) array_like or list of (N,) array_like\n        The data on which the statistic will be computed. This must be\n        the same shape as `x`, or a list of sequences - each with the same\n        shape as `x`. If `values` is such a list, the statistic will be\n        computed on each independently.\n    statistic : string or callable, optional\n        The statistic to compute (default is 'mean').\n        The following statistics are available:\n\n          * 'mean' : compute the mean of values for points within each bin.\n            Empty bins will be represented by NaN.\n          * 'std' : compute the standard deviation within each bin. This \n            is implicitly calculated with ddof=0.\n          * 'median' : compute the median of values for points within each\n            bin. Empty bins will be represented by NaN.\n          * 'count' : compute the count of points within each bin. This is\n            identical to an unweighted histogram. `values` array is not\n            referenced.\n          * 'sum' : compute the sum of values for points within each bin.\n            This is identical to a weighted histogram.\n          * 'min' : compute the minimum of values for points within each bin.\n            Empty bins will be represented by NaN.\n          * 'max' : compute the maximum of values for point within each bin.\n            Empty bins will be represented by NaN.\n          * function : a user-defined function which takes a 1D array of\n            values, and outputs a single numerical statistic. This function\n            will be called on the values in each bin. Empty bins will be\n            represented by function([]), or NaN if this returns an error.\n\n    bins : int or [int, int] or array_like or [array, array], optional\n        The bin specification:\n\n          * the number of bins for the two dimensions (nx = ny = bins),\n          * the number of bins in each dimension (nx, ny = bins),\n          * the bin edges for the two dimensions (x_edge = y_edge = bins),\n          * the bin edges in each dimension (x_edge, y_edge = bins).\n\n        If the bin edges are specified, the number of bins will be,\n        (nx = len(x_edge)-1, ny = len(y_edge)-1).\n\n    range : (2,2) array_like, optional\n        The leftmost and rightmost edges of the bins along each dimension\n        (if not specified explicitly in the `bins` parameters):\n        [[xmin, xmax], [ymin, ymax]]. All values outside of this range will be\n        considered outliers and not tallied in the histogram.\n    expand_binnumbers : bool, optional\n        'False' (default): the returned `binnumber` is a shape (N,) array of\n        linearized bin indices.\n        'True': the returned `binnumber` is 'unraveled' into a shape (2,N)\n        ndarray, where each row gives the bin numbers in the corresponding\n        dimension.\n        See the `binnumber` returned value, and the `Examples` section.\n\n        .. versionadded:: 0.17.0\n\n    Returns\n    -------\n    statistic : (nx, ny) ndarray\n        The values of the selected statistic in each two-dimensional bin.\n    x_edge : (nx + 1) ndarray\n        The bin edges along the first dimension.\n    y_edge : (ny + 1) ndarray\n        The bin edges along the second dimension.\n    binnumber : (N,) array of ints or (2,N) ndarray of ints\n        This assigns to each element of `sample` an integer that represents the\n        bin in which this observation falls. The representation depends on the\n        `expand_binnumbers` argument. See `Notes` for details.\n\n\n    See Also\n    --------\n    numpy.digitize, numpy.histogram2d, binned_statistic, binned_statistic_dd\n\n    Notes\n    -----\n    Binedges:\n    All but the last (righthand-most) bin is half-open. In other words, if\n    `bins` is ``[1, 2, 3, 4]``, then the first bin is ``[1, 2)`` (including 1,\n    but excluding 2) and the second ``[2, 3)``. The last bin, however, is\n    ``[3, 4]``, which *includes* 4.\n\n    `binnumber`:\n    This returned argument assigns to each element of `sample` an integer that\n    represents the bin in which it belongs. The representation depends on the\n    `expand_binnumbers` argument. If 'False' (default): The returned\n    `binnumber` is a shape (N,) array of linearized indices mapping each\n    element of `sample` to its corresponding bin (using row-major ordering).\n    If 'True': The returned `binnumber` is a shape (2,N) ndarray where\n    each row indicates bin placements for each dimension respectively. In each\n    dimension, a binnumber of `i` means the corresponding value is between\n    (D_edge[i-1], D_edge[i]), where 'D' is either 'x' or 'y'.\n\n    .. versionadded:: 0.11.0\n\n    Examples\n    --------\n    >>> from scipy import stats\n\n    Calculate the counts with explicit bin-edges:\n\n    >>> x = [0.1, 0.1, 0.1, 0.6]\n    >>> y = [2.1, 2.6, 2.1, 2.1]\n    >>> binx = [0.0, 0.5, 1.0]\n    >>> biny = [2.0, 2.5, 3.0]\n    >>> ret = stats.binned_statistic_2d(x, y, None, 'count', bins=[binx,biny])\n    >>> ret.statistic\n    array([[ 2.,  1.],\n           [ 1.,  0.]])\n\n    The bin in which each sample is placed is given by the `binnumber`\n    returned parameter. By default, these are the linearized bin indices:\n\n    >>> ret.binnumber\n    array([5, 6, 5, 9])\n\n    The bin indices can also be expanded into separate entries for each\n    dimension using the `expand_binnumbers` parameter:\n\n    >>> ret = stats.binned_statistic_2d(x, y, None, 'count', bins=[binx,biny],\n    ...                                 expand_binnumbers=True)\n    >>> ret.binnumber\n    array([[1, 1, 1, 2],\n           [1, 2, 1, 1]])\n\n    Which shows that the first three elements belong in the xbin 1, and the\n    fourth into xbin 2; and so on for y.\n\n    "
try:
N = len(bins)
except TypeError:
N = 1
if ((N != 1) and (N != 2)):
xedges = yedges = np.asarray(bins, float)
bins = [xedges, yedges]
(medians, edges, binnumbers) = binned_statistic_dd([x, y], values, statistic, bins, range, expand_binnumbers=expand_binnumbers)
return BinnedStatistic2dResult(medians, edges[0], edges[1], binnumbers) | [
"[email protected]"
] | |
8845672ea92d7bddefee80d4f9a40864a8f36823 | bb198232df12a1adb9e8a6164ff2a403bf3107cf | /cookie-monster/MonsterBrowser.py | da21df7b654d09e613e51a1984046a21401e3364 | [] | no_license | vanello/wifi-arsenal | 9eb79a43dfdd73d3ead1ccd5d2caf9bad9e327ee | 1ca4c5a472687f8f017222893f09a970652e9a51 | refs/heads/master | 2021-01-16T22:00:37.657041 | 2015-09-03T03:40:43 | 2015-09-03T03:40:43 | 42,060,303 | 1 | 0 | null | 2015-09-07T15:24:11 | 2015-09-07T15:24:11 | null | UTF-8 | Python | false | false | 3,179 | py | from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
from PyQt4.QtNetwork import *
import getopt
import sys
import re
class MyBrowser(QWebView):
def __init__(self,father=None):
super(MyBrowser, self).__init__(father)
self.page().setLinkDelegationPolicy(QWebPage.DelegateExternalLinks)
self.connect(self, SIGNAL("linkClicked(QUrl)"), self.onLinkClicked)
def onLinkClicked(self, url):
self.load(url)
class MonsterWindow(QWidget):
def __init__(self, father = None):
super(MonsterWindow, self).__init__(father)
class MonsterBrowser():
urlPat = re.compile("https?://([^/]*)(.*)")
def usage(self):
print """
Usage: python MonsterBrowser.py [options] url
Options:
-c --cookie <Cookie> set cookie
-u --useragent <UserAgent> set useragent
"""
def parseArguments(self, argv):
try:
opts, args = getopt.getopt(argv, "c:u:", ["cookie=", "useragent="])
except getopt.GetoptError:
self.usage()
sys.exit(2)
if len(args) < 1:
self.usage()
sys.exit(2)
url = args[0]
cookie = None
useragent = None
        for opt, optarg in opts:  # loop variable renamed so the outer args list is not clobbered
            if opt in ("-c", "--cookie"):
                cookie = optarg
            if opt in ("-u", "--useragent"):
                useragent = optarg
if useragent is None:
useragent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20120427 Firefox/15.0a1"
print cookie, useragent, url
self.launch(cookie, useragent, url)
def launch(self, rawcookie, useragent, url):
'''
url: http://xxx.yyy.zzz/aaa/bbb?ccc/
host: xxx.yyy.zzz
domain: yyy.zzz
'''
cookies = []
# if no http protocol header, append it
if not url.startswith("http://"):
url = "http://" + url
match = self.urlPat.match(url)
host = match.group(1)
uri = match.group(2)
domain = ".".join(host.split(".")[-2:])
# adding cookies to cookiejar
for cookie in rawcookie.split(";"):
qnc = QNetworkCookie()
qnc.setDomain("."+domain)
            key = cookie.split("=")[0].strip()  # drop the space left after each ';'
value = "=".join(cookie.split("=")[1:])
qnc.setName(key)
qnc.setValue(value)
cookies.append(qnc)
self.open_web(url, cookies, useragent)
return
def open_web(self, url, cookies, useragent):
app = QApplication(sys.argv)
wind = QMainWindow()
view = MyBrowser()
nam = QNetworkAccessManager()
view.page().setNetworkAccessManager(nam)
print " [!] Spawning web view of " + url
ncj = QNetworkCookieJar()
ncj.setAllCookies(cookies)
nam.setCookieJar(ncj)
qnr = QNetworkRequest(QUrl(url))
qnr.setRawHeader("User-Agent", useragent)
view.load(qnr)
wind.setCentralWidget(view)
wind.show()
app.exec_()
if __name__ == "__main__":
browser = MonsterBrowser()
browser.parseArguments(sys.argv[1:])
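# Example invocation (illustrative cookie value):
#   python MonsterBrowser.py -c "sessionid=abc123; token=xyz" example.com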
| [
"[email protected]"
] | |
b50ab0437bdeb0851adabcf7abdab17632f1e3ef | 82b495a208ebdeb71314961021fbfe767de57820 | /chapter-06/temperature.py | 6c3619f4fe12c91df242c2a86240bd498aa1abd1 | [
"MIT"
] | permissive | krastin/pp-cs3.0 | 7c860794332e598aa74278972d5daa16853094f6 | 502be9aac2d84215db176864e443c219e5e26591 | refs/heads/master | 2020-05-28T02:23:58.131428 | 2019-11-13T13:06:08 | 2019-11-13T13:06:08 | 188,853,205 | 0 | 0 | MIT | 2019-11-13T13:06:09 | 2019-05-27T13:56:41 | Python | UTF-8 | Python | false | false | 469 | py | def convert_to_celsius(fahrenheit: float) -> float:
"""Return the number of Celsius degrees equivalent to fahrenheit
degrees.
>>> convert_to_celsius(75)
23.88888888888889
"""
return (fahrenheit - 32.0) * 5.0 / 9.0
def above_freezing(celsius: float) -> bool:
"""Return true if the temperature in celsius degrees is above freezing
>>> above_freezing(5.2)
True
>>> above_freezing(-2)
False
"""
return celsius > 0
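# A minimal harness to execute the doctests above (assumes the file is run
# directly as a script):
if __name__ == '__main__':
    import doctest
    doctest.testmod()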
| [
"[email protected]"
] | |
38af0a8f4ec7b286afc5854b6b8d7664edf331e0 | f4df9e53f984f293786fa3b890ab15f8c20abefd | /Django/beltreview/beltreview/urls.py | 7767d39b40ce4346388762908fc3bc5c218872b4 | [] | no_license | Chiefautoparts/Python_Party | bd0fbac9ea003f92be0f97d6f1d8f8d51c98a1b8 | 3d81052c51f14d5fc5dbd25317d23b891e766d8e | refs/heads/master | 2021-01-25T07:55:05.218262 | 2017-06-30T04:44:18 | 2017-06-30T04:44:18 | 93,680,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | """beltreview URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
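# Per the docstring above, a hypothetical blog app would be routed by importing
# include() and appending: url(r'^blog/', include('blog.urls'))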
| [
"[email protected]"
] | |
d6ee5dc1abe1d518d68ca2e2446b88515339d72a | 92cec18f145b71403d4c5511084a0da5fdeca9fb | /tests/test_bem_filesystem.py | 14ce83ae0635b4f6c0f08f8a41a49b1b55f6bac2 | [] | no_license | Zed-chi/bem_fs | 12b320861c5984df0a222a6f4548f30a3de2a78d | a013e4d31eddd343d35a5edb3f99ef36535c73d4 | refs/heads/master | 2022-11-06T17:16:05.292183 | 2020-06-30T05:22:42 | 2020-06-30T05:22:42 | 275,676,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | from bem_filesystem import __version__
def test_version():
assert __version__ == "0.1.0"
| [
"[email protected]"
] | |
65db9b7872898345eee84550ab79aa3f9bbe16ab | 6ed034d0a5e239d7b0c528b287451409ffb4a494 | /mmpose/datasets/samplers/__init__.py | da09effaf20fefe1a102277672b98db7d884f002 | [
"Apache-2.0"
] | permissive | ViTAE-Transformer/ViTPose | 8f9462bd5bc2fb3e66de31ca1d03e5a9135cb2bf | d5216452796c90c6bc29f5c5ec0bdba94366768a | refs/heads/main | 2023-05-23T16:32:22.359076 | 2023-03-01T06:42:22 | 2023-03-01T06:42:22 | 485,999,907 | 869 | 132 | Apache-2.0 | 2023-03-01T06:42:24 | 2022-04-27T01:09:19 | Python | UTF-8 | Python | false | false | 134 | py | # Copyright (c) OpenMMLab. All rights reserved.
from .distributed_sampler import DistributedSampler
__all__ = ['DistributedSampler']
| [
"[email protected]"
] |