blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3f7abd79ae8d7f73be6caf80979087ffde251930 | b8c24a63d6871da9039e234aecb14f2eb6ff71f5 | /1MD3/Tutorial 4/Q4_voineat.py | be298feb418eebcafd9431c39881ea72d5864fd5 | [] | no_license | teovoinea/First-Year-Assignments | e3eeea9eba08bbacdf996e06b31b4b7861d805a6 | 6f8b02f17aab5562154cc59e0e2b76277779f1c9 | refs/heads/master | 2020-05-17T03:28:37.655057 | 2015-04-13T19:16:08 | 2015-04-13T19:16:08 | 33,888,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | def main():
word = input("Please enter a word")
print(pluralplus(word))
def pluralplus(w):
    """Return the plural form of the word *w*.

    Rules, applied in order:
      * ends in 's' or 'h' -> append 'es'   (bus -> buses, church -> churches)
      * ends in 'y'        -> drop 'y', append 'ies' (city -> cities)
      * otherwise          -> append 's'    (dog -> dogs)
    """
    if w.endswith(('s', 'h')):
        # str.endswith accepts a tuple of suffixes -- one call instead of two.
        return w + 'es'
    elif w.endswith('y'):
        return w[:-1] + 'ies'
    else:
        return w + 's'
main()
| [
"[email protected]"
] | |
027ed53d0d391de223c657cc4c16bd4a060e7526 | ec4d4509d9ec9885f7f1461efde321f38c1df62c | /app/routes/__init__.py | e809ae620abd590324c01462bfcd51a8624cfa93 | [] | no_license | skollhati/CLACK | d60071479c38feab871f1216affc22a84f23655f | 34b451d389785d7c38ef4a54c4e01f148f224844 | refs/heads/master | 2020-07-28T02:46:13.592736 | 2019-10-25T03:54:30 | 2019-10-25T03:54:30 | 209,284,253 | 0 | 0 | null | 2020-07-18T14:54:41 | 2019-09-18T10:45:43 | CSS | UTF-8 | Python | false | false | 98 | py | from app import app
from flask import render_template, request, session
from app.models import *
| [
"[email protected]"
] | |
52b921495921259eff0bf10fb855d93cbd453564 | e20cecb5c34959321a281370c489ae3994fbc78c | /COVID_gif.py | 65259039f36b6ed9e54ed2054e1f8eaa5e92b47e | [
"MIT"
] | permissive | JackLidge/COVIDtracker | 3b51500a153e26f87a2516471f543ac44117f036 | 0432d52910a00c987f9b942215f921923ed9a62b | refs/heads/master | 2022-11-28T15:34:46.177421 | 2020-08-14T22:57:38 | 2020-08-14T22:57:38 | 287,638,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,403 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.offsetbox import OffsetImage, AnnotationBbox
import rona_db
# Launch the country-selection window (Tk) and block until it is closed.
results = rona_db.window()
results.window.mainloop()
countries_of_interest = results.country_list
# Selections come back wrapped in quote characters -- strip them off.
countries_of_interest = [i.strip("''") for i in countries_of_interest]
countries = {}
country_sel = 0
# All reporting dates present in the data, in chronological order.
dates = results.data['dateRep'].unique()
dates.sort()
# Build one per-country frame with cumulative case/death counts and
# per-capita figures (population column: popData2018).
for i, country in enumerate(countries_of_interest):
    temp = results.data.loc[results.data['countriesAndTerritories'] == country].copy()
    temp['cumulative'] = temp['cases'].cumsum()
    temp['cum_deaths'] = temp['deaths'].cumsum()
    print(country)
    temp['per_capita_cases'] = rona_db.find_per_capita(temp, pop_col='popData2018', vals_col='cumulative')
    temp['per_capita_deaths'] = rona_db.find_per_capita(temp, pop_col='popData2018', vals_col='cum_deaths')
    temp.reset_index(drop=True, inplace=True)
    countries[country] = temp
# For every date, collect each country's cumulative-case history up to and
# including that date: full_data[j] is a list with one dict per country.
full_data = []
for j, date in enumerate(dates):
    date_countries = []
    for i, country in enumerate(countries):
        # Compare on the 'YYYY-MM-DD' prefix of the timestamp strings.
        country_list = [str(i)[:10] for i in countries[country]['dateRep'].values]
        if str(date)[:10] in country_list:
            index = np.searchsorted(countries[country]['dateRep'], date)
            y = countries[country]['cumulative'].iloc[:index+1]
            x = countries[country]['dateRep'].iloc[:index+1]
            x = [str(i)[:10] for i in x]
            temp = {'country': country, 'x': x, 'y': y}
            date_countries.append(temp)
    full_data.append(date_countries)
# for i, country in enumerate(countries):
#     full_data.append({'country': country, 'x': [], 'y' : []})
#     country_list = [str(i)[:10] for i in countries[country]['dateRep'].values]
#     for j, date in enumerate(dates):
#         if str(date)[:10] in country_list:
#             index = np.searchsorted(countries[country]['dateRep'], date)
#             y = countries[country]['cumulative'].iloc[index]
#             x = str(countries[country]['dateRep'].iloc[index])[:10]
#             full_data[i]['x'].append(x)
#             full_data[i]['y'].append(y)
# Render one PNG frame per date for the animation.
for idx, val in enumerate(full_data):
    fig, ax = plt.subplots(figsize=[14,10])
    max_vals = []
    # NOTE(review): list comprehension used purely for its side effect.
    [max_vals.append(i['y'].max()) for i in val]
    # Skip dates before any selected country has reported cases
    # (a log axis cannot display zero).
    if np.max(max_vals) == 0:
        continue
    for i in val:
        plt.semilogy(i['x'], i['y'], linewidth=2, label=i['country'])
        plt.scatter(i['x'][-1], i['y'].values[-1], s=8, marker='o')
        # Shorten the long dataset names before the flag-image lookup.
        if i['country'] == 'United_States_of_America':
            i['country'] = 'USA'
        if i['country'] == 'United_Kingdom':
            i['country'] = 'UK'
        # NOTE(review): flag paths are reloaded for every country on every
        # frame; this lookup could be hoisted out of both loops.
        flags = rona_db.flag_images()
        image = plt.imread(flags[i['country']])
        im = OffsetImage(image, zoom=0.25)
        ab = AnnotationBbox(im, (i['x'][-1], np.max(i['y'])), xycoords='data', frameon=False)
        ax.add_artist(ab)
        #plt.annotate(i['country'], (i['x'][-1], i['y'].max()), xytext=(-30, 10), textcoords='offset pixels', fontsize=14)
    # Stamp the frame with its date (taken from the last country's series).
    plt.annotate(f'{val[-1]["x"][-1]}', xy=(0.3, 0.95), xycoords='axes fraction', fontsize=14)
    plt.ylim([100, 1500000])
    plt.xlabel(f'Date')
    plt.ylabel(f'Cases of COVID-19')
    plt.xticks([], [])
    plt.legend(loc='upper left')
    # NOTE(review): `i` here is the leftover variable from the inner
    # `for i in val` loop, so the filename uses the last country's date --
    # probably `val[-1]["x"][-1]` was intended; confirm before changing.
    plt.savefig(f'./animation/{i["x"][-1]}_covid19_case_rates.png', dpi=300)
plt.show() | [
"[email protected]"
] | |
031865b6ba4042039e83272cc66a8a9da5c92a56 | 199f34ce3f9cb80375007aae9c8f430116a0d5e1 | /profiles_api/permissions.py | 66e662b32f5d153b2b0a3251a65098cf6ed8601a | [
"MIT"
] | permissive | Ina02/profiles-rest-api | 0ea2c69c117e7e38a2d92b28930ec1adf445f260 | c34060ababc7a47b7f1118e0c359d9149841db61 | refs/heads/main | 2023-01-21T19:35:47.534718 | 2020-11-29T11:09:09 | 2020-11-29T11:09:09 | 313,114,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
    """Permission that lets a user modify only their own profile.

    Read-only (safe) requests are always allowed; write requests are
    allowed only when the target profile belongs to the requesting user.
    """
    def has_object_permission(self, request, view, obj):
        """Return True for safe methods or when the object is the caller's own profile."""
        is_read_only = request.method in permissions.SAFE_METHODS
        return is_read_only or obj.id == request.user.id
class UpdateOwnStatus(permissions.BasePermission):
    """Allow users to update their own status"""
    def has_object_permission(self, request, view, obj):
        """Check the user is trying to update their own status"""
        # Read-only (safe) HTTP methods are always permitted.
        if request.method in permissions.SAFE_METHODS:
            return True
        # Writes are only allowed on status objects owned by the requester.
        return obj.user_profile.id == request.user.id | [
"[email protected]"
] | |
81583590dd9cb00328114f5f8f2a1c2ad16662c0 | fbb99f2e82241be1562a890e4020c64278140167 | /layers/modules/__init__.py | 8d5e7e3faceee32409716a6c8089799c49eaee71 | [
"MIT"
] | permissive | lcwbupt/deepv_pytorch_ | e041ef22303a5cbcb6c3d682d22cfabffd306050 | f25a80d2b1beb64fb8148fe16ece3b35aa2362de | refs/heads/master | 2020-03-19T05:54:46.448094 | 2018-06-23T01:07:44 | 2018-06-23T01:07:44 | 135,973,731 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | from .l2norm import L2Norm
from .multibox_loss import MultiBoxLoss
from .repulsion_loss import RepulsionLoss
from .multibox_repulsion_loss import MultiBoxRepulsionLoss
__all__ = ['L2Norm', 'MultiBoxLoss', 'RepulsionLoss' ,'MultiBoxRepulsionLoss']
| [
"[email protected]"
] | |
db0be62e06bba0b6794bf9910dc2bb019255914a | 697f9bbb47254853783c60de7ffc8d5fb411e73d | /evo_gym/wrappers/frame_stack.py | b69cf84097a81f73a03c884f8edce1441dfa4f3f | [
"MIT"
] | permissive | SamuelSchmidgall/EvolutionarySelfReplication | d27389191435ab679199fa06d59f15aef155cd8a | 1a6f8225378b59423a97b439b56710bbed2754e9 | refs/heads/main | 2023-07-30T12:34:27.720041 | 2021-09-29T18:49:16 | 2021-09-29T18:49:16 | 366,845,417 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,349 | py | from collections import deque
import numpy as np
from evo_gym.spaces import Box
from evo_gym import ObservationWrapper
class LazyFrames(object):
    r"""Memory-efficient wrapper around a list of (possibly shared) frames.

    The same underlying frame objects can be referenced by several
    ``LazyFrames`` instances without copying, and the frames can optionally
    be held lz4-compressed.  Conversion to a real numpy array is deferred
    until ``__array__`` is invoked (e.g. just before a forward pass).
    """

    def __init__(self, frames, lz4_compress=False):
        if lz4_compress:
            from lz4.block import compress
            # Shape/dtype are recorded so frames can be restored after
            # decompression; they are only needed in the compressed case.
            self.shape = frames[0].shape
            self.dtype = frames[0].dtype
            frames = [compress(frame) for frame in frames]
        self._frames = frames
        self.lz4_compress = lz4_compress

    def __array__(self, dtype=None):
        if self.lz4_compress:
            from lz4.block import decompress
            stacked = np.stack(
                [np.frombuffer(decompress(raw), dtype=self.dtype).reshape(self.shape)
                 for raw in self._frames],
                axis=0,
            )
        else:
            stacked = np.stack(self._frames, axis=0)
        # Optional dtype conversion, mirroring numpy's __array__ protocol.
        return stacked if dtype is None else stacked.astype(dtype)

    def __len__(self):
        return len(self.__array__())

    def __getitem__(self, i):
        return self.__array__()[i]
class FrameStack(ObservationWrapper):
    r"""Observation wrapper that stacks the observations in a rolling manner.
    For example, if the number of stacks is 4, then the returned observation contains
    the most recent 4 observations. For environment 'Pendulum-v0', the original observation
    is an array with shape [3], so if we stack 4 observations, the processed observation
    has shape [3, 4].
    .. note::
        To be memory efficient, the stacked observations are wrapped by :class:`LazyFrame`.
    .. note::
        The observation space must be `Box` type. If one uses `Dict`
        as observation space, it should apply `FlattenDictWrapper` at first.
    Example::
        >>> import evo_gym
        >>> env = evo_gym.make('PongNoFrameskip-v0')
        >>> env = FrameStack(env, 4)
        >>> env.observation_space
        Box(4, 210, 160, 3)
    Args:
        env (Env): environment object
        num_stack (int): number of stacks
        lz4_compress (bool): keep the buffered frames lz4-compressed
    """
    def __init__(self, env, num_stack, lz4_compress=False):
        super(FrameStack, self).__init__(env)
        self.num_stack = num_stack
        self.lz4_compress = lz4_compress
        # deque with maxlen automatically evicts the oldest frame on append.
        self.frames = deque(maxlen=num_stack)
        # Stack the per-frame bounds along a new leading axis so the declared
        # observation space matches the stacked observations.
        low = np.repeat(self.observation_space.low[np.newaxis, ...], num_stack, axis=0)
        high = np.repeat(self.observation_space.high[np.newaxis, ...], num_stack, axis=0)
        self.observation_space = Box(low=low, high=high, dtype=self.observation_space.dtype)
    def _get_observation(self):
        # The buffer must already be full; reset() guarantees this invariant.
        assert len(self.frames) == self.num_stack, (len(self.frames), self.num_stack)
        return LazyFrames(list(self.frames), self.lz4_compress)
    def step(self, action):
        """Step the wrapped env and return the rolling stack of observations."""
        observation, reward, done, info = self.env.step(action)
        self.frames.append(observation)
        return self._get_observation(), reward, done, info
    def reset(self, **kwargs):
        """Reset the wrapped env, pre-filling the stack with the first frame."""
        observation = self.env.reset(**kwargs)
        # NOTE(review): list comprehension used purely for its side effect.
        [self.frames.append(observation) for _ in range(self.num_stack)]
        return self._get_observation()
"[email protected]"
] | |
ddfc54a21be416b99146c962555566b36345dc00 | 98e2153999190fc9e391096dd45e0eeedf276ebd | /app/forms.py | 7df865abc6e3ee1e3b150141862875df0924d359 | [
"MIT"
] | permissive | Beaconsyh08/Real_Graph_Select | 41677d712d5cebe868c0e4fe8f840a66cb20243f | a164e76102ecd5aa78763050fd05029acb0b4993 | refs/heads/master | 2020-12-13T21:16:20.416279 | 2020-02-08T10:33:26 | 2020-02-08T10:33:26 | 234,531,596 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,268 | py | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo
from app.models import User
class LoginForm(FlaskForm):
    """Credentials form for signing an existing user in."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    # When checked, keeps the login session alive across browser restarts.
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
class RegistrationForm(FlaskForm):
    """Sign-up form with uniqueness checks for username and email.

    WTForms automatically runs ``validate_<fieldname>`` methods as extra
    inline validators for the matching field.
    """
    username = StringField('Username', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    # Must match `password` exactly (EqualTo validator).
    password2 = PasswordField(
        'Repeat Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Register')
    def validate_username(self, username):
        """Reject usernames that are already taken."""
        user = User.query.filter_by(username=username.data).first()
        if user is not None:
            raise ValidationError('Please use a different username.')
    def validate_email(self, email):
        """Reject email addresses that are already registered."""
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError('Please use a different email address.')
| [
"[email protected]"
] | |
0eedf3ac560ea9a0002f2b3093809ae30c42d489 | 9b42a0264cf4fbdc519c0bfc3106232973dc463f | /回滚/5/鼠标型打飞机.py | 434bc60553754c15e9fa45ddaed84b43623b9231 | [] | no_license | zero1300/plane_war | b8ed7ae8c25569f7b1fe6a062cb24672b9315849 | 0a228e45100d2950f96bdae9be4319508b6ad2ce | refs/heads/master | 2020-09-05T04:20:25.463127 | 2019-11-06T11:24:46 | 2019-11-06T11:24:46 | 219,979,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,381 | py | import pygame
import random
from Vec2d import Vec2d
import copy
# Asset file paths (background, player plane, enemy plane, bullet sprites).
background = './bg.png'
my_plan_picture = './plane01.png'
enemy_picture = './enemy.png'
my_bullet_picture = './bullet.png'
enemy_bullet_picture = './emenybullet.png'
pygame.init()
# Target frame rate; fc_lock.tick(FPS) in the main loop caps the game speed.
FPS = 120
fc_lock = pygame.time.Clock()
class Plan(object):
    """Mouse-controlled shoot-'em-up game: owns the window, sprites and state.

    NOTE(review): 'plan' throughout appears to be a misspelling of 'plane';
    names are kept as-is since renaming would change the public interface.
    """
    def __init__(self):
        # Set up the display surface (480x800 portrait window).
        self.screen = pygame.display.set_mode((480, 800))
        # Load the sprite images.
        self.background = pygame.image.load(background).convert()
        self.my_plan_picture = pygame.image.load(my_plan_picture).convert_alpha()
        self.enemy_picture = pygame.image.load(enemy_picture).convert_alpha()
        self.my_bullet_picture = pygame.image.load(my_bullet_picture).convert_alpha()
        self.enemy_bullet_picture = pygame.image.load(enemy_bullet_picture).convert_alpha()
        # Frame counter used to pace bullet spawning.
        # self.clock = pygame.time.Clock()
        self.time_record = 0
        # Live bullets (player / enemy), each stored as an [x, y] position.
        self.my_bullets = []
        self.enemy_bullets = []
        # Player bullet spawn interval (in frames) and bullets per volley.
        self.my_bullet_speed = 20
        self.my_bullet_number = 1
        # Live enemy planes.
        self.live_enemy = []
        # Positions of explosions currently being drawn.
        self.booming_pos = []
        # Initialise the player's plane:
        # position
        self.my_plan_pos = [288, 703]
        self.now_plan_picture = self.my_plan_picture
        # Initialise the enemy plane:
        # position
        self.enemy_pos = [0, 0]
        self.now_enemy_picture = self.enemy_picture
        self.enemy_dic = 4
    def move_my_plan(self):
        """Move the player's plane by the mouse delta and draw it."""
        # Get the mouse movement since the last call.
        my_plan_move = pygame.mouse.get_rel()
        # Ignore moves that would push the plane off-screen.
        if 0 < self.my_plan_pos[0] + self.my_plan_picture.get_width() / 2 + my_plan_move[0] < 480:
            # The plane's horizontal centre plus the offset stays inside (0, 480).
            self.my_plan_pos[0] += my_plan_move[0]
            # move the plane
        if 0 < self.my_plan_pos[1] + self.my_plan_picture.get_height() / 2 + my_plan_move[1] < 800:
            # The plane's vertical centre plus the offset stays inside (0, 800).
            self.my_plan_pos[1] += my_plan_move[1]
        # Draw the plane onto the screen.
        self.screen.blit(self.now_plan_picture, self.my_plan_pos)
    # Spawn and animate the player's bullets.
    def create_my_bullet(self):
        """Spawn new player bullets periodically and advance the existing ones."""
        # Add a volley every `my_bullet_speed` frames.
        if self.time_record % self.my_bullet_speed == 0:
            if self.my_bullet_number == 1:
                self.my_bullets.append([self.my_plan_pos[0] + 40, self.my_plan_pos[1] - 13])
            elif self.my_bullet_number == 2:
                self.my_bullets.append([self.my_plan_pos[0] + 53, self.my_plan_pos[1] - 13])
                self.my_bullets.append([self.my_plan_pos[0] + 83, self.my_plan_pos[1] - 13])
        # Bullet movement.
        # NOTE(review): removing items from `my_bullets` while iterating it
        # can skip the next element -- iterating over a copy would be safer.
        for bullet in self.my_bullets:
            if bullet[1] <= 0:
                # Destroy bullets that left the top of the screen.
                self.my_bullets.remove(bullet)
            else:
                # Otherwise move the bullet up (6 pixels per frame) and draw it.
                bullet[1] -= 6
                self.screen.blit(self.my_bullet_picture, bullet)
    def create_enemy(self):
        """Bounce the enemy plane horizontally between the screen edges."""
        # enemy_dic == 4: moving right; flip to 3 at the right edge
        # (480 minus the sprite width).
        if self.enemy_dic == 4:
            self.enemy_pos[0] += 2
            if self.enemy_pos[0] > 480-97:
                self.enemy_dic = 3
        # enemy_dic == 3: moving left; flip back to 4 at the left edge.
        if self.enemy_dic == 3:
            self.enemy_pos[0] -= 2
            if self.enemy_pos[0] < 0:
                self.enemy_dic = 4
        self.screen.blit(self.now_enemy_picture, self.enemy_pos)
    def judge_damage_enemy(self):
        """Check each player bullet against the enemy's bounding box."""
        width = self.enemy_picture.get_width()
        height = self.enemy_picture.get_height()
        for bullet_pos in self.my_bullets:
            enemy_pos = copy.deepcopy([self.enemy_pos[0], self.enemy_pos[1]])
            if enemy_pos[0] - width / 2 <= bullet_pos[0] <= enemy_pos[0] + width / 2 and enemy_pos[1] <= bullet_pos[1] <= enemy_pos[1] + height:
                # Remove the bullet that hit the enemy.
                self.my_bullets.remove(bullet_pos)
                print("dead")
    # Start the main game loop.
    def began(self):
        """Run the game loop: handle events, update and draw every frame."""
        over = 0
        while 1:
            # Hide the mouse cursor (the plane follows the mouse instead).
            pygame.mouse.set_visible(False)
            # Advance the frame counter.
            self.time_record += 1
            # time_pass = self.clock.tick()
            # pass_second = self.time_pass/1000.0
            # Handle window-close events.
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.display.quit()
                    exit()
            self.move_my_plan()
            self.create_my_bullet()
            self.create_enemy()
            self.judge_damage_enemy()
            # elif event.type == pygame.KEYDOWN:
            #     keys_press = pygame.key.get_pressed()
            #     # exit on alt+f4
            #     if keys_press pygame.[K_LALT] and keys_press[K_F4]:
            #         pygame.display.quit()
            #         exit()
            #     # adjust difficulty
            #     elif keys_press[K_LCTRL]:
            #         if self.hard != 5:
            #             self.hard += 1
            #         else:
            #             self.hard = 0
            #     # use energy
            #     elif event.type == MOUSEBUTTONDOWN and self.my_power > 0:
            #         self.sub_my_power = 1
            #     elif event.type == MOUSEBUTTONUP:
            #         self.my_bullet_speed = 20
            #         self.my_bullet_number = 1
            #         self.sub_my_power = 0
            # # drain energy
            # if self.sub_my_power == 1 and self.my_power > 0:
            #     self.my_bullet_speed = 10
            #     self.my_bullet_number = 2
            #     self.my_power -= 1.9
            # else:
            #     self.my_bullet_speed = 20
            #     self.my_bullet_number = 1
            # # regenerate energy
            # if self.my_power < 1000:
            #     self.my_power += 0.4
            # self.screen.blit(self.background, (0, 0))
            # self.remove_not_defeat()
            # if self.boss_life > 0:
            #     self.create_boss()
            # else:
            #     self.have_boss = 0
            # self.move_my_plan()
            # self.create_enemy()
            # self.create_my_bullet()
            # self.create_enemy_bullet()
            # self.judge_damage_enemy()
            # # invincible state
            # if self.is_not_defeat == 0:
            #     self.judge_damage_me()
            # self.move_box()
            # self.show_box()
            # self.boom()
            # self.draw_life_count()
            # end the game
            # if self.my_life < 0:
            #     over += 1
            #     if over == 1:
            #         font = pygame.font.SysFont('kaiti', 50, True)
            #         text = font.render('Game Over!', True, (255, 20, 50))
            #         self.screen.blit(text, (800, 400))
            #         # text = font.render('最终得分 %d' % self.my_count, True, (255, 20, 50))
            #         # self.screen.blit(text, (780, 500))
            #         pygame.display.update()
            # if self.my_life >= 0:
            pygame.display.update()
            self.screen.blit(self.background, (0, 0))
            fc_lock.tick(FPS)
# Entry point: build the game object and run the (infinite) main loop.
plane = Plan()
plane.began()
| [
"[email protected]"
] | |
72f809f1c6cd5f222cfa9427d1f96645a6d49eb5 | 111255234e654bff3036ebd414bdef7533481595 | /main.py | 0bf59ec346d49df10d9f7d3646d074517a284ce7 | [] | no_license | wanghaisheng/YOLO-Streamlit | 2ab07c14a3da85f39d784c7d91025c6aa7fab260 | e8e51d538ac6bf91c4e7883599484db36be8d337 | refs/heads/main | 2023-08-18T19:03:13.986376 | 2021-10-10T11:45:38 | 2021-10-10T11:45:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,369 | py | import streamlit as st
import cv2
import numpy as np
from PIL import Image
from Inference.inference import Inference_pipeline
from Inference.visualizer import Object_detection_visualizer
import tempfile
st.title("Object detection with YOLO")
# NOTE(review): hard-coded absolute Windows path -- only works on the
# author's machine; consider a relative path or env/CLI configuration.
config_path = r'C:\Users\Ankan\Desktop\Github\FastAPI-model-serving\Inference\config.json'
inf_pipeline = Inference_pipeline(config_path)
st.subheader("Select Image or Video")
selected = st.radio("",["Images","Video"])
if selected =="Video":
    video_file_buffer = st.file_uploader("Upload a Video", type=["mp4"])
    if video_file_buffer:
        st.subheader("Confidence")
        # NOTE(review): this slider value is never passed to the pipeline, and
        # the name `conf` is immediately clobbered by inference_image below.
        conf = st.slider("",
                    key="1",
                    min_value=0.0,
                    max_value=1.0,
                    step=0.1,
                    value=0.5
                    )
        # The uploader yields an in-memory file; OpenCV needs a real path,
        # so spill the upload to a temporary file first.
        tfile = tempfile.NamedTemporaryFile(delete=False)
        tfile.write(video_file_buffer.read())
        video = cv2.VideoCapture(tfile.name)
        stframe = st.empty()
        # if st.button("Detect Objects"):
        # Read, annotate and display frames in a loop.
        # NOTE(review): `ret` is never checked, so this will fail once the
        # video runs out of frames -- confirm intended behaviour.
        while True:
            ret, fr = video.read()
            labels, nms_result, bboxes, conf,class_idx = inf_pipeline.inference_image(fr)
            Object_detection_visualizer(fr, labels, nms_result, bboxes, conf, class_idx)
            # OpenCV frames are BGR; convert to RGB for PIL/Streamlit display.
            img_ = Image.fromarray(cv2.cvtColor(fr, cv2.COLOR_BGR2RGB))
            stframe.image(img_, caption='Object Detection on Video')
    else:
        st.error("Please Upload a video")
else:
    img_file_buffer = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])
    if img_file_buffer:
        st.subheader("Confidence")
        conf = st.slider("",
                    key="1",
                    min_value=0.0,
                    max_value=1.0,
                    step=0.1,
                    value=0.5
                    )
        image_org = Image.open(img_file_buffer)
        # PIL loads RGB; the OpenCV-based pipeline expects BGR.
        img = cv2.cvtColor(np.array(image_org), cv2.COLOR_RGB2BGR)
        labels, nms_result, bboxes, conf, class_idx = inf_pipeline.inference_image(img)
        Object_detection_visualizer(img, labels, nms_result, bboxes, conf, class_idx)
        img_ = Image.fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
        st.image(img_, caption='Object Detection on Image')
    else:
st.error("Please Upload a Image") | [
"[email protected]"
] | |
5c8b0670ed8a065abad7b185453a00fe4a99dd69 | fa93e53a9eee6cb476b8998d62067fce2fbcea13 | /devel/.private/pal_visual_localization_msgs/lib/python2.7/dist-packages/pal_visual_localization_msgs/msg/_VisualLocAddPlaceActionGoal.py | d4a9ad6f98e70043363db2269d010129a2aee23c | [] | no_license | oyetripathi/ROS_conclusion_project | 2947ee2f575ddf05480dabc69cf8af3c2df53f73 | 01e71350437d57d8112b6cec298f89fc8291fb5f | refs/heads/master | 2023-06-30T00:38:29.711137 | 2021-08-05T09:17:54 | 2021-08-05T09:17:54 | 392,716,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,152 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from pal_visual_localization_msgs/VisualLocAddPlaceActionGoal.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import actionlib_msgs.msg
import genpy
import pal_visual_localization_msgs.msg
import std_msgs.msg
class VisualLocAddPlaceActionGoal(genpy.Message):
_md5sum = "4b30be6cd12b9e72826df56b481f40e0"
_type = "pal_visual_localization_msgs/VisualLocAddPlaceActionGoal"
_has_header = True # flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalID goal_id
VisualLocAddPlaceGoal goal
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: pal_visual_localization_msgs/VisualLocAddPlaceGoal
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
#goal definition
"""
__slots__ = ['header','goal_id','goal']
_slot_types = ['std_msgs/Header','actionlib_msgs/GoalID','pal_visual_localization_msgs/VisualLocAddPlaceGoal']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,goal_id,goal
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(VisualLocAddPlaceActionGoal, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.goal_id is None:
self.goal_id = actionlib_msgs.msg.GoalID()
if self.goal is None:
self.goal = pal_visual_localization_msgs.msg.VisualLocAddPlaceGoal()
else:
self.header = std_msgs.msg.Header()
self.goal_id = actionlib_msgs.msg.GoalID()
self.goal = pal_visual_localization_msgs.msg.VisualLocAddPlaceGoal()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.goal_id.stamp.secs, _x.goal_id.stamp.nsecs))
_x = self.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.goal_id is None:
self.goal_id = actionlib_msgs.msg.GoalID()
if self.goal is None:
self.goal = pal_visual_localization_msgs.msg.VisualLocAddPlaceGoal()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.goal_id.stamp.secs, _x.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.goal_id.id = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.goal_id.stamp.secs, _x.goal_id.stamp.nsecs))
_x = self.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.goal_id is None:
self.goal_id = actionlib_msgs.msg.GoalID()
if self.goal is None:
self.goal = pal_visual_localization_msgs.msg.VisualLocAddPlaceGoal()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.goal_id.stamp.secs, _x.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.goal_id.id = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
| [
"[email protected]"
] | |
287a1ae52153cdb84929d5ffc2c542165a6f16d6 | 2e299b2a0998a1d955365555dc5f9cc410cd88c4 | /models/review.py | 00acafedbeb6374d5e8ce4518b0df9b0f9c22828 | [] | no_license | Zevrov/AirBnB_clone | 3bf6a8639fd78c69f5afe06a678eea946f68e238 | 2e8259860f4edae43cf8aa46b1fab71c76365901 | refs/heads/master | 2020-06-12T10:31:06.487687 | 2019-07-15T00:25:29 | 2019-07-15T00:25:29 | 194,271,944 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | #!/usr/bin/python3
"""class review"""
from models.base_model import BaseModel
class Review(BaseModel):
    """Representation of a review left by a user for a place."""
    place_id = ""  # id of the reviewed place (empty until assigned)
    user_id = ""  # id of the review author
    text = ""  # free-form review text
    def __init__(self, *args, **kwargs):
        """Initialize the review by delegating to BaseModel."""
        super().__init__(*args, **kwargs)
| [
"[email protected]"
] | |
05214a7008a75ce3f48fe29ceafea70414175845 | a733f427ac80ba43a8dee8e33b8566bab42a9d92 | /core/db_access_control/ddl_utils/exist_condition_patcher.py | d971d2a64e1bb67c5327502e7b2ea5c65a164e39 | [] | no_license | xyder/IOStorM | c1a56e03f1f9b6915615c6208d9ed39e2bd1903f | 81dd79412dd68bdaa55c335084c1b18c08d84979 | refs/heads/develop | 2020-06-15T16:57:46.563849 | 2017-03-19T01:08:44 | 2017-03-19T01:08:44 | 75,277,738 | 1 | 1 | null | 2016-12-29T08:12:10 | 2016-12-01T09:41:59 | Python | UTF-8 | Python | false | false | 5,339 | py | """
Module that provides functions to patch the existing DDL Elements by adding conditions that
take into account the pre-existence of elements in the databased.
-- based on gist https://gist.github.com/eirnym/afe8afb772a79407300a
-- by Arseny Nasokin
"""
import re
from copy import copy
from enum import Enum
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import CreateTable, DropTable, CreateIndex, DropIndex
from sqlalchemy.sql.ddl import CreateSchema, DropSchema
# DDL statements patched by default: each gains an IF [NOT] EXISTS guard
# via ExistConditionPatcher.
ELEMENTS_TO_PATCH = {
    CreateTable,
    CreateIndex,
    CreateSchema,
    DropTable,
    DropIndex,
    DropSchema
}
class ConditionVariants(Enum):
    """SQL existence guards that can be injected into a DDL statement."""
    if_not_exists = 'IF NOT EXISTS'  # for CREATE ... statements
    if_exists = 'IF EXISTS'  # for DROP ... statements
class ExistConditionPatcher(object):
    """Builds the pieces needed to patch a DDL element with an existence guard.

    Given a DDL element class (e.g. ``CreateTable``), the patcher derives:

      * ``method``    -- the compiler visit method name (``visit_create_table``)
      * ``regex``     -- a pattern matching the bare keywords (``CREATE TABLE``)
      * ``condition`` -- the replacement text including the guard
                         (``CREATE TABLE IF NOT EXISTS``)

    and attaches a ``check_first`` setter to the element class so callers can
    enable the guard on a per-statement basis.
    """

    @staticmethod
    def set_build_condition(self, value=True):
        """ Sets the build condition which enables the patcher to augment
        the DDL statement on compilation.

        Note: this function is attached to the *element* class (as
        ``check_first``) by :meth:`append_condition_setter`, so ``self`` here
        is the DDL statement instance, not the patcher.

        :param self: the DDL statement instance
        :type value: bool
        :param value: if True, on compilation the patcher will augment the output
        :return: a DDL statement with the build condition set. This is a
            shallow copy that enables chained calls such as:
            connection.execute(CreateTable(table).check_first())
        """
        augmented_self = copy(self)
        augmented_self._build_condition = value
        return augmented_self

    def append_condition_setter(self):
        """ Appends a setter for the build condition boolean to the element. """
        setattr(self.element, 'check_first', self.set_build_condition)

    def inject_condition(self, command):
        """ Injects the existence condition in the given command.

        :type command: str
        :param command: the command to be augmented
        :rtype: str
        :return: the modified command
        """
        # BUGFIX: re.sub's 4th positional argument is `count`, not `flags`;
        # passing re.S positionally silently capped the number of
        # substitutions (at the numeric value of re.S) instead of enabling
        # DOTALL.  Pass it as an explicit keyword flag.
        return re.sub(self.regex, self.condition, command, flags=re.S)

    @staticmethod
    def split_camel_case(input_text):
        """ Splits a string by the capital letters in it. Ex:
        'CamelCaseString' --> ['Camel', 'Case', 'String']

        :type input_text: str
        :param input_text: the text to be processed
        :rtype: list[str]
        :return: the result of the split
        """
        return re.findall('[A-Z][a-z]*', input_text)

    def __init__(self, element, variant=None, method='', regex='', replacement=''):
        """ Creates a patcher to augment DDL statements.

        :param element: the DDL element class to be augmented.
        :type variant: ConditionVariants
        :param variant: the type of injection to be made; derived from the
            element name ('Create...' / 'Drop...') when omitted.
        :type method: str
        :param method: the compiler method
        :type regex: str
        :param regex: search string to identify the substring to be replaced
        :type replacement: str
        :param replacement: replacement string to be injected
        :raises Exception: if the operation type or the variant cannot be
            determined.
        """
        self.element = element

        element_name = getattr(element, '__name__', '')
        if not element_name and (not method or not regex or not replacement):
            raise Exception('Operation type could not be determined.')

        # BUGFIX: initialise the attribute up front.  Previously, when
        # `variant` was falsy and the element name contained neither 'Drop'
        # nor 'Create', the `if not self.variant` check below raised
        # AttributeError instead of the intended Exception.
        self.variant = None
        if variant:
            self.variant = variant
        elif 'Drop' in element_name:
            self.variant = ConditionVariants.if_exists
        elif 'Create' in element_name:
            self.variant = ConditionVariants.if_not_exists

        if not self.variant:
            raise Exception('Variant could not be determined')

        name_parts = self.split_camel_case(element_name)

        self.method = method or 'visit_{}'.format('_'.join(name_parts).lower())
        self.regex = regex or '{}'.format(' '.join(name_parts).upper())
        self.condition = replacement or '{} {}'.format(
            ' '.join(name_parts).upper(), self.variant.value
        )

        self.append_condition_setter()
def create_patch(patcher, if_always=False):
    """ Patches and compiles the statement using the specified patcher.

    :type patcher: ExistConditionPatcher
    :param patcher: a patcher used to patch and compile the statement.

    :type if_always: bool
    :param if_always: if True, the patch will always be in effect.

    :rtype: collections.abc.Callable
    :return: a function that compiles over the statement.
    """
    # Register a custom compiler for the patched DDL element class via
    # sqlalchemy.ext.compiler.compiles.
    @compiles(patcher.element)
    def _if_exists_(element, compiler, **kw):
        # get the original compiled statement
        output = getattr(compiler, patcher.method)(element, **kw)
        # Augment only when requested via element.check_first() -- or
        # unconditionally when if_always was set at registration time.
        if if_always or getattr(element, '_build_condition', False):
            # patch the statement
            output = patcher.inject_condition(output)
        return output
    return _if_exists_
def enable_patches(statements=ELEMENTS_TO_PATCH, if_always=False):
    """ Function that patches the default set of statements or a given set of statements.

    :type statements: set
    :param statements: the statements that will be patched

    :type if_always: bool
    :param if_always: if True, the condition will always be set to true.
    """
    # Guard flag is stored on the function object itself so the patches are
    # applied at most once per process, however often this is called.
    if not getattr(enable_patches, '_patches_applied', False):
        setattr(enable_patches, '_patches_applied', True)
        for statement in statements:
            create_patch(ExistConditionPatcher(statement), if_always=if_always)
| [
"[email protected]"
] | |
e816b4b7e5cb489a487558067c1cc730b4997e8d | 513942a6f7d3d30999a538d7d6fbb491d7a33adc | /code/solution55.py | b608555415dbb90c9a9dd4326516a92b5c8f0e82 | [] | no_license | nemosharma6/100daysofcode | b51ba0d012956d9028746aa583f79200beaf14bd | 92ac930d6dcd0f7e03cb6fe2c2395e471297cd3b | refs/heads/master | 2020-04-20T10:06:44.305172 | 2019-06-04T04:25:45 | 2019-06-04T04:25:45 | 168,781,840 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | # https://leetcode.com/problems/remove-nth-node-from-end-of-list/
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    """LeetCode 19: delete the n-th node from the end of a singly linked list."""

    def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
        """Return the list head after removing the n-th node from the end.

        Two-pointer technique: a lead pointer walks n nodes ahead, then both
        pointers advance in lockstep until the lead runs off the list.
        """
        lead = head
        for _ in range(n):
            lead = lead.next
        before, target = None, head
        while lead is not None:
            before, target = target, target.next
            lead = lead.next
        # target is now the node to drop; before is its predecessor.
        if before is None:
            # Removing the head itself.
            return target.next
        before.next = target.next
        return head
| [
"[email protected]"
] | |
85c1c05f414c65a634b7aa2e00e109053e5b8b0a | df720ce2a6bf1fb8d48d27c036d0dbae55fb5ddf | /examples/index.py | 317c7ce7a845760bd810fd9fc7177f465d4af887 | [] | no_license | DarwinInnovation/py-bulksms | aeafdbf80d266003764c56a405ffeb2b87c8fb78 | 007048d3d2afc2ce6431e8f2e53049618b475670 | refs/heads/master | 2021-01-22T04:02:15.768372 | 2017-09-08T13:07:23 | 2017-09-08T13:07:23 | 92,424,311 | 0 | 1 | null | 2017-05-25T16:47:24 | 2017-05-25T16:47:24 | null | UTF-8 | Python | false | false | 253 | py | #!/usr/bin/env python2.3
'''
index.py: Example of using the callback interface.
'''
from BulkSMS import ReplyHandler
# Minimal CGI endpoint (Python 2): emit a plain-text header, persist the
# incoming BulkSMS reply to a scratch file, then answer "1" to acknowledge.
print "Content-type: text/plain"
# NOTE(review): CGI requires a blank line between the headers and the body;
# confirm the trailing "print 1" is received as intended by the caller.
f = open('/tmp/bulksms_reply_callback', 'w')
f.write(repr(ReplyHandler.fetch_reply()))
# TODO(review): the file handle is never explicitly closed/flushed.
print 1
| [
"devnull@localhost"
] | devnull@localhost |
7c0d5fcd466223947a6cf50a2aa5f2c297c752d0 | df374b00e90702a2dcb40a0ac01cf23aae903392 | /download-google-news-model.py | 2f9b3e7ad78c3134a556b377bc5cf5588dd7fc8a | [
"MIT"
] | permissive | afcarl/MediaCloud-WordEmbeddingsServer | b392249a1d723d7da9c8c1b0f482bfc0b4a96e71 | 0b1dcc99229c3563343c04723a524be6d15a6e35 | refs/heads/master | 2020-09-03T03:28:14.717883 | 2019-01-07T16:56:12 | 2019-01-07T16:56:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | import os
import requests
import shutil
# Public S3 location of the pre-trained GoogleNews word2vec vectors.
MODEL_GOOGLE_NEWS_URL = "https://s3.amazonaws.com/mediacloud-nytlabels-data/predict-news-labels/GoogleNews-vectors-negative300.bin"
# Local destination: ./models/GoogleNews-vectors-negative300.bin
model_dir = "./models"
model_name = "GoogleNews-vectors-negative300.bin"
path_to_model_file = os.path.join(model_dir, model_name)
# Make sure the destination directory exists before downloading into it.
if not os.path.exists(model_dir):
    os.mkdir(model_dir)
# https://stackoverflow.com/a/39217788/1172063
def download_file(url, destination_file):
    """Stream the resource at *url* into *destination_file*.

    Downloads in streaming mode so the (multi-GB) model never has to fit in
    memory. Raises requests.HTTPError on a 4xx/5xx response instead of
    silently saving the server's error page to disk.
    """
    # based on https://stackoverflow.com/a/39217788/1172063
    r = requests.get(url, stream=True)
    try:
        r.raise_for_status()  # fail loudly on HTTP errors
        with open(destination_file, 'wb') as f:
            shutil.copyfileobj(r.raw, f)
    finally:
        r.close()  # always release the pooled HTTP connection
# Download the model only once; later runs reuse the cached local copy.
if not os.path.isfile(path_to_model_file):
    print "Google word2vec model not found, downloading model file from the cloud..."
    download_file(MODEL_GOOGLE_NEWS_URL, path_to_model_file)
    print " done!"
else:
    print "Google word2vec model already exists."
| [
"[email protected]"
] | |
7af0311db516a4618388476fbfb5bb141c01a59d | 2612a21d776e08a16b6b1318dd60fc7851cd28de | /captainhook/checkers/yamllint_checker.py | 3a4e391cb7d5b4fedfd7521574b9ce7ae95cbae0 | [
"BSD-3-Clause"
] | permissive | haizaar/captainhook | 208122554ba19a2197cae859497093724e149ecd | 8062e7eea765f090baf1d892b9bec3ec2ac14913 | refs/heads/master | 2020-04-17T16:08:47.335570 | 2018-03-19T09:31:18 | 2018-03-19T09:31:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,035 | py | # # # # # # # # # # # # # #
# CAPTAINHOOK IDENTIFIER #
# # # # # # # # # # # # # #
import os
from .utils import bash, get_config_file
# The plugin is disabled unless explicitly switched on in the config file.
DEFAULT = 'off'
CHECK_NAME = 'yamllint'
# Shown when the optional yamllint dependency cannot be imported.
NO_YAMLLINT_MSG = ("yamllint is required for the yamllint plugin.\n"
                   "`pip install yamllint` or turn it off in your {}"
                   " file.".format(get_config_file()))
# Files yamllint reads for its own configuration -- presumably consumed by
# the captainhook framework; TODO confirm.
REQUIRED_FILES = ['.yamllint']
def _filter_yaml_files(files):
"Get all yaml files from the list of files. The extention based filter"
yaml_files = []
for f in files:
extension = os.path.splitext(f)[-1]
if extension:
if extension in ('.yaml', 'yml'):
yaml_files.append(f)
return yaml_files
def run(files, temp_folder):
    """Entry point of the yamllint plugin.

    Returns yamllint's output for the YAML files in *files*, an empty string
    when there is nothing to check, or an install hint when the optional
    yamllint dependency is missing.
    """
    try:
        import yamllint  # NOQA
    except ImportError:
        return NO_YAMLLINT_MSG
    targets = _filter_yaml_files(files)
    if not targets:
        return ''
    command = 'yamllint {0}'.format(' '.join(targets))
    return bash(command).value()
| [
"[email protected]"
] | |
60f2931e70a8cddda2db81dbc1c5757c90ecd578 | 961931333838aebe8bd17c30c19f3994e32d76ce | /src/leetcode/sql/175. Combine Two Tables.py | 084dafc14d15b7212cff6436f4fa0d901afb9738 | [] | no_license | MTGTsunami/LeetPython | 5161f9e31dc2ab1855123c2a3a151eb6f4d889bc | f7f3839f631f08a9e5bf8a02398b940f82e43e67 | refs/heads/master | 2023-04-17T16:59:45.621291 | 2021-04-26T07:24:50 | 2021-04-26T07:24:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,191 | py | """
Table: Person
+-------------+---------+
| Column Name | Type |
+-------------+---------+
| PersonId | int |
| FirstName | varchar |
| LastName | varchar |
+-------------+---------+
PersonId is the primary key column for this table.
Table: Address
+-------------+---------+
| Column Name | Type |
+-------------+---------+
| AddressId | int |
| PersonId | int |
| City | varchar |
| State | varchar |
+-------------+---------+
AddressId is the primary key column for this table.
Write a sql query for a report that provides the following information for each person in the Person table, regardless if there is an address for each of those people:
FirstName, LastName, City, State
"""
# Write your MySQL query statement below
"""
SELECT per.FirstName, per.LastName, addr.City, addr.State
FROM Person per, Address addr
WHERE per.PersonId = addr.PersonId
UNION
SELECT per.FirstName, per.LastName, NULL as City, NULL as State
FROM Person per
WHERE per.PersonId NOT IN (SELECT PersonId from Address);
"""
# left join answer
"""
select FirstName, LastName, City, State
from Person
left join Address
on Person.PersonId = Address.PersonId;
"""
| [
"[email protected]"
] | |
a4a774637b9506a4fcd7d59ad3d3b00dbb6e00b4 | 1bd3076902117867ec048210905195ba2aaaaa6b | /exercise/leetcode/python_src/by2017_Sep/Leet257.py | db7fa7529bc48303517ff4c1dd0938360c20b150 | [] | no_license | SS4G/AlgorithmTraining | d75987929f1f86cd5735bc146e86b76c7747a1ab | 7a1c3aba65f338f6e11afd2864dabd2b26142b6c | refs/heads/master | 2021-01-17T20:54:31.120884 | 2020-06-03T15:04:10 | 2020-06-03T15:04:10 | 84,150,587 | 2 | 0 | null | 2017-10-19T11:50:38 | 2017-03-07T03:33:04 | Python | UTF-8 | Python | false | false | 1,877 | py | # Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, x):
        # Children start detached; callers wire them up explicitly.
        self.val, self.left, self.right = x, None, None
class Solution:
    """Collect every root-to-leaf path of a binary tree as 'a->b->c' strings.

    The traversal state lives on the instance, so results of repeated
    binaryTreePaths calls on the same instance accumulate in path_result.
    """

    def __init__(self):
        self.path_stack = []   # values along the path currently being walked
        self.path_result = []  # finished path strings

    # @param {TreeNode} root
    # @return {string[]}
    def binaryTreePaths0(self, root):
        """Recursive helper: record each root-to-leaf path below *root*."""
        if root is None:
            self.path_result.append(self.show_path())
            return
        self.path_stack.append(root.val)
        if root.left is None and root.right is None:
            # Leaf: the stack now spells out one complete path.
            self.path_result.append(self.show_path())
        else:
            if root.left is not None:
                self.binaryTreePaths0(root.left)
            if root.right is not None:
                self.binaryTreePaths0(root.right)
        self.path_stack.pop()

    def binaryTreePaths(self, root):
        """Return all root-to-leaf paths of *root*; [] for an empty tree."""
        if root is None:
            return []
        self.binaryTreePaths0(root)
        return self.path_result

    def show_path(self):
        """Render the current stack as 'v1->v2->...'; None when empty."""
        if not self.path_stack:
            return None
        return "->".join(str(value) for value in self.path_stack)
# Build the sample tree:   1
#                         / \
#                        2   3
#                         \
#                          5
tree_list=[
    TreeNode(1),#0
    TreeNode(2),#1
    TreeNode(3),#2
    TreeNode(5),#3
]
tree_list[0].left =tree_list[1]
tree_list[0].right=tree_list[2]
tree_list[1].right=tree_list[3]
# Expected paths printed below: ['1->2->5', '1->3']
s=Solution()
print(s.binaryTreePaths(tree_list[0])) | [
"[email protected]"
] | |
5483f0ba03c6be775bb4ee207361205bd339d979 | 83934c40b2bd835464732345fa516b2c657a6259 | /Pyrado/scripts/training/qcp-su_nes.py | d3b218d0f8e2488bf360010e8bb7ecc7b696ddf7 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | 1abner1/SimuRLacra | e0427bf4f2459dcb992206d3b2f347beab68a5b4 | d7e9cd191ccb318d5f1e580babc2fc38b5b3675a | refs/heads/master | 2023-05-25T04:52:17.917649 | 2021-06-07T07:26:44 | 2021-06-07T07:26:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,616 | py | # Copyright (c) 2020, Fabio Muratore, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Fabio Muratore, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL FABIO MURATORE, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Train an agent to solve the Quanser Cart-Pole swing-up task using Natural Evolution Strategies.
"""
import pyrado
from pyrado.algorithms.episodic.nes import NES
from pyrado.environment_wrappers.action_normalization import ActNormWrapper
from pyrado.environments.pysim.quanser_cartpole import QCartPoleSwingUpSim
from pyrado.logger.experiment import save_dicts_to_yaml, setup_experiment
from pyrado.policies.recurrent.rnn import GRUPolicy
from pyrado.utils.argparser import get_argparser
if __name__ == "__main__":
# Parse command line arguments
args = get_argparser().parse_args()
# Experiment (set seed before creating the modules)
ex_dir = setup_experiment(QCartPoleSwingUpSim.name, f"{NES.name}_{GRUPolicy.name}")
# Set seed if desired
pyrado.set_seed(args.seed, verbose=True)
# Environment
env_hparams = dict(dt=1 / 250.0, max_steps=12 * 250, long=False)
env = QCartPoleSwingUpSim(**env_hparams)
env = ActNormWrapper(env)
# Policy
policy_hparam = dict(
hidden_size=32,
num_recurrent_layers=1,
# init_param_kwargs=dict(t_max=50)
)
# policy = LSTMPolicy(spec=env.spec, **policy_hparam)
policy = GRUPolicy(spec=env.spec, **policy_hparam)
# Algorithm
algo_hparam = dict(
max_iter=5000,
pop_size=50,
num_init_states_per_domain=6,
eta_mean=2.0,
eta_std=None,
expl_std_init=0.5,
symm_sampling=False,
transform_returns=True,
num_workers=10,
)
algo = NES(ex_dir, env, policy, **algo_hparam)
# Save the hyper-parameters
save_dicts_to_yaml(
dict(env=env_hparams, seed=args.seed),
dict(policy=policy_hparam),
dict(algo=algo_hparam, algo_name=algo.name),
save_dir=ex_dir,
)
# Jeeeha
algo.train(snapshot_mode="best", seed=args.seed)
| [
"[email protected]"
] | |
77550b313431e86fc2369d28a83666e70250982a | 1d482878230a6c6cbef7680f3910561a4b35c35c | /element/scripts/migrations/0012_auto_20180214_2136.py | 37f92c55454d0f9a6a8c68d228d14f48ad51c87f | [] | no_license | karthikvasudevan92/elem | ac5355fe029251b7de76428a558049ab949689df | f5dad5cdfaba736843d29c781ec253d2cee51ccd | refs/heads/master | 2021-04-28T03:47:23.643252 | 2018-03-17T13:10:07 | 2018-03-17T13:10:07 | 122,144,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | # Generated by Django 2.0.1 on 2018-02-14 21:36
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration: adds the Sentence model and widens
    the Line.text field."""

    # Must run after migration 0011_script_lines of the scripts app.
    dependencies = [
        ('scripts', '0011_script_lines'),
    ]

    operations = [
        # New Sentence table: free text plus its position number.
        migrations.CreateModel(
            name='Sentence',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField(max_length=5000)),
                ('sentnum', models.IntegerField(blank=True)),
            ],
        ),
        # Widen Line.text to max_length=10000.
        migrations.AlterField(
            model_name='line',
            name='text',
            field=models.TextField(max_length=10000),
        ),
    ]
| [
"[email protected]"
] | |
83fbed47c19f6eda9ab2452593ce6e746ec85cda | f20e965e19b749e84281cb35baea6787f815f777 | /Bender/Ex/BenderExample/python/BenderExample/AP.py | 604d72c693f17381de5711c4bf10df4e096af680 | [] | no_license | marromlam/lhcb-software | f677abc9c6a27aa82a9b68c062eab587e6883906 | f3a80ecab090d9ec1b33e12b987d3d743884dc24 | refs/heads/master | 2020-12-23T15:26:01.606128 | 2016-04-08T15:48:59 | 2016-04-08T15:48:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,772 | py | #!/usr/bin/env python
# =============================================================================
# $Id$
# =============================================================================
## @file BenderExample/AP.py
#
# The simple Bender-based example: show " Lambda0 <-> K0S " reflection
#
# This file is a part of
# <a href="http://cern.ch/lhcb-comp/Analysis/Bender/index.html">Bender project</a>
# <b>``Python-based Interactive Environment for Smart and Friendly
# Physics Analysis''</b>
#
# The package has been designed with the kind help from
# Pere MATO and Andrey TSAREGORODTSEV.
# And it is based on the
# <a href="http://cern.ch/lhcb-comp/Analysis/LoKi/index.html">LoKi project:</a>
# ``C++ ToolKit for Smart and Friendly Physics Analysis''
#
# By usage of this code one clearly states the disagreement
# with the smear campaign of Dr.O.Callot et al.:
# ``No Vanya's lines are allowed in LHCb/Gaudi software.''
#
# @date 2009-09-01
# @author Vanya BELYAEV [email protected]
#
# Last modification $Date$
# by $Author$
# =============================================================================
"""
The simple Bender-based example: show ' Lambda0 <-> K0S ' reflection
This file is a part of BENDER project:
``Python-based Interactive Environment for Smart and Friendly Physics Analysis''
The project has been designed with the kind help from
Pere MATO and Andrey TSAREGORODTSEV.
And it is based on the
LoKi project: ``C++ ToolKit for Smart and Friendly Physics Analysis''
By usage of this code one clearly states the disagreement
with the smear campaign of Dr.O.Callot et al.:
``No Vanya's lines are allowed in LHCb/Gaudi software.''
Last modification $Date$
by $Author$
"""
# =============================================================================
__author__ = " Vanya BELYAEV [email protected] "
__date__ = " 2009-09-01 "
__version__ = " Version $Revision$ "
# =============================================================================
## import everything from Bender
import GaudiKernel.SystemOfUnits as Units
from Bender.Awesome import *
import LoKiMC.trees as Trees
import PartProp.Nodes as Nodes
from LoKiCore.functions import *
from LoKiPhys.Phys import *
# =============================================================================
## Simple class for access MC-truth
# @author Vanya BELYAEV [email protected]
# @date 2006-10-13
class AP(AlgoMC) :
    """
    Simple class for access MC-truth
    """
    ## standard method for analyses
    def analyse( self ) :
        """
        Standard method for analyses
        """
        # Select MC-truth decay trees for K0S, Lambda0 and anti-Lambda0
        # (LoKi decay-descriptor strings).
        mcks = self.mcselect ( 'ks' , 'KS0 -> pi+ pi-' )
        mcl01 = self.mcselect ( 'l01' , 'Lambda0 -> p+ pi-' )
        mcl02 = self.mcselect ( 'l02' , 'Lambda~0 -> p~- pi+' )
        if mcks.empty() and mcl01.empty() and mcl02.empty() :
            return self.Warning ( 'No mc-trees are found' , SUCCESS )
        # MC-truth matching functors; NONE when the corresponding list is empty.
        mcKS = NONE if mcks .empty() else MCTRUTH ( self.mcTruth () , mcks )
        mcL01 = NONE if mcl01.empty() else MCTRUTH ( self.mcTruth () , mcl01 )
        mcL02 = NONE if mcl02.empty() else MCTRUTH ( self.mcTruth () , mcl02 )
        # Keep only pions matched to one of the MC trees, then split by charge.
        pions = self.select ( 'pi' ,
                              ( 'pi+' == ABSID ) &
                              ( mcKS | mcL01 | mcL02 ) )
        self.select ( 'pi+' , pions , Q > 0 )
        self.select ( 'pi-' , pions , Q < 0 )
        tup = self.nTuple ( 'AP' )
        ## construct dipions
        dipion = self.loop ( 'pi+ pi-' , 'KS0' )
        for pi2 in dipion :
            # Keep light pairs (below 1 GeV) with a decent vertex-fit chi2.
            m12 = pi2.mass(1,2) / Units.GeV
            if m12 > 1 : continue
            chi2 = VCHI2 ( pi2 )
            if not 0.0 <= chi2 < 25 : continue
            # MC-truth flags for the three hypotheses.
            mc1 = mcKS ( pi2 )
            mc2 = mcL01 ( pi2 )
            mc3 = mcL02 ( pi2 )
            if mc1 or mc2 or mc3 :
                self.plot ( M(pi2) / Units.GeV , 'MASS pi pi ' , 0.3 , 0.8 , 500 )
            else: continue
            # One n-tuple row per MC-matched candidate.
            tup.column ( 'm' , M(pi2) / Units.GeV )
            tup.column ( 'mc1' , mc1 )
            tup.column ( 'mc2' , mc2 )
            tup.column ( 'mc3' , mc3 )
            tup.column ( 'vchi2' , chi2 )
            tup.column ( 'tr1' , TRTYPE ( pi2(1) ) )
            tup.column ( 'tr2' , TRTYPE ( pi2(2) ) )
            tup.column ( 'lv02' , LV02(pi2) )
            tup.write()
        return SUCCESS
# =============================================================================
## configure the job
def configure ( datafiles , catalogs = [] , castor = False ) :
    """
    Configure the job
    """
    ##
    ## Static configuration using "Configurables"
    ##
    from Configurables import DaVinci
    daVinci = DaVinci (
        DataType = '2012' ,
        Simulation = True ,
        HistogramFile = 'AP_Histos.root' ,
        TupleFile = 'AP_Tuples.root'
        )
    from StandardParticles import StdNoPIDsPions
    # keep only the output location string of the standard pions container
    StdNoPIDsPions = StdNoPIDsPions.outputLocation()
    ## define/set the input data
    setData ( datafiles , catalogs , castor )
    ##
    ## jump into the wonderful world of the actual Gaudi components!
    ##
    ## get the actual application manager (create if needed)
    gaudi = appMgr()
    ## create local algorithm:
    alg = AP (
        'AP' ,
        ## MC-relations
        PP2MCs = [ 'Relations/Rec/ProtoP/Charged' ] ,
        ## print histograms ?
        HistoPrint = True ,
        ## input particles :
        Inputs = [ StdNoPIDsPions ]
        )
    ##gaudi.addAlgorithm ( alg )
    gaudi.setAlgorithms( [alg] )
    return SUCCESS
# =============================================================================
## job steering
if __name__ == '__main__' :
    ## make printout of the own documentations
    print '*'*120
    print __doc__
    print ' Author : %s ' % __author__
    print ' Version : %s ' % __version__
    print ' Date : %s ' % __date__
    print '*'*120
    ## configure the job:
    # Input data: 29 simulated DST files, fetched via castor/grid access.
    inputfiles = [
        '/lhcb/MC/DEV/ALLSTREAMS.DST/00018392/0000/00018392_00000%03d_1.allstreams.dst' % i for i in range ( 1 , 30 )
        ]
    configure ( inputfiles , castor = True )
    ## run the job
    run(1000)
# =============================================================================
# The END
# =============================================================================
| [
"[email protected]"
] | |
92e144e2dd2749cf268097192cad47256509ff90 | 0ea5c7efb4d3aafce3826105011a6c61af7f2ede | /test/test_runner.py | ee852af5aa19948e01bf1ab92840f6d665225ecf | [] | no_license | benwaldner/pyCryptoTrader | 4877d2e5239a0e71140ecb5bf353d7b13d5ef741 | 73ffc2aa7084204675f0c5acc0e2270d4a407adc | refs/heads/master | 2023-03-18T20:35:36.971335 | 2019-02-11T22:22:11 | 2019-02-11T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | from pprint import pprint
from subprocess import Popen, PIPE
import sys
import os
# Test scripts to execute, in order; entries are mutated by main(), which
# rewrites each relative path into an absolute one.
cmds = [
    ["python", "db_test.py"],
    ["python", "analysis/hist_data_test.py"],
    ["python", "analysis/backtest_trader_test.py"],
    ["python", "analysis/backtest_test.py"],
    ["python", "analysis/plot_test.py"],
    ["python", "analysis/strategy_test.py"]
]
def main(argv):
    """Run every test script in cmds, stopping at the first failure.

    :param argv: command-line args; argv[0], when present, is the 1-based
        index of the first test to run (earlier tests are skipped).
    """
    cur_dir = os.path.dirname(os.path.abspath(__file__))
    starting_test = int(argv[0]) if len(argv) > 0 else 0
    for i, cmd in enumerate(cmds):
        if i >= starting_test - 1:
            test_file = cmd[1]
            # Resolve the script path relative to this file's directory.
            cmd[1] = os.path.join(cur_dir, cmd[1])
            print(f"\n>>>>>>>>>> RUNNING {test_file} <<<<<<<<<<")
            p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
            output, err = p.communicate()
            # BUGFIX: was `rc is not 0` -- identity comparison with an int is
            # implementation-dependent (and a SyntaxWarning on Python 3.8+);
            # compare with != instead.
            if p.returncode != 0:
                print("Error Message:\n")
                print(err.decode('utf-8'))
                sys.exit(1)
            else:
                print('Success\n')
# Forward CLI arguments (optional starting test index) to main().
if __name__ == '__main__':
    main(sys.argv[1:])
"[email protected]"
] | |
42a61862caec64f7c5ce248a6463b544eccc4a27 | 2a39ad7d9340fc557bc56d3022374748cc888dc9 | /thor/utils/api.py | e7974fc19f3ee2885e597b1c8d51f75826088b18 | [] | no_license | tiechuiwangwang/thor | 5e5c99191e0e94c956b25aeee150e5532b65346a | 0adbaac9b67e0df79afea35909d44bf83d6476d2 | refs/heads/master | 2020-04-26T05:02:44.583318 | 2019-03-09T15:36:50 | 2019-03-09T15:36:50 | 173,321,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,681 | py | import json
from flask import Response
def json_response(data, http_status):
    """Wrap an already-serialized JSON string in a Flask Response."""
    resp = Response(data, status=http_status, mimetype="application/json")
    return resp
class BusinessResponse(object):
    """An (HTTP status, business code, default error message) triple.

    Instances are callable: invoking one renders the corresponding JSON
    response, optionally overriding the error and attaching a result.
    """

    def __init__(self, http_code, code, error=None):
        self.http_code, self.code, self.error = http_code, code, error

    def __call__(self, result=None, error=None):
        """Render this business response as a JSON Flask response."""
        return response(self, result=result, error=error)
def response(msg, result=None, error=None):
    """Serialize a BusinessResponse (plus optional result/error override)
    into a JSON Flask response."""
    payload = {
        'http_code': msg.http_code,
        'code': msg.code,
        'error': error or msg.error,
        'result': result,
    }
    return json_response(json.dumps(payload), msg.http_code)
# HTTP status codes used by the responses below.
HTTP_OK = 200
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
# NOTE(review): name is misspelled ("FORBBIDEN") and unused in this module;
# kept as-is to avoid breaking importers.
HTTP_FORBBIDEN = 403
HTTP_NOT_FOUND = 404
# Business code 0 = success; 1-9 = generic request errors.
ok = BusinessResponse(HTTP_OK, 0)
err_unauthorized_required = \
    BusinessResponse(HTTP_UNAUTHORIZED, 1, 'Login required')
err_params_required = BusinessResponse(HTTP_BAD_REQUEST, 2, 'Params required')
err_params_error = BusinessResponse(HTTP_BAD_REQUEST, 3, 'Params Error')
err_unkown = BusinessResponse(HTTP_BAD_REQUEST, 9, 'Unkown Error')
# 1xxx for User module
err_invalid_username_or_password = \
    BusinessResponse(HTTP_BAD_REQUEST, 1001, 'Invalid Username or Password')
err_user_is_inactive = \
    BusinessResponse(HTTP_BAD_REQUEST, 1002, 'Inactive User')
err_username_exists = \
    BusinessResponse(HTTP_BAD_REQUEST, 1003, 'Username Exists')
# 2xxx for Album module
err_photo_not_found = \
    BusinessResponse(HTTP_NOT_FOUND, 2001, 'Photo Not Found')
err_photo_already_liked = \
    BusinessResponse(HTTP_BAD_REQUEST, 2002, 'You Have Liked the Photo')
| [
"[email protected]"
] | |
cb8ad10707c3e94850cb0d509f41346b7c544bfa | 31c31345946b3526ffe3f1eafbc9dd7cdb48e03a | /URI Online Judge/Estruturas e Bibliotecas/2729 - Lista de Compras/2729.py | 94d7f79422f294e646b8e101383c24d51216da37 | [] | no_license | felipefoschiera/Competitive-Programming | 84967cb7c6b82df5990cccea5d5b6862b9e63f65 | fe664046d0161fd6a15d4b8d8f983e77c6dc3dcb | refs/heads/master | 2022-02-23T16:07:04.326089 | 2019-10-04T19:22:22 | 2019-10-04T19:22:22 | 198,893,046 | 0 | 0 | null | 2019-07-25T19:53:36 | 2019-07-25T19:53:36 | null | UTF-8 | Python | false | false | 227 | py | tests = int(input())
# For each test case: read one line of words, keep first occurrences only,
# sort them, and (on the following line) print them space-separated.
for _ in range(tests):
    palavras = input().split()
    unicas = []
    # Linear-scan dedup; O(n^2) but fine for judge-sized inputs.
    for pal in palavras:
        if pal not in unicas:
            unicas.append(pal)
    unicas.sort()
print(' '.join(unicas)) | [
"[email protected]"
] | |
244ea6eaeefdcc69834400b3af182d0f8e12dd1e | 7ddd62a2281e70374b3ff7bf93b987696ce8f72e | /Static/Reduced/SAE-CAT.py | 710acad841a3b15cdf30cf32d5b416d160778f8f | [] | no_license | Almondo4/ML-IDS-Framework | c76c56679b029e45520fb6acbcb1e5e4d6c283ab | dcd8c42c6fdb8e26cded9d10b813e5bd7a5b1eb4 | refs/heads/master | 2023-02-17T00:46:34.536026 | 2021-01-15T22:12:16 | 2021-01-15T22:12:16 | 329,980,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,004 | py | import os
# All run directories are grouped under ./my_logs.
root_logdir = os.path.join(os.curdir, "my_logs")


def get_run_logdir():
    """Return a run-directory path named after the current timestamp."""
    import time

    stamp = time.strftime("run_%Y_%m_%d-%H_%M_%S")
    return os.path.join(root_logdir, stamp)


run_logdir = get_run_logdir()  # e.g. './my_logs/run_2019_06_07-15_15_22'
#########################################################################
#########################################################################
import pandas as pd
# Despite the .csv names, these are pickled DataFrames (read_pickle).
DataTrain = pd.read_pickle("../../Data/Static_Training.csv")
DataTest = pd.read_pickle("../../Data/Static_Testing.csv")
import tensorflow as tf
from tensorflow import keras
import pandas as pd
import numpy as np
# Load the pre-trained hybrid model and keep only its first layer --
# presumably the stacked-autoencoder feature extractor; TODO confirm.
dynamicModel = keras.models.load_model("../../Hybrid/Late Integration/SAE_DLS_2.h5")
dynamicModel = dynamicModel.layers[0]
# Last column is the label; all other columns are features.
featureMatrixTR = DataTrain.iloc[:,:-1].values
labelVectorTR = DataTrain.iloc[:,-1].values
featureMatrix = DataTest.iloc[:,:-1].values
labelVector = DataTest.iloc[:,-1].values
# Encode both splits through the loaded layer.
featureMatrixTR = dynamicModel.predict(featureMatrixTR)
featureMatrix =dynamicModel.predict(featureMatrix)
# # 3 Scaling the dataSet
# from sklearn.preprocessing import StandardScaler
# sc = StandardScaler()
# featureMatrixTR = sc.fit_transform(featureMatrixTR)
# featureMatrix = sc.fit_transform(featureMatrix)
from sklearn.preprocessing import LabelEncoder
labelencoder = LabelEncoder()
labelVectorTR = labelencoder.fit_transform(labelVectorTR)
# NOTE(review): fit_transform is called again on the test labels, which
# assumes both splits contain the same label set; otherwise the encodings
# diverge. Consider transform() only -- confirm with the data.
labelVector = labelencoder.fit_transform(labelVector)
import catboost as cb
# GPU-trained gradient boosting over the encoded features.
model = cb.CatBoostClassifier(depth=8,learning_rate=1,iterations=600,task_type="GPU",devices='0:1')
model.fit(featureMatrixTR, labelVectorTR)
# Report
from sklearn.metrics import classification_report
model_predictions = model.predict(featureMatrix)
# cm = confusion_matrix(labelVector, model_predictions)
print(classification_report(labelVector, model_predictions.round(),digits =4))
from sklearn.metrics import roc_auc_score
auc = roc_auc_score(labelVector, model_predictions.round())
print('ROC AUC: %f' % auc)
| [
"[email protected]"
] | |
971721283348a6ed96146c38df67305cce322f3f | 3d81f8b648f6bf227cd607eddfd7d14f67193d65 | /save_sequence.py | 556b6fcc5d2041c8990b70218c6f0fcd2696924d | [] | no_license | Marcin-Regulski/Sawyer4.0 | 3b3fc23fc544d51f0c8ad4305fe4198c8a83ca88 | bd5c34765b24345bb2037e08f64dbb7f7adfb44e | refs/heads/master | 2021-04-06T18:06:16.904568 | 2018-05-07T18:42:19 | 2018-05-07T18:42:19 | 125,240,338 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,085 | py | #!/usr/bin/env python
# Aim of this script is to detect current joint positions and x,y,z of
# gripper and save it in dictionary as set up locations for a robot for later use
import argparse
import struct
import sys
import copy
import rospy
import rospkg
import intera_interface
import imp
flask_server= imp.load_source("Server", "/home/nitro/webapp/app.py")
from geometry_msgs.msg import (
PoseStamped,
Pose,
Point,
Quaternion
)
class Save_Positions(object):
    """Records the Sawyer robot's current joint angles, end-effector pose and
    gripper widths to text files, building up a named motion sequence."""

    def __init__(self, limb="right", hover_distance = 0.3, tip_name = "right_gripper_tip"):
        self._limb_name = limb # string
        self._tip_name = tip_name # string
        self._hover_distance = hover_distance # in meters
        self._limb = intera_interface.Limb(limb)
        self._gripper = intera_interface.Gripper()
        self._sequence = []  # ordered names of the positions saved so far
        # verify robot is enabled
        print("Getting robot state... ")
        self._rs = intera_interface.RobotEnable(intera_interface.CHECK_VERSION)
        self._init_state = self._rs.state().enabled
        print("Enabling robot... ")
        self._rs.enable()

    def _retract(self):
        """Return the current end-effector pose as a geometry_msgs Pose."""
        # retrieve current pose from endpoint
        current_pose = self._limb.endpoint_pose()
        ik_pose = Pose()
        #ik_pose = current_pose
        ik_pose.position.x = current_pose['position'].x
        ik_pose.position.y = current_pose['position'].y
        ik_pose.position.z = current_pose['position'].z
        ik_pose.orientation.x = current_pose['orientation'].x
        ik_pose.orientation.y = current_pose['orientation'].y
        ik_pose.orientation.z = current_pose['orientation'].z
        ik_pose.orientation.w = current_pose['orientation'].w
        return ik_pose

    def save_sequence(self,name_seq):
        """Write the recorded position names to <name_seq>_sequence.txt."""
        # The while guard only checks ROS shutdown once: the method returns
        # after a single pass through the body.
        while not rospy.is_shutdown():
            f = open("/home/nitro/sawyer_ws/src/sawyer_gripper/src/poses/"+name_seq+"_sequence.txt","w+")
            for i in range(0,len(self._sequence)):
                f.write(self._sequence[i]+" ")
            f.close()
            return

    def save_values_to_file(self,name,grip_in):
        """Persist the current pose, joint angles and gripper widths for a
        position called *name*, and append it to the sequence.

        grip_in: space-separated gripper tokens ('c' = closed, 'o' = open).
        """
        grip_widths = []
        self._sequence.append(name)
        grip_widths = grip_in.split()
        current_angles = self._limb.joint_angles()
        cp = self._retract()
        f = open("/home/nitro/sawyer_ws/src/sawyer_gripper/src/poses/"+name+".txt","w+")
        f.write(str(cp)+"\n")
        f.write("Joint angles:\n")
        # Joint angles are written from the wrist (j6) down to the base (j0).
        f.write(str(current_angles['right_j6'])+"\n")
        f.write(str(current_angles['right_j5'])+"\n")
        f.write(str(current_angles['right_j4'])+"\n")
        f.write(str(current_angles['right_j3'])+"\n")
        f.write(str(current_angles['right_j2'])+"\n")
        f.write(str(current_angles['right_j1'])+"\n")
        f.write(str(current_angles['right_j0'])+"\n")
        f.write("Gripper widths - before and after: \n")
        # Translate shorthand tokens into concrete width values.
        grip_widths = ['0' if x == 'c' else x for x in grip_widths]
        grip_widths = ['MAX_POSITION' if x == 'o' else x for x in grip_widths]
        # NOTE(review): grip_widths[0] is written twice -- the second write
        # looks like it should be grip_widths[1] ("after"); confirm intent.
        f.write(grip_widths[0]+"\n")
        f.write(grip_widths[0]+"\n")
        f.close()
def get_info(sp):
    """Prompt once for a pose name and gripper widths, then record the pose."""
    while not rospy.is_shutdown():
        pose_name = raw_input("Name of position: ")
        widths = raw_input("Set up width of grippers for beggining and end of position - leave space between values (c- close /o - open): ")
        sp.save_values_to_file(pose_name, widths)
        break
def main():
rospy.init_node("Save_sequence")
sp = Save_Positions()
while not rospy.is_shutdown():
raw_input( "Hello! Move Sawyer to the first location and click enter to save it!")
get_info(sp)
while True:
decision = raw_input("Would you like to add new position or save sequence? p/s ")
if decision == 'p':
get_info(sp)
elif decision == 's':
print sp._sequence
name_seq = raw_input("Name of sequence: ")
sp.save_sequence(name_seq)
break
if __name__ == '__main__':
sys.exit(main())
| [
"[email protected]"
] | |
300055fffdd0f469d53b856b350e4172c0be70a0 | 9bd297ef9430dbc42a4a00655f787bc7453b5fd8 | /Final Project/page2.py | 05e62089e041cd7873cd84382aec2f94011e0216 | [] | no_license | neilhsu70/DataAnalytics-Final-Project | fb26b12aa408af3d67fa35716e5268757fd87896 | 7c0f56ca0941ced962c6b9b453c43ca2a1ff6a24 | refs/heads/master | 2022-11-17T06:25:20.896902 | 2020-07-21T23:02:53 | 2020-07-21T23:02:53 | 277,962,173 | 1 | 2 | null | 2020-07-21T21:19:03 | 2020-07-08T01:42:46 | Jupyter Notebook | UTF-8 | Python | false | false | 3,509 | py | import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
import plotly
import pandas as pd
import numpy as np
from datetime import datetime
import plotly.graph_objects as go
import plotly.express as px
from datetime import datetime
#Data
# Pull the live per-country case table from the JHU CSSE COVID-19 feed.
US_country_df = pd.read_csv('https://raw.githubusercontent.com/CSSEGISandData/COVID-19/web-data/data/cases_country.csv')
# Keep only the United States row.
US_country_df=US_country_df.loc[US_country_df['Country_Region'] == 'US']
# Format each counter with thousands separators for display in the cards.
US_country_df['Deaths'] = US_country_df['Deaths'].astype(int).apply(lambda x: "{:,}".format(x))
US_country_df['Recovered'] = US_country_df['Recovered'].astype(int).apply(lambda x: "{:,}".format(x))
US_country_df['Active'] = US_country_df['Active'].astype(int).apply(lambda x: "{:,}".format(x))
# NOTE(review): Confirmed uses np.int64 while the others use plain int -
# presumably to avoid 32-bit overflow on some platforms; confirm.
US_country_df['Confirmed'] = US_country_df['Confirmed'].astype(np.int64).apply(lambda x: "{:,}".format(x))
#title and contributors
# Header card: page title plus the list of dashboard contributors.
US_title_contributors =dbc.Card([dbc.CardBody([dbc.Container([
html.H1(children='Remotely monitoring the COVID-19 pandemic: US', className='mt-5 py-4 pb-3 text-center'),
html.P("Dashboard contributors: Bianca A. Hernandez, Ningning Du, Neil Hsu, Youngjung Choi", style = {'font-weight': 'bold'}, className='mt-3 py-2 pb-1 text-center'),
])])])
#cards for tally us
# One colour-coded counter card per metric.
# NOTE(review): the trailing comma after the first three Card(...) calls makes
# US_first_card..US_third_card 1-tuples rather than Card objects (the fourth
# has no comma).  dbc.Col appears to tolerate a tuple of children - confirm.
US_first_card=dbc.Card([
dbc.CardBody(children=[html.H4('Confirmed', style = {'padding-top': '5px','font-weight':'bold', 'color':'#5e4fa2'}),
html.Div([dbc.Button(US_country_df['Confirmed'].sum(), color="#5e4fa2", size = "lg")])],
className='text-center')
]),
US_second_card=dbc.Card([
dbc.CardBody(children = [html.H4('Recovered', style = {'padding-top': '5px', 'font-weight':'bold', 'color':'#66c2a5'}),
html.Div([dbc.Button(US_country_df['Recovered'].sum(), color="#66c2a5", size = "lg")])],
className='text-center'),
]),
US_third_card=dbc.Card([
dbc.CardBody(children = [html.H4('Deaths', style = {'padding-top': '5px', 'font-weight':'bold', 'color':'#d53e50'}),
html.Div([dbc.Button(US_country_df['Deaths'].sum(), color="#d53e50", size = "lg")])],
className='text-center'),
]),
US_fourth_card=dbc.Card([
dbc.CardBody(children = [html.H4('Active', style = {'padding-top': '5px', 'font-weight':'bold', 'color':'#f46d43',}),
html.Div([dbc.Button(US_country_df['Active'].sum(), color="#f46d43", size = "lg")])],
className='text-center'),
])
# Section heading for the map analysis block.
container1 = dbc.Card([dbc.CardBody([dbc.Container([
html.H1(children='US Map Analysis', className='mt-3 py-2 pb-1 text-center'),
])])])
# Embedded Tableau dashboard: US map of deaths and cases.
container2 = html.Div([
html.Iframe(src="https://public.tableau.com/views/USmapsdeathscases/Dashboard1?:embed=yes&:showVizHome=no", width = "100%", height = "1000")
])
# Embedded Tableau dashboard: top-10 states by active/confirmed/deaths.
US_first_row = html.Div([
html.Br(),
html.Iframe(src="https://public.tableau.com/views/Top_10_states_ACD/Dashboard2?:embed=yes&:showVizHome=no", width = "100%", height = "1000")
])
def US_title_authors():
    """Return the page heading card (title plus contributor credits)."""
    return US_title_contributors


def us_tallies():
    """Return the four summary counters laid out in a single centred row."""
    return dbc.Row(
        [dbc.Col(US_first_card), dbc.Col(US_second_card),
         dbc.Col(US_third_card), dbc.Col(US_fourth_card)],
        className='justify-content-center',
    )


def container_box():
    """Return the 'US Map Analysis' section heading card."""
    return container1


def container_box2():
    """Return the embedded Tableau US map dashboard."""
    return container2


def US_main():
    """Return the top-10-states Tableau dashboard row."""
    return US_first_row
| [
"[email protected]"
] | |
34d30b873823b4abc98982c09013c47d8703f1e6 | b85c8edaf0d4edbc4ce820f19c9ecc7abcb2e3bc | /CSCI220/Week 09 - MAR12-16/tictactoeexample.py | 4c6d08e728d0beadd34962461f1c37393d59fddf | [
"Apache-2.0"
] | permissive | itsallvoodoo/csci-school | 3eba3390c595b281f3f94750b8b787ca3f8c9f29 | ecf0c737d31c077e54ddd24ddab50d73d8eb5b17 | refs/heads/master | 2020-05-24T15:37:46.999396 | 2014-03-24T16:19:17 | 2014-03-24T16:19:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,504 | py | from graphics import *
def create_board():
    """Open the game window, draw the 3x3 grid and return (window, board)."""
    board = [['', '', ''], ['', '', ''], ['', '', '']]
    wwin = GraphWin("Tic Tac Toe", 300, 300)
    # Flip the coordinate system so (0, 0) is the bottom-right corner.
    wwin.setCoords(30, 30, 0, 0)
    # One vertical and one horizontal grid line at each 10-unit interval.
    for offset in (10, 20):
        Line(Point(offset, 0), Point(offset, 30)).draw(wwin)
        Line(Point(0, offset), Point(30, offset)).draw(wwin)
    return wwin, board
def get_column(board, i):
    """Concatenate the three cells of column *i* into one string (e.g. 'XOX')."""
    return board[0][i] + board[1][i] + board[2][i]


def get_row(board, i):
    """Concatenate the three cells of row *i* into one string."""
    return board[i][0] + board[i][1] + board[i][2]


def check_winner(board):
    """Return 'X' or 'O' if that player has three in a line, else None.

    Bug fix: the original row loop compared an undefined name ``row1``
    instead of ``row``, raising NameError the first time it ran.
    """
    for i in range(3):
        row = get_row(board, i)
        if row == 'XXX':
            return 'X'
        if row == 'OOO':
            return 'O'
    for i in range(3):
        col = get_column(board, i)
        if col == 'XXX':
            return 'X'
        if col == 'OOO':
            return 'O'
    # Main diagonal, then anti-diagonal.
    diag = board[0][0] + board[1][1] + board[2][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    diag = board[2][0] + board[1][1] + board[0][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    return None
def take_turn(win, board, who):
    """Read one mouse click, draw *who*'s mark in that cell and record it."""
    click = win.getMouse()
    col = int(click.getX() // 10)
    row = int(click.getY() // 10)
    # Centre the mark inside the clicked 10x10 cell.
    Text(Point(col * 10 + 5, row * 10 + 5), who).draw(win)
    board[row][col] = who
def run_game():
    """Play one game of up to nine turns; return the winner's mark or None."""
    win, board = create_board()
    for turn in range(9):
        # X moves on even turns, O on odd turns.
        who = 'X' if turn % 2 == 0 else 'O'
        take_turn(win, board, who)
        winner = check_winner(board)
        if winner is not None:
            win.close()
            return winner
    win.close()
    return check_winner(board)
def valid_user(u, p):
    """Return True when (u, p) matches one of the hard-coded accounts."""
    credentials = [("Paul", "Anderson"), ("Joe", "Smith")]
    return (u, p) in credentials
def button_clicked(button, p):
    """Return True when point *p* lies inside *button*'s bounding rectangle."""
    corner1 = button.getP1()
    corner2 = button.getP2()
    # Normalise the corners so the test works regardless of drawing order.
    low_x, high_x = sorted([corner1.getX(), corner2.getX()])
    low_y, high_y = sorted([corner1.getY(), corner2.getY()])
    return low_x <= p.getX() <= high_x and low_y <= p.getY() <= high_y
def login():
    """Show the login dialog and collect up to three click attempts.

    Returns True on a successful login, None if the user clicks Quit,
    and False when all three attempts are used up.
    """
    win = GraphWin("Login",300,300)
    win.setCoords(0,0,100,100)
    # Labels and text-entry fields for the two credentials.
    Text(Point(20,60),"User: ").draw(win)
    Text(Point(20,40),"Password: ").draw(win)
    user_entry = Entry(Point(50,60),10)
    user_entry.draw(win)
    password_entry = Entry(Point(50,40),10)
    password_entry.draw(win)
    # Status line at the top, updated on failed attempts.
    message = Text(Point(50,90),"")
    message.draw(win)
    # Create a login button and a quit button
    login_button = Rectangle(Point(5,15),Point(25,25))
    login_button.draw(win)
    p1 = login_button.getP1()
    p2 = login_button.getP2()
    # Caption centred inside the button rectangle.
    Text(Point((p1.getX() + p2.getX())/2,(p1.getY() + p2.getY())/2),"Login").draw(win)
    quit_button = Rectangle(Point(35,15),Point(55,25))
    quit_button.draw(win)
    p1 = quit_button.getP1()
    p2 = quit_button.getP2()
    Text(Point((p1.getX() + p2.getX())/2,(p1.getY() + p2.getY())/2),"Quit").draw(win)
    for i in range(3): # Maximum number of clicks
        p = win.getMouse()
        if button_clicked(login_button,p):
            user = user_entry.getText()
            password = password_entry.getText()
            if valid_user(user,password):
                win.close()
                return True
            else:
                message.setText("Invalid user and/or password")
        elif button_clicked(quit_button,p):
            # Quit is distinguished from failure by returning None.
            win.close()
            return None
    win.close()
    return False
def show_winner(wins_X, wins_O):
    """Pop up the final score window; it closes on the next mouse click."""
    win = GraphWin("Final Results", 300, 300)
    win.setCoords(0, 0, 100, 100)
    for y, label, count in ((75, "Number of wins for X: ", wins_X),
                            (25, "Number of wins for O: ", wins_O)):
        Text(Point(50, y), label + str(count)).draw(win)
    win.getMouse()
    win.close()
def main():
    """Log in, play three games, tally the wins, then show the final score."""
    # Both a failed login (False) and a Quit click (None) abort the session.
    if not login():
        return
    wins_X = 0
    wins_O = 0
    for _ in range(3):
        winner = run_game()
        if winner == 'X':
            wins_X += 1
        elif winner == 'O':
            wins_O += 1
    show_winner(wins_X, wins_O)


main()
| [
"[email protected]"
] | |
5234f46aeb64a207e5d0099f2b162292e8f5c6fb | 3026c00ff1f0b2cd700a992f84e9e36f85df6da9 | /Assignment 2/familiarization.py | 74045312f51547f616bbe7a428bb261bff8f0fd9 | [] | no_license | jwbambacht/cyber-data-analytics | 9e703e350ccdce18868c742c5dae35ae0f56424d | de511b24169fd176b22cf52af4c50f9453dd2a0b | refs/heads/master | 2022-11-29T10:14:06.507649 | 2020-08-17T10:28:24 | 2020-08-17T10:28:24 | 259,739,901 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,738 | py | import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import familiarization as fam
from statsmodels.tsa.ar_model import AutoReg
from sklearn.metrics import mean_squared_error
from math import sqrt
sns.set_style("darkgrid")
def load_data(file_name):
    """Load a CSV file into a DataFrame and return it.

    The original also called ``data.head()`` and discarded the result - a
    no-op left over from notebook exploration; it has been removed.
    """
    return pd.read_csv(file_name)
# Preprocess the data set
def pre_process(data):
    """Split the combined DATETIME column into 'date' and integer 'hour'.

    Mutates *data* in place and returns it.
    """
    stamps = data.DATETIME
    # Everything but the last two characters is the date part; the hour of
    # day starts at character index 9.
    data["date"] = stamps.str[:-2]
    data["hour"] = stamps.str[9:].astype(str).astype(int)
    data.drop(columns=["DATETIME"], axis=1, inplace=True)
    return data
# Plot a signal
def plot_signal(data,col,start=0,end=720,title="",color="blue"):
f,ax = plt.subplots(figsize=(22,3))
f.suptitle(title)
ax = sns.lineplot(data=data.loc[start:end,col],color=color)
ax.set_xticks(range(start,end+1,24))
plt.show()
# Generate heatmap of all correlations between any columns
def plot_correlation(data):
    """Heat-map the pairwise correlations between all signal columns."""
    signals = data.drop(columns=["date", "hour", "ATT_FLAG"], axis=1)
    correlation = signals.corr()
    # Constant signals produce NaN correlations; display them as zero.
    for name in correlation.columns:
        correlation[name] = correlation[name].fillna(0)
    fig, axis = plt.subplots(figsize=(25, 20))
    axis = sns.heatmap(correlation)
    axis.set_title('Correlation between signals')
    plt.show()
# Predict next point based on model and lag
def predict(coefficients, history):
    """One-step AR prediction: intercept plus lagged terms.

    coefficients[0] is the intercept; coefficients[i] multiplies the value
    i steps back in *history*.
    """
    acc = coefficients[0]
    for lag, coef in enumerate(coefficients[1:], start=1):
        acc += coef * history[-lag]
    return acc
# Predict signal
def predict_signal(data, col, ratio):
    """Fit an AR(4) model on the first-differences of data[col] and run a
    walk-forward one-step-ahead prediction over the held-out tail.

    ratio is the fraction of samples used for training.  Plots original vs
    predicted differences and returns the RMSE over the test segment.
    """
    tdata = data[col]
    train_size = int(len(tdata)*ratio)
    # Determine the differences from each point to its predecessor
    differences = np.array([tdata[i]-tdata[i-1] for i in range(1,len(tdata))])
    # Create a train and test set
    X_train, X_test = differences[0:train_size], differences[train_size:]
    # Train the AutoRegression model using training data and defined lag
    reg = AutoReg(X_train, lags=4)
    reg_fit = reg.fit()
    coefficients = reg_fit.params
    # Save the training points and predict new points based on the coefficients of the model and the training points
    history = [X_train[i] for i in range(len(X_train))]
    predictions = list()
    for t in range(len(X_test)):
        yhat = predict(coefficients, history)
        obs = X_test[t]
        predictions.append(yhat)
        # Walk-forward: append the *observed* value, so each prediction
        # only ever uses real past data.
        history.append(obs)
    # Calculate RMSE
    rmse = sqrt(mean_squared_error(X_test, predictions))
    # Plot the original and predicted signal
    f,ax = plt.subplots(figsize=(22,3))
    sns.lineplot(data=X_test,color='blue', label="original")
    sns.lineplot(data=np.array(predictions),color='red', label='predicted')
    f.suptitle("Original vs Predicted signal of "+col+", RMSE: "+str(rmse))
    plt.show()
    return rmse
| [
"[email protected]"
] | |
dc687ce6f7b886a107577fc6a8a2c650f47eb41a | 3e93dd2ee190bc7eb928f6b9eec34d4d9b23e09a | /quora_clone/questions/api/views.py | e5e01dcda32b0f0856b3b876ea9dd5cad0d092d7 | [] | no_license | jordanengstrom/quora_clone | 0cdaec0bd0ff50013ba04db502817bcb965c659d | c55e0008f842d7cc43e21708be0a9cb540120dc3 | refs/heads/main | 2023-02-13T16:38:31.934285 | 2021-01-11T01:22:11 | 2021-01-11T01:22:11 | 320,454,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,758 | py | from rest_framework import generics, status, viewsets
from rest_framework.generics import get_object_or_404
from rest_framework.exceptions import ValidationError
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from .serializers import AnswerSerializer, QuestionSerializer
from .permissions import IsAuthorOrReadOnly
from ..models import Answer, Question
class QuestionViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for questions, looked up by slug, newest first."""
    queryset = Question.objects.all().order_by('-created_at')
    lookup_field = 'slug'
    serializer_class = QuestionSerializer
    permission_classes = [IsAuthenticated, IsAuthorOrReadOnly]

    def perform_create(self, serializer):
        # Stamp the authenticated user as the question's author.
        serializer.save(author=self.request.user)
class AnswerCreateAPIView(generics.CreateAPIView):
    """Create an answer for the question identified by the URL slug.

    Each user may answer a given question only once.
    """
    queryset = Answer.objects.all()
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated]

    def perform_create(self, serializer):
        request_user = self.request.user
        kwarg_slug = self.kwargs.get('slug')
        question = get_object_or_404(Question, slug=kwarg_slug)
        # Reject duplicate answers from the same author.
        if question.answers.filter(author=request_user):
            raise ValidationError('You have already answered this question!')
        serializer.save(author=request_user, question=question)
class AnswerListAPIView(generics.ListAPIView):
    """List all answers to the question identified by the URL slug, newest first."""
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        kwarg_slug = self.kwargs.get('slug')
        return Answer.objects.filter(question__slug=kwarg_slug).order_by('-created_at')
class AnswerRetrieveUpdateDestroyAPIView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single answer; writes restricted to its author."""
    queryset = Answer.objects.all()
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated, IsAuthorOrReadOnly]
class AnswerLikeAPIView(APIView):
    """Toggle the requesting user's like on an answer (POST adds, DELETE removes)."""
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated]

    # Removes a like
    def delete(self, request, pk):
        answer = get_object_or_404(Answer, pk=pk)
        user = request.user
        answer.voters.remove(user)
        answer.save()
        # Re-serialize so the response reflects the updated vote count.
        serializer_context = {'request': request}
        serializer = self.serializer_class(answer, context=serializer_context)
        return Response(serializer.data, status=status.HTTP_200_OK)

    # Adds a like
    def post(self, request, pk):
        answer = get_object_or_404(Answer, pk=pk)
        user = request.user
        answer.voters.add(user)
        answer.save()
        serializer_context = {'request': request}
        serializer = self.serializer_class(answer, context=serializer_context)
        return Response(serializer.data, status=status.HTTP_200_OK)
| [
"[email protected]"
] | |
9e04a3a470b7c4827a6790f552542dc33b86ca25 | a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea | /airflow/decorators/__init__.py | f375fbf7167ba51bf4fa366449265f28793ea4e1 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ishiis/airflow | 4305794e36b611d01f49e3f2401be3dc49782670 | 292440d54f4db84aaf0c5a98cf5fcf34303f2fa8 | refs/heads/master | 2022-07-30T00:51:28.806940 | 2022-07-14T12:07:11 | 2022-07-14T12:07:11 | 209,801,072 | 1 | 0 | Apache-2.0 | 2019-09-20T13:47:26 | 2019-09-20T13:47:26 | null | UTF-8 | Python | false | false | 2,175 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any
from airflow.decorators.base import TaskDecorator
from airflow.decorators.branch_python import branch_task
from airflow.decorators.python import python_task
from airflow.decorators.python_virtualenv import virtualenv_task
from airflow.decorators.task_group import task_group
from airflow.models.dag import dag
from airflow.providers_manager import ProvidersManager
# Please keep this in sync with the .pyi's __all__.
__all__ = [
    "TaskDecorator",
    "TaskDecoratorCollection",
    "dag",
    "task",
    "task_group",
    "python_task",
    "virtualenv_task",
    "branch_task",
]


class TaskDecoratorCollection:
    """Implementation to provide the ``@task`` syntax."""

    python = staticmethod(python_task)
    virtualenv = staticmethod(virtualenv_task)
    branch = staticmethod(branch_task)

    __call__: Any = python  # Alias '@task' to '@task.python'.

    def __getattr__(self, name: str) -> TaskDecorator:
        """Dynamically get provider-registered task decorators, e.g. ``@task.docker``."""
        # Never resolve dunder names through the provider registry.
        if name.startswith("__"):
            raise AttributeError(f"{type(self).__name__} has no attribute {name!r}")
        decorators = ProvidersManager().taskflow_decorators
        if name not in decorators:
            raise AttributeError(f"task decorator {name!r} not found")
        return decorators[name]


# Module-level singleton used as the ``@task`` decorator.
task = TaskDecoratorCollection()
| [
"[email protected]"
] | |
d123e74afa86ffc6e4f45ab02bf16a9737f44e58 | aa4aa51465d79e0447cbe22281f0402ca95bdaa2 | /matlab/acc90/train.py | a013d93c11352252919e764cbacc4e8aeb9f42c5 | [] | no_license | zuozuo12/usualProject | 2ca06bb7a1ff6f99343f1997053ba8d5a48e00a7 | 335bcef5d76d6cf0c84dd3209176089b3b07fbba | refs/heads/master | 2020-11-27T17:02:33.252884 | 2019-10-22T06:46:32 | 2019-10-22T06:46:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,669 | py | # coding: utf-8
# import the necessary packages
from keras.models import Sequential
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dense
from keras import backend as K
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from keras.preprocessing.image import img_to_array
from keras.utils import to_categorical
from imutils import paths
import matplotlib.pyplot as plt
import matplotlib
matplotlib.use("Agg")
import numpy as np
import argparse
import random
import cv2
import os
import sys
class LeNet:
    @staticmethod
    def build(width, height, depth, classes):
        """Build the classic LeNet CONV->POOL x2 -> FC architecture.

        width/height/depth describe the input images; classes sets the
        size of the softmax output layer.
        """
        # Keras reports the backend's channel ordering; place the depth
        # axis accordingly.
        if K.image_data_format() == "channels_first":
            input_shape = (depth, height, width)
        else:
            input_shape = (height, width, depth)

        layers = [
            # first set of CONV => RELU => POOL layers
            Conv2D(20, (5, 5), padding="same", input_shape=input_shape),
            Activation("relu"),
            MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
            # second set of CONV => RELU => POOL layers
            Conv2D(50, (5, 5), padding="same"),
            Activation("relu"),
            MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
            # fully connected head
            Flatten(),
            Dense(500),
            Activation("relu"),
            # softmax classifier
            Dense(classes),
            Activation("softmax"),
        ]
        model = Sequential()
        for layer in layers:
            model.add(layer)
        return model
def load_data(path):
    """Read every image under *path* and return (data, one-hot labels).

    The class label of an image is its parent directory's name, indexed
    into the sorted list of directories directly under *path*.  Pixels are
    scaled to [0, 1]; images are resized to norm_size x norm_size.
    """
    print(path)
    labels_list = os.listdir(path)
    labels_list.sort()
    print("[INFO] loading images...")
    data = []
    labels = []
    # Shuffle with a fixed seed so runs are reproducible.
    imagePaths = sorted(list(paths.list_images(path)))
    random.seed(42)
    random.shuffle(imagePaths)
    for imagePath in imagePaths:
        # Load, resize and convert each image to an array.
        image = cv2.imread(imagePath)
        image = cv2.resize(image, (norm_size, norm_size))
        data.append(img_to_array(image))
        # The class name is the second-to-last path component.
        label = labels_list.index(imagePath.split(os.path.sep)[-2])
        labels.append(label)
    # scale the raw pixel intensities to the range [0, 1]
    data = np.array(data, dtype="float") / 255.0
    labels = np.array(labels)
    # convert the labels from integers to one-hot vectors
    labels = to_categorical(labels, num_classes=CLASS_NUM)
    return data, labels
def train(aug, trainX, trainY, testX, testY):
    """Compile and fit LeNet on augmented data, then save the model and a
    loss/accuracy training plot.

    aug is a fitted ImageDataGenerator; the remaining arguments are the
    train/test splits produced by load_data().
    """
    # initialize the model
    print("[INFO] compiling model...")
    model = LeNet.build(width=norm_size, height=norm_size, depth=3, classes=CLASS_NUM)
    opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
    model.compile(loss="categorical_crossentropy", optimizer=opt,
                  metrics=["accuracy"])
    # train the network
    print("[INFO] training network...")
    H = model.fit_generator(aug.flow(trainX, trainY, batch_size=BS),
                            validation_data=(testX, testY),
                            steps_per_epoch=len(trainX) // BS,
                            epochs=EPOCHS, verbose=1)
    # save the model to disk
    print("[INFO] serializing network...")
    model.save(".//lenet5.model")
    # plot the training loss and accuracy
    plt.style.use("ggplot")
    plt.figure()
    N = EPOCHS
    plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
    plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
    # NOTE(review): newer Keras spells these keys "accuracy"/"val_accuracy";
    # confirm against the installed version.
    plt.plot(np.arange(0, N), H.history["acc"], label="train_acc")
    plt.plot(np.arange(0, N), H.history["val_acc"], label="val_acc")
    plt.title("Training Loss and Accuracy on traffic-sign classifier")
    plt.xlabel("Epoch #")
    plt.ylabel("Loss/Accuracy")
    plt.legend(loc="lower left")
    # Bug fix: the original called plt.savefig(".//"), which points at a
    # directory and raises; save the figure to an actual file instead.
    plt.savefig(".//training_plot.png")
# We also need to set some training parameters, such as the number of epochs
# and the batch size.  These values are not arbitrary: batch_size depends on
# how much memory the machine has (more memory allows a larger batch), and
# norm_size (the normalised image size) was chosen by inspecting the dataset -
# most images fall in this range, so 64 seemed reasonable, but whether it is
# optimal can only be determined experimentally.
# initialize the number of epochs to train for, initial learning rate,
# and batch size
CLASS_NUM=0
EPOCHS = 100
INIT_LR = 1e-3
BS = 32
norm_size = 128

if __name__ == '__main__':
    # Train and test share the same directory here; CLASS_NUM is derived
    # from the number of class sub-directories.
    train_file_path = ".\\simple_classes";
    test_file_path = ".\\simple_classes"
    CLASS_NUM = len(os.listdir(train_file_path))
    trainX, trainY = load_data(train_file_path)
    testX, testY = load_data(test_file_path)
    # construct the image generator for data augmentation
    aug = ImageDataGenerator(rotation_range=30, width_shift_range=0.1,
        height_shift_range=0.1, shear_range=0.2, zoom_range=0.2,
        horizontal_flip=True, fill_mode="nearest")
    train(aug, trainX, trainY, testX, testY)
| [
"[email protected]"
] | |
9656a8cf0063f25f89370d2950a610595094a3b6 | 1f3cc953ed9d1a799e732a2c03774c5be1970188 | /resolution_1/resolution_1.py | 28e60b944b8030783d084d5a16da5a94c68c4ede | [] | no_license | Murray2015/thesis_python_code | 0483d9552d04bc63b59d0fe6403b89af1847f4a7 | 6745a6e7d06ccc9343f0e40cccf91752096fba24 | refs/heads/master | 2021-01-20T05:46:45.824680 | 2018-04-02T16:53:51 | 2018-04-02T16:53:51 | 101,470,416 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,789 | py | # -*- coding: utf-8 -*-
"""
Created on Wed May 3 12:36:54 2017
@author: mxh909
"""
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
# Load the Resolution-1 wireline log (.las exported as whitespace table);
# -999.25 is the LAS null value.
log_data = pd.read_csv("Wireline/resolution-1_final.las", delim_whitespace=True,
header=None, names=['DEPTH', 'BS', 'CALI', 'DTC', 'GR', 'GR_CORR',
'RESD', 'RESS', 'SP'], na_values=-999.2500, skiprows=56)
# Six stacked panels, one per curve, sharing the depth axis.
fig = plt.figure(figsize=(8,4))
fig.set_figheight(12)
fig.set_figwidth(10)
#f.title("Resolution-1 Well Log")
ax1 = fig.add_subplot(611)
ax1.set_ylabel('Calliper')
log_data.plot('DEPTH', 'CALI', ax=ax1, color='red', sharex=True,subplots=True, legend=False)
ax2 = fig.add_subplot(612)
ax2.set_ylabel('DT')
ax2.yaxis.set_label_position("right")
log_data.plot('DEPTH', 'DTC', ax=ax2, color='blue', sharex=True,subplots=True,legend=False)
ax2.yaxis.tick_right()
ax3 = fig.add_subplot(613)
ax3.set_ylabel('GR')
log_data.plot('DEPTH', 'GR_CORR', color='black', ax=ax3, sharex=True,subplots=True,legend=False)
ax4 = fig.add_subplot(614)
ax4.set_ylabel('RESD')
ax4.yaxis.set_label_position("right")
# Resistivity curves are conventionally shown on a log scale.
log_data.plot('DEPTH', 'RESD', logy=True, color='green', ax=ax4, sharex=True,subplots=True,legend=False)
ax4.yaxis.tick_right()
ax5 = fig.add_subplot(615)
ax5.set_ylabel('RESS')
log_data.plot('DEPTH', 'RESS', logy=True, color='purple', ax=ax5, sharex=True,subplots=True,legend=False)
ax6 = fig.add_subplot(616)
ax6.set_ylabel('SP')
ax6.yaxis.set_label_position("right")
log_data.plot('DEPTH', 'SP', color='darkblue', ax=ax6, sharex=True, subplots=True, legend=False)
ax6.yaxis.tick_right()
plt.xlabel("Depth")
plt.savefig("Resolution-1_well_plot.pdf")
plt.show()
# Make a Vp log. For no real reason!
# DTC is in microseconds per foot; invert the converted slowness to get m/s.
log_data['VP_m_per_s'] = 1/(log_data['DTC'] * (1e-6) /0.3048)
log_data.plot('DEPTH','VP_m_per_s', legend=False)
### Make time-depth curve by integrating the sonic log
# Convert from microseconds per ft, to seconds per meter.
log_data['DT_s_per_m'] = (log_data['DTC'] * (1e-6) * (1/0.3048))
# Fill missing values to enable the cumulative integration
# Seed the first sample with water slowness (1/1500 s/m) before interpolating.
log_data.loc[0,'DT_s_per_m'] = (1.0/1500)
log_data['DT_s_per_m'] = log_data['DT_s_per_m'].interpolate()
## Cumulativly integrate to form the time depth curve
from scipy.integrate import cumtrapz
time_s = 2*(cumtrapz(y=log_data['DT_s_per_m'], x=log_data['DEPTH'])) # x2 as log is in one-way-time
time_s += 0.1 # add in the value of the seafloor from the seismic. Entered here rather than calculated, as well is a little upslope.
plt.figure()
# cumtrapz returns len-1 samples, hence the [:-1] on depth.
plt.plot(time_s, log_data['DEPTH'][:-1])
plt.gca().invert_yaxis()
plt.title("Time-depth curve")
plt.xlabel("Time (s)")
plt.ylabel("Depth (m)")
plt.show()
np.savetxt("Resolution_1_time_depth_curve.txt", np.vstack((log_data['DEPTH'].values[:-1], time_s)).T, header="Depth_m Time_s")
| [
"[email protected]"
] | |
591cd9a8ae0ca539e11b66a5bfc9a62cff31b1b8 | 5c285b9b907604ce6721d1d5f1311688e6b57940 | /ihome/ih/utils/captcha/captcha.py | 5564c4d520cd0188d8bedb4baedd4bfaf6f7bb5e | [] | no_license | luqingfei-it-python/flask-python | 9d6f3bfdb668d41805bcea8f7068794c47a43169 | 54a05821e933b56cb6e641d658288badf8ac5ba5 | refs/heads/master | 2020-06-21T23:32:48.754019 | 2019-07-18T12:01:27 | 2019-07-18T12:01:27 | 197,578,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,963 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# refer to `https://bitbucket.org/akorn/wheezy.captcha`
import random
import string
import os.path
from io import BytesIO
from PIL import Image
from PIL import ImageFilter
from PIL.ImageDraw import Draw
from PIL.ImageFont import truetype
class Bezier:
    """Caches Bernstein coefficient tables used to rasterise Bezier curves."""

    def __init__(self):
        # 21 evenly spaced parameter values t in [0, 1].
        self.tsequence = tuple([t / 20.0 for t in range(21)])
        # n -> memoised coefficient table for make_bezier().
        self.beziers = {}

    def pascal_row(self, n):
        """Return the n-th row of Pascal's triangle as exact integers.

        Bug fix: the Python-2 era code used true division (``x /=``),
        which yields floats on Python 3 and loses precision for large n.
        The running product is always exactly divisible at each step, so
        integer division keeps the binomial coefficients exact.
        """
        result = [1]
        x, numerator = 1, n
        for denominator in range(1, n // 2 + 1):
            x = x * numerator // denominator
            result.append(x)
            numerator -= 1
        # Rows are symmetric: mirror the first half to build the rest.
        if n & 1 == 0:
            result.extend(reversed(result[:-1]))
        else:
            result.extend(reversed(result))
        return result

    def make_bezier(self, n):
        """Return (and memoise) the Bernstein basis values for n control
        points, evaluated at each t in self.tsequence.

        Bezier curves:
        http://en.wikipedia.org/wiki/B%C3%A9zier_curve#Generalization
        """
        try:
            return self.beziers[n]
        except KeyError:
            combinations = self.pascal_row(n - 1)
            result = []
            for t in self.tsequence:
                tpowers = (t ** i for i in range(n))
                upowers = ((1 - t) ** i for i in range(n - 1, -1, -1))
                coefs = [c * a * b for c, a, b in zip(combinations,
                                                      tpowers, upowers)]
                result.append(coefs)
            self.beziers[n] = result
            return result
class Captcha(object):
def __init__(self):
self._bezier = Bezier()
self._dir = os.path.dirname(__file__)
# self._captcha_path = os.path.join(self._dir, '..', 'static', 'captcha')
@staticmethod
def instance():
if not hasattr(Captcha, "_instance"):
Captcha._instance = Captcha()
return Captcha._instance
def initialize(self, width=200, height=75, color=None, text=None, fonts=None):
# self.image = Image.new('RGB', (width, height), (255, 255, 255))
self._text = text if text else random.sample(string.ascii_lowercase + string.ascii_lowercase + '3456789', 4)
self.fonts = fonts if fonts else \
[os.path.join(self._dir, 'fonts', font) for font in ['Arial.ttf', 'Georgia.ttf', 'actionj.ttf']]
self.width = width
self.height = height
self._color = color if color else self.random_color(0, 200, random.randint(220, 255))
@staticmethod
def random_color(start, end, opacity=None):
red = random.randint(start, end)
green = random.randint(start, end)
blue = random.randint(start, end)
if opacity is None:
return red, green, blue
return red, green, blue, opacity
# draw image
def background(self, image):
Draw(image).rectangle([(0, 0), image.size], fill=self.random_color(238, 255))
return image
@staticmethod
def smooth(image):
return image.filter(ImageFilter.SMOOTH)
def curve(self, image, width=4, number=6, color=None):
dx, height = image.size
dx /= number
path = [(dx * i, random.randint(0, height))
for i in range(1, number)]
bcoefs = self._bezier.make_bezier(number - 1)
points = []
for coefs in bcoefs:
points.append(tuple(sum([coef * p for coef, p in zip(coefs, ps)])
for ps in zip(*path)))
Draw(image).line(points, fill=color if color else self._color, width=width)
return image
def noise(self, image, number=50, level=2, color=None):
width, height = image.size
dx = width / 10
width -= dx
dy = height / 10
height -= dy
draw = Draw(image)
for i in range(number):
x = int(random.uniform(dx, width))
y = int(random.uniform(dy, height))
draw.line(((x, y), (x + level, y)), fill=color if color else self._color, width=level)
return image
def text(self, image, fonts, font_sizes=None, drawings=None, squeeze_factor=0.75, color=None):
color = color if color else self._color
fonts = tuple([truetype(name, size)
for name in fonts
for size in font_sizes or (65, 70, 75)])
draw = Draw(image)
char_images = []
for c in self._text:
font = random.choice(fonts)
c_width, c_height = draw.textsize(c, font=font)
char_image = Image.new('RGB', (c_width, c_height), (0, 0, 0))
char_draw = Draw(char_image)
char_draw.text((0, 0), c, font=font, fill=color)
char_image = char_image.crop(char_image.getbbox())
for drawing in drawings:
d = getattr(self, drawing)
char_image = d(char_image)
char_images.append(char_image)
width, height = image.size
offset = int((width - sum(int(i.size[0] * squeeze_factor)
for i in char_images[:-1]) -
char_images[-1].size[0]) / 2)
for char_image in char_images:
c_width, c_height = char_image.size
mask = char_image.convert('L').point(lambda i: i * 1.97)
image.paste(char_image,
(offset, int((height - c_height) / 2)),
mask)
offset += int(c_width * squeeze_factor)
return image
# draw text
@staticmethod
def warp(image, dx_factor=0.27, dy_factor=0.21):
width, height = image.size
dx = width * dx_factor
dy = height * dy_factor
x1 = int(random.uniform(-dx, dx))
y1 = int(random.uniform(-dy, dy))
x2 = int(random.uniform(-dx, dx))
y2 = int(random.uniform(-dy, dy))
image2 = Image.new('RGB',
(width + abs(x1) + abs(x2),
height + abs(y1) + abs(y2)))
image2.paste(image, (abs(x1), abs(y1)))
width2, height2 = image2.size
return image2.transform(
(width, height), Image.QUAD,
(x1, y1,
-x1, height2 - y2,
width2 + x2, height2 + y2,
width2 - x2, -y1))
@staticmethod
def offset(image, dx_factor=0.1, dy_factor=0.2):
width, height = image.size
dx = int(random.random() * width * dx_factor)
dy = int(random.random() * height * dy_factor)
image2 = Image.new('RGB', (width + dx, height + dy))
image2.paste(image, (dx, dy))
return image2
@staticmethod
def rotate(image, angle=25):
return image.rotate(
random.uniform(-angle, angle), Image.BILINEAR, expand=1)
def captcha(self, path=None, fmt='JPEG'):
"""Create a captcha.
Args:
path: save path, default None.
fmt: image format, PNG / JPEG.
Returns:
A tuple, (name, text, StringIO.value).
For example:
('fXZJN4AFxHGoU5mIlcsdOypa', 'JGW9', '\x89PNG\r\n\x1a\n\x00\x00\x00\r...')
"""
image = Image.new('RGB', (self.width, self.height), (255, 255, 255))
image = self.background(image)
image = self.text(image, self.fonts, drawings=['warp', 'rotate', 'offset'])
image = self.curve(image)
image = self.noise(image)
image = self.smooth(image)
name = "".join(random.sample(string.ascii_lowercase + string.ascii_uppercase + '3456789', 24))
text = "".join(self._text)
out = BytesIO()
image.save(out, format=fmt)
if path:
image.save(os.path.join(path, name), fmt)
return name, text, out.getvalue()
    def generate_captcha(self):
        """Re-initialize per-captcha state and render a new captcha in memory."""
        self.initialize()
        # Empty path is falsy, so the image is returned in-memory only.
        return self.captcha("")
# Module-level singleton; importers share this one Captcha instance.
captcha = Captcha.instance()
if __name__ == '__main__':
    # Manual smoke test: render a single captcha when run as a script.
    captcha.generate_captcha()
| [
"[email protected]"
] | |
ce3b0296e817c31a877a46b8b8050cf3cb6b1aa7 | d37c7303895b378b603f8b1419f6aa9f89f38b2f | /Multiplacation.py | eb09bf818b43437e04ecd567c503282f72d14866 | [] | no_license | mdaiello/Year9Design-PythonMD | 7027e92c0bdf09b8c11719b513b8f253a9c4474b | 32621487fd4bce054c4c1825f74f247f9789b885 | refs/heads/master | 2021-08-24T16:20:58.737700 | 2018-12-13T15:37:06 | 2018-12-13T15:37:06 | 148,362,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | #print("enter a number: ")
# Prompt for two integers and print their product.
first = int(input("enter a number: "))
second = int(input("enter a number: "))
print(first * second)
"[email protected]"
] | |
4867c97d7f878f81f9b9bd52d676da267f010d5a | b76615ff745c6d66803506251c3d4109faf50802 | /pyobjc-core/libffi-src/tests/run-tests.py | 56839645e43efe8b13876c8beb213c24f1e6d9b7 | [
"MIT"
] | permissive | danchr/pyobjc-git | 6ef17e472f54251e283a0801ce29e9eff9c20ac0 | 62b787fddeb381184043c7ff136f1c480755ab69 | refs/heads/master | 2021-01-04T12:24:31.581750 | 2020-02-02T20:43:02 | 2020-02-02T20:43:02 | 240,537,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | """
'test' action for setup.py
"""
# Collect the libffi DejaGnu test suite and drive it through unittest.
import sys, os, string, glob
from os.path import basename, dirname, splitext, join, expanduser, walk
# NOTE(review): os.path.walk exists only in Python 2; on Python 3 the import
# above raises ImportError — confirm the intended interpreter version.
from fnmatch import fnmatch
import unittest
import dejagnu
# Wrap the DejaGnu-backed suite in a standard unittest runner and execute it.
deja_suite = dejagnu.testSuiteForDirectory("tests/testsuite/libffi.call")
suite = unittest.TestSuite((deja_suite,))
runner = unittest.TextTestRunner(verbosity=1)
runner.run(suite)
| [
"[email protected]"
] | |
20f25bde0fda68fd993b8846c8f76daf3475d859 | f9c2687e02c538afd9e9a37f1f3f50578e6204d9 | /intake_cmip/database.py | a4d604a34f744203d0c1a8a71f8c1d24d869a7b7 | [
"Apache-2.0"
] | permissive | wydh/intake-cmip | 4ca2a9e8678fec8cb637aef8f6419875be6e4208 | f3f61e1504f2c8a7cf79d8a355894757e82be5e2 | refs/heads/master | 2021-01-01T07:53:07.922659 | 2019-03-02T04:55:53 | 2019-03-02T04:55:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,090 | py | #!/usr/bin/env python
"""Contains functions to generate CMIP5 data sets database."""
import functools
import os
import re
import shutil
from pathlib import Path
import dask.dataframe as dd
import pandas as pd
from dask import delayed
HOME = os.environ["HOME"]
INTAKE_CMIP_DIR = f"{HOME}/.intake_cmip"
@functools.lru_cache(maxsize=1024, typed=False)
def _parse_dirs(root_dir):
    """Return the CMIP5 realm directories found under *root_dir*.

    The expected on-disk layout is
    ``root/activity/institution/model/experiment/frequency/realm``; the
    result is the list of realm-level paths. Results are memoized per
    *root_dir* via ``lru_cache``.
    """
    realm_dirs = []
    # Depth-first walk over the fixed six-level hierarchy. Non-directory
    # entries are filtered at the same levels as before (institution and
    # below), so the traversal order and behavior are unchanged.
    for activity in os.listdir(root_dir):
        activity_dir = os.path.join(root_dir, activity)
        for institution in os.listdir(activity_dir):
            institution_dir = os.path.join(activity_dir, institution)
            if not os.path.isdir(institution_dir):
                continue
            for model in os.listdir(institution_dir):
                model_dir = os.path.join(institution_dir, model)
                if not os.path.isdir(model_dir):
                    continue
                for experiment in os.listdir(model_dir):
                    experiment_dir = os.path.join(model_dir, experiment)
                    if not os.path.isdir(experiment_dir):
                        continue
                    for frequency in os.listdir(experiment_dir):
                        freq_dir = os.path.join(experiment_dir, frequency)
                        if not os.path.isdir(freq_dir):
                            continue
                        for realm in os.listdir(freq_dir):
                            realm_dir = os.path.join(freq_dir, realm)
                            if os.path.isdir(realm_dir):
                                realm_dirs.append(realm_dir)
    return realm_dirs
def _get_entry(directory):
    """Derive CMIP5 facet metadata from a realm directory path.

    The last five path components are, from the end:
    realm, frequency, experiment, model, institution.
    """
    parts = directory.split("/")
    realm, frequency, experiment, model, institution = parts[-1:-6:-1]
    return {
        "realm": realm,
        "frequency": frequency,
        "experiment": experiment,
        "model": model,
        "institution": institution,
    }
@delayed
def _parse_directory(directory):
    """Build a DataFrame of metadata for every ``.nc`` file under *directory*.

    Returns a (dask-delayed) pandas DataFrame with one row per NetCDF file,
    carrying the CMIP5 facets derived from the directory plus per-file
    fields (varname, ensemble, root, file_basename, file_fullpath).
    """
    exclude = set(["files", "latests"])  # directories to exclude
    columns = [
        "ensemble",
        "experiment",
        "file_basename",
        "file_fullpath",
        "frequency",
        "institution",
        "model",
        "root",
        "realm",
        "varname",
    ]
    df = pd.DataFrame(columns=columns)
    entry = _get_entry(directory)
    for root, dirs, files in os.walk(directory):
        dirs[:] = [d for d in dirs if d not in exclude]
        if not files:
            continue
        sfiles = sorted([f for f in files if os.path.splitext(f)[1] == ".nc"])
        if not sfiles:
            continue
        fs = []
        for f in sfiles:
            try:
                f_split = f.split("_")
                # BUG FIX: copy the facet template per file. Previously the
                # same dict object was appended for every file, so all rows
                # in a root ended up with the last file's values.
                row = dict(entry)
                row["varname"] = f_split[0]
                row["ensemble"] = f_split[-2]
                row["root"] = root
                row["file_basename"] = f
                row["file_fullpath"] = os.path.join(root, f)
                fs.append(row)
            except BaseException:
                # Best-effort: skip files whose names don't match the
                # CMIP5 naming convention.
                continue
        if fs:
            # BUG FIX: select columns by key instead of positionally
            # relabeling (the old `temp_df.columns = df.columns` renamed
            # columns without moving the data, scrambling the mapping).
            temp_df = pd.DataFrame(fs, columns=columns)
        else:
            # BUG FIX: an empty frame must still carry the expected columns;
            # assigning 10 labels to a 0-column frame raised ValueError.
            temp_df = pd.DataFrame(columns=columns)
        df = pd.concat([temp_df, df], ignore_index=True, sort=False)
    return df
def _persist_database(df, INTAKE_CMIP_DIR=INTAKE_CMIP_DIR, path=None):
    """Deduplicate *df* by dataset version and write both CSVs to disk.

    Keeps the newest version of each file_basename, writes the deduplicated
    table to ``cmip5.csv`` and the raw table to ``raw_cmip5.csv`` under
    *path* (or the default intake-cmip directory), and returns the
    deduplicated DataFrame.
    """
    # Versions are either vYYYYMMDD or a single-digit vN; entries without a
    # recognizable version sort first as "v0".
    # TODO: Very dangerous in case the root dir matches the pattern
    version_re = re.compile("|".join([r"v\d{4}\d{2}\d{2}", r"v\d{1}"]))
    df["version"] = df.root.str.findall(version_re)
    df["version"] = df["version"].apply(lambda found: found[0] if found else "v0")
    sorted_df = (
        df.sort_values("version")
        .drop_duplicates(subset="file_basename", keep="last")
        .reset_index(drop=True)
    )
    target_dir = path if path else INTAKE_CMIP_DIR
    db_file = f"{target_dir}/cmip5.csv"
    raw_db_file = f"{target_dir}/raw_cmip5.csv"
    print(f"**** Persisting CMIP5 database: {db_file} ****")
    # Recreate the output directory from scratch on every persist.
    if os.path.isdir(target_dir):
        shutil.rmtree(target_dir)
    os.makedirs(target_dir, exist_ok=True)
    sorted_df.to_csv(db_file, index=False)
    df.to_csv(raw_db_file, index=False)
    return sorted_df
def create_cmip5_database(root_dir=None, db_path=None):
    """Generate and persist the database of CMIP5 data sets.

    Parameters
    ----------
    root_dir : string or directory handle, default None
        Directory path for CMIP5 root directory.
    db_path : string or directory handle, default None
        Directory path where the generated database will be saved. If
        None, the database is persisted under the home directory.

    Raises
    ------
    NotADirectoryError
        if the `root_dir` does not exist

    Returns
    -------
    pd.DataFrame
    """
    if not os.path.exists(root_dir):
        raise NotADirectoryError(f"{root_dir} does not exist")
    # One delayed DataFrame per realm directory, computed in parallel by dask.
    delayed_frames = [_parse_directory(d) for d in _parse_dirs(root_dir)]
    combined = dd.from_delayed(delayed_frames).compute()
    return _persist_database(df=combined, path=db_path)
| [
"[email protected]"
] | |
ee9d343201ef5dfbe34420fce69956f597eadadd | 0556754cd4765d05a1d831c48933c5f299bb095d | /Oct-27-2020/Exam.py | a421e7a4b0f0eb13465f746413cf40046b5aecc0 | [] | no_license | rohitbhatghare/python | 4fa5e5883743023ced841892a13a9798b7686f39 | 248d265e02ecbc1270a87081af26537eb401e535 | refs/heads/main | 2023-02-03T04:32:15.716805 | 2020-12-21T11:33:27 | 2020-12-21T11:33:27 | 302,831,244 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | #Which of the following expression involves corcion when evaluated in python?
# NOTE(review): quiz scratch file; several lines demonstrate Python 2 vs 3
# differences rather than working code.
a=1.7%2
print(a)
#What is result of cmp (3,1)?
#a=cmp(3,1)
#print(a)
# cmp() was removed in Python 3, hence the calls above are commented out.
import sys
l1=tuple()
# Size in bytes of an empty tuple (implementation-dependent).
print(sys.getsizeof(l1),end="\n")
d1=[10,20,30,40,50]
d2=[1,2,3,4,5]
# NOTE(review): lists do not support the "-" operator; this line raises
# TypeError at runtime.
print(d1-d2)
| [
"[email protected]"
] | |
00c549a45d06dafa57e35a4c376cb501be51497b | 7ae5c0b74940f322296fd2f4abac4eb3336a2943 | /fizzbuzz.py | f82102eb46663590743125c7238e783adfd934c0 | [] | no_license | LA638/FizzBuzz | 15bc81feabcf83e55efc7a5f06aded7538d208fc | e338e85b1ad5dfba1e136b47c9411d92c58fe1aa | refs/heads/main | 2023-07-04T04:53:46.690832 | 2021-08-10T19:13:12 | 2021-08-10T19:13:12 | 394,757,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | for i in range(1,100):
def fizzbuzz(i):
    """Return the FizzBuzz token for *i*.

    Multiples of 3 -> 'Fizz', multiples of 5 -> 'Buzz', multiples of
    both -> 'FizzBuzz', anything else -> the number itself as a string.
    """
    if i % 3 == 0 and i % 5 == 0:
        return 'FizzBuzz'
    if i % 3 == 0:
        return 'Fizz'
    if i % 5 == 0:
        return 'Buzz'
    return str(i)

# BUG FIX: the original printed 'Buzz' for multiples of 3 and 'Fizz' for
# multiples of 5 (labels swapped), and range(1, 100) stopped at 99 instead
# of covering 1..100.
for i in range(1, 101):
    print(fizzbuzz(i))
"[email protected]"
] | |
a5c41e9e0898096cddc7ed9a5e5614873d7b603b | b6374d1ee7120cd81698d8fb614a05e71f24933c | /Book/Greedy/백준/단어수학.py | afc54b4100059888d574a68805d4135a9fd01608 | [] | no_license | yongmon01/AlgorithmStudy | 92022442cba4e4144f98bf3852657c8c60758d0d | 7d42032065683e123477d19ee24754a9c3e26bc5 | refs/heads/master | 2023-04-12T00:25:50.342230 | 2021-05-12T15:35:42 | 2021-05-12T15:35:42 | 307,395,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | # 1339번 gold4
# BOJ 1339 (gold 4): "Word Math" — greedy digit assignment.
n = int(input())
words = []
answer = 0
for i in range(n):
    words.append(input())
# print(words)
# Accumulate each letter's positional weight: a letter k places from the
# right end of a word contributes 10**k to that letter's total weight.
chars = []
numbers = []
for word in words:
    length = len(word) - 1
    for char in word:
        if char in chars:
            index = chars.index(char)
            numbers[index] += (10 ** length)
        else:
            chars.append(char)
            numbers.append(10 ** length)
        length -= 1
# print(chars)
# print(numbers)
# Greedy: sort letters by descending total weight and hand out the digits
# 9, 8, 7, ... — the heaviest letter gets the largest digit.
answers = []
for i in range(len(chars)):
    answers.append((numbers[i], chars[i]))
answers.sort(reverse=True)
# print(answers)
for i in range(len(answers)):
    answer += (answers[i][0] * (9-i))
print(answer)
| [
"[email protected]"
] | |
ea6272e3d7bfd94f32693cd53759237e03a97ce2 | a074a34afadbc420bdecb05669d0bd046eecf86d | /实例源代码/第04章/4.3PM2.5Warning.py | 0903478cee40a3c707850db243ea31b72700627a | [] | no_license | apollowesley/Python2019_XXMU | c9a825a63bfb9a0eb45366401cc9c3fe605a31e6 | c87bf9b4245d523e43c4452d45ee9efe3ceb0663 | refs/heads/master | 2022-03-25T13:46:53.668061 | 2019-12-26T09:14:24 | 2019-12-26T09:14:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | # 4.3PM2.5Warning
# 4.3PM2.5Warning
# SECURITY FIX: eval(input(...)) executed arbitrary expressions typed by
# the user; float() safely parses a plain number instead.
PM = float(input("请输入PM2.5数值:"))
if 0 <= PM < 35:
    print("空气质量优,建议户外活动。")
elif 35 <= PM < 75:
    print("空气质量良,适度户外活动。")
else:
    print("空气污染,出门时请佩戴口罩。")
"[email protected]"
] | |
5fa5f1f76da44ffbc4a0ec0b851589cd62db6950 | cbc2f8353eb81ef6f790ce9f28c3bcb2783f082e | /binary_search_tree/singly_linked_list.py | fd07ce6d6d8b3894ea45118d49f6a1659b6a2060 | [] | no_license | manzur1990/Data-Structures | f7420cca05eb3f21a6934c8f3dd3e87815fa9a48 | cb9b8815042d6199c0d45c157d7ce8c4abbcf7ab | refs/heads/master | 2022-12-13T10:06:35.377804 | 2020-09-11T00:12:58 | 2020-09-11T00:12:58 | 293,835,463 | 0 | 0 | null | 2020-09-11T00:13:00 | 2020-09-08T14:29:42 | Python | UTF-8 | Python | false | false | 2,095 | py | class Node:
class Node:
    """A singly linked list node holding a value and a pointer to the next node."""

    def __init__(self, value=None, next=None):
        self.value = value
        self.next = next

    def __repr__(self):
        # BUG FIX: __repr__ must return a str; returning the raw value
        # raised TypeError for non-string payloads (e.g. repr(Node(5))).
        return repr(self.value)

    def get_value(self):
        """Return the stored value."""
        return self.value

    def get_next(self):
        """Return the next node, or None at the tail."""
        return self.next

    def set_next(self, new_next):
        """Point this node at *new_next*."""
        self.next = new_next
class LinkedList:
    """Singly linked list with O(1) insertion at either end."""

    def __init__(self):
        self.head = None
        self.tail = None

    def __repr__(self):
        # BUG FIX: nodes store .value, not .data, and join() requires
        # strings — coerce each value explicitly.
        node = self.head
        nodes = []
        while node is not None:
            nodes.append(str(node.value))
            node = node.next
        nodes.append("None")
        return " -> ".join(nodes)

    def add_to_head(self, value):
        """Insert *value* at the front of the list."""
        new_node = Node(value, self.head)
        self.head = new_node
        if not self.tail:
            self.tail = new_node

    def add_to_tail(self, value):
        """Append *value* at the end of the list."""
        new_node = Node(value)
        if not self.head:
            self.head = new_node
            self.tail = new_node
        else:
            self.tail.set_next(new_node)
            self.tail = new_node

    def remove_head(self):
        """Remove the head node and return its value, or None if empty."""
        if not self.head:
            return None
        removed_value = self.head.get_value()
        self.head = self.head.next
        if not self.head:
            self.tail = None
        return removed_value

    def remove_tail(self):
        """Remove the tail node and return it, or None if empty."""
        if not self.head:
            return None
        # BUG FIX: a single-element list previously left head/tail still
        # pointing at the "removed" node; detach it explicitly.
        if self.head is self.tail:
            removed = self.head
            self.head = None
            self.tail = None
            return removed
        curr = self.head
        prev = curr
        while curr.get_next() is not None:
            prev = curr
            curr = curr.get_next()
        prev.set_next(None)
        self.tail = prev
        return curr

    def get_max(self):
        """Return the maximum value in the list, or None if empty."""
        if not self.head:
            return None
        curr = self.head
        max_value = curr.get_value()
        while curr is not None:
            max_value = max(max_value, curr.get_value())
            curr = curr.get_next()
        return max_value

    def contains(self, value):
        """Return True if *value* is present, compared by equality."""
        # BUG FIX: the original used `is` (identity), which fails for
        # equal-but-distinct objects such as large ints or built strings.
        curr = self.head
        while curr is not None:
            if curr.get_value() == value:
                return True
            curr = curr.get_next()
        return False
| [
"[email protected]"
] | |
0820be805b1c29f6515a2c2a84b794b3f54cfa54 | cd74ae9acd803153b2a12a5c204a5bf53970ee1a | /chroma_instance/source/mrcnn/coco.py | 1342b993ba1e2ea50e96b6706902b48bdf227e1e | [] | no_license | owen8877/chroma-instance | 71304f33a018b0b12663313d2455a692aab27adf | d0385163c6a7b563e7f1b816cbe13763ddad4819 | refs/heads/master | 2023-02-03T17:33:08.441092 | 2020-12-21T23:54:09 | 2020-12-21T23:55:42 | 315,153,113 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,554 | py | """
Mask R-CNN
Configurations and data loading code for MS COCO.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by Waleed Abdulla
------------------------------------------------------------
Usage: import the module (see Jupyter notebooks for examples), or run from
the command line as such:
# Train a new model starting from pre-trained COCO weights
python3 coco.py train --dataset=/path/to/coco/ --model=coco
# Train a new model starting from ImageNet weights. Also auto download COCO dataset
python3 coco.py train --dataset=/path/to/coco/ --model=imagenet --download=True
# Continue training a model that you had trained earlier
python3 coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5
# Continue training the last model you trained
python3 coco.py train --dataset=/path/to/coco/ --model=last
    # Run COCO evaluation on the last model you trained
python3 coco.py evaluate --dataset=/path/to/coco/ --model=last
"""
import os
import sys
import time
import numpy as np
# import imgaug # https://github.com/aleju/imgaug (pip3 install imgaug)
#
# Download and install the Python COCO tools from https://github.com/waleedka/coco
# That's a fork from the original https://github.com/pdollar/coco with a bug
# fix for Python 3.
# I submitted a pull request https://github.com/cocodataset/cocoapi/pull/50
# If the PR is merged then use the original repo.
# Note: Edit PythonAPI/Makefile and replace "python" with "python3".
# from pycocotools.coco import COCO
# from pycocotools.cocoeval import COCOeval
# from pycocotools import mask as maskUtils
import zipfile
import urllib.request
import shutil
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn.config import Config
from mrcnn import model as modellib, utils
# Path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
DEFAULT_DATASET_YEAR = "2014"
############################################################
# Configurations
############################################################
class CocoConfig(Config):
    """Configuration for training on MS COCO.
    Derives from the base Config class and overrides values specific
    to the COCO dataset.
    """
    # Give the configuration a recognizable name
    NAME = "coco"
    # We use a GPU with 12GB memory, which can fit two images.
    # Adjust down if you use a smaller GPU.
    IMAGES_PER_GPU = 2
    # Uncomment to train on 8 GPUs (default is 1)
    # GPU_COUNT = 8
    # Number of classes (including background)
    NUM_CLASSES = 1 + 80  # COCO has 80 classes
    # All remaining hyperparameters are inherited unchanged from Config.
############################################################
# Dataset
############################################################
class CocoDataset(utils.Dataset):
    """Dataset wrapper exposing MS COCO to the Mask R-CNN training code.

    NOTE(review): this class calls COCO/maskUtils from pycocotools, but the
    corresponding imports at the top of this file are commented out —
    confirm they are re-enabled (or provided elsewhere) before using it.
    """
    def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None,
                  class_map=None, return_coco=False, auto_download=False):
        """Load a subset of the COCO dataset.
        dataset_dir: The root directory of the COCO dataset.
        subset: What to load (train, val, minival, valminusminival)
        year: What dataset year to load (2014, 2017) as a string, not an integer
        class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
            different datasets to the same class ID.
        return_coco: If True, returns the COCO object.
        auto_download: Automatically download and unzip MS-COCO images and annotations
        """
        if auto_download is True:
            self.auto_download(dataset_dir, subset, year)
        coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
        # minival/valminusminival share the val images directory.
        if subset == "minival" or subset == "valminusminival":
            subset = "val"
        image_dir = "{}/{}{}".format(dataset_dir, subset, year)
        # Load all classes or a subset?
        if not class_ids:
            # All classes
            class_ids = sorted(coco.getCatIds())
        # All images or a subset?
        if class_ids:
            image_ids = []
            for id in class_ids:
                image_ids.extend(list(coco.getImgIds(catIds=[id])))
            # Remove duplicates
            image_ids = list(set(image_ids))
        else:
            # All images
            image_ids = list(coco.imgs.keys())
        # Add classes
        for i in class_ids:
            self.add_class("coco", i, coco.loadCats(i)[0]["name"])
        # Add images
        for i in image_ids:
            self.add_image(
                "coco", image_id=i,
                path=os.path.join(image_dir, coco.imgs[i]['file_name']),
                width=coco.imgs[i]["width"],
                height=coco.imgs[i]["height"],
                annotations=coco.loadAnns(coco.getAnnIds(
                    imgIds=[i], catIds=class_ids, iscrowd=None)))
        if return_coco:
            return coco
    def auto_download(self, dataDir, dataType, dataYear):
        """Download the COCO dataset/annotations if requested.
        dataDir: The root directory of the COCO dataset.
        dataType: What to load (train, val, minival, valminusminival)
        dataYear: What dataset year to load (2014, 2017) as a string, not an integer
        Note:
            For 2014, use "train", "val", "minival", or "valminusminival"
            For 2017, only "train" and "val" annotations are available
        """
        # Setup paths and file names
        if dataType == "minival" or dataType == "valminusminival":
            imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
        else:
            imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
        # print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)
        # Create main folder if it doesn't exist yet
        if not os.path.exists(dataDir):
            os.makedirs(dataDir)
        # Download images if not available locally
        if not os.path.exists(imgDir):
            os.makedirs(imgDir)
            print("Downloading images to " + imgZipFile + " ...")
            with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
                shutil.copyfileobj(resp, out)
            print("... done downloading.")
            print("Unzipping " + imgZipFile)
            with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
                zip_ref.extractall(dataDir)
            print("... done unzipping")
        print("Will use images in " + imgDir)
        # Setup annotations data paths
        annDir = "{}/annotations".format(dataDir)
        if dataType == "minival":
            annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
            annFile = "{}/instances_minival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
            unZipDir = annDir
        elif dataType == "valminusminival":
            annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
            annFile = "{}/instances_valminusminival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
            unZipDir = annDir
        else:
            annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
            annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
            annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
            unZipDir = dataDir
        # print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
        # Download annotations if not available locally
        if not os.path.exists(annDir):
            os.makedirs(annDir)
        if not os.path.exists(annFile):
            if not os.path.exists(annZipFile):
                print("Downloading zipped annotations to " + annZipFile + " ...")
                with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
                    shutil.copyfileobj(resp, out)
                print("... done downloading.")
            print("Unzipping " + annZipFile)
            with zipfile.ZipFile(annZipFile, "r") as zip_ref:
                zip_ref.extractall(unZipDir)
            print("... done unzipping")
        print("Will use annotations in " + annFile)
    def load_mask(self, image_id):
        """Load instance masks for the given image.
        Different datasets use different ways to store masks. This
        function converts the different mask format to one format
        in the form of a bitmap [height, width, instances].
        Returns:
        masks: A bool array of shape [height, width, instance count] with
            one mask per instance.
        class_ids: a 1D array of class IDs of the instance masks.
        """
        # If not a COCO image, delegate to parent class.
        image_info = self.image_info[image_id]
        if image_info["source"] != "coco":
            return super(CocoDataset, self).load_mask(image_id)
        instance_masks = []
        class_ids = []
        annotations = self.image_info[image_id]["annotations"]
        # Build mask of shape [height, width, instance_count] and list
        # of class IDs that correspond to each channel of the mask.
        for annotation in annotations:
            class_id = self.map_source_class_id(
                "coco.{}".format(annotation['category_id']))
            if class_id:
                m = self.annToMask(annotation, image_info["height"],
                                   image_info["width"])
                # Some objects are so small that they're less than 1 pixel area
                # and end up rounded out. Skip those objects.
                if m.max() < 1:
                    continue
                # Is it a crowd? If so, use a negative class ID.
                if annotation['iscrowd']:
                    # Use negative class ID for crowds
                    class_id *= -1
                    # For crowd masks, annToMask() sometimes returns a mask
                    # smaller than the given dimensions. If so, resize it.
                    # NOTE(review): this "resize" actually replaces the mask
                    # with an all-ones array — confirm that is intended.
                    if m.shape[0] != image_info["height"] or m.shape[1] != image_info["width"]:
                        m = np.ones([image_info["height"], image_info["width"]], dtype=bool)
                instance_masks.append(m)
                class_ids.append(class_id)
        # Pack instance masks into an array
        if class_ids:
            # NOTE(review): np.bool is removed in NumPy >= 1.24; use plain
            # bool if the NumPy dependency is upgraded.
            mask = np.stack(instance_masks, axis=2).astype(np.bool)
            class_ids = np.array(class_ids, dtype=np.int32)
            return mask, class_ids
        else:
            # Call super class to return an empty mask
            return super(CocoDataset, self).load_mask(image_id)
    def image_reference(self, image_id):
        """Return a link to the image in the COCO Website."""
        info = self.image_info[image_id]
        if info["source"] == "coco":
            return "http://cocodataset.org/#explore?id={}".format(info["id"])
        else:
            super(CocoDataset, self).image_reference(image_id)
    # The following two functions are from pycocotools with a few changes.
    def annToRLE(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE to RLE.
        :return: binary mask (numpy 2D array)
        """
        segm = ann['segmentation']
        if isinstance(segm, list):
            # polygon -- a single object might consist of multiple parts
            # we merge all parts into one mask rle code
            rles = maskUtils.frPyObjects(segm, height, width)
            rle = maskUtils.merge(rles)
        elif isinstance(segm['counts'], list):
            # uncompressed RLE
            rle = maskUtils.frPyObjects(segm, height, width)
        else:
            # rle
            rle = ann['segmentation']
        return rle
    def annToMask(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask.
        :return: binary mask (numpy 2D array)
        """
        rle = self.annToRLE(ann, height, width)
        m = maskUtils.decode(rle)
        return m
############################################################
# COCO Evaluation
############################################################
def build_coco_results(dataset, image_ids, rois, class_ids, scores, masks):
    """Arrange detection results to match the COCO result format.

    See http://cocodataset.org/#format for the expected fields. Boxes are
    converted from [y1, x1, y2, x2] to COCO's [x, y, width, height].
    """
    if rois is None:
        # Nothing detected: the COCO spec expects an empty result list.
        return []
    results = []
    for image_id in image_ids:
        for i in range(rois.shape[0]):
            y1, x1, y2, x2 = np.around(rois[i], 1)
            results.append({
                "image_id": image_id,
                "category_id": dataset.get_source_class_id(class_ids[i], "coco"),
                "bbox": [x1, y1, x2 - x1, y2 - y1],
                "score": scores[i],
                "segmentation": maskUtils.encode(np.asfortranarray(masks[:, :, i])),
            })
    return results
def evaluate_coco(model, dataset, coco, eval_type="bbox", limit=0, image_ids=None):
    """Run the official COCO evaluation on a dataset.

    Args:
        model: a Mask R-CNN model in inference mode.
        dataset: a Dataset object with validation data.
        coco: the pycocotools COCO object the dataset was loaded from.
        eval_type: "bbox" or "segm" for bounding box / segmentation metrics.
        limit: if not 0, it's the number of images to use for evaluation.
        image_ids: explicit image ids to evaluate; defaults to the dataset.
    """
    eval_ids = image_ids or dataset.image_ids
    if limit:
        eval_ids = eval_ids[:limit]
    # Map internal dataset ids to the ids COCO knows about.
    coco_image_ids = [dataset.image_info[i]["id"] for i in eval_ids]
    t_prediction = 0
    t_start = time.time()
    results = []
    for idx, image_id in enumerate(eval_ids):
        image = dataset.load_image(image_id)
        t = time.time()
        r = model.detect([image], verbose=0)[0]
        t_prediction += (time.time() - t)
        # Cast masks to uint8 because COCO tools errors out on bool.
        results.extend(build_coco_results(dataset, coco_image_ids[idx:idx + 1],
                                          r["rois"], r["class_ids"],
                                          r["scores"],
                                          r["masks"].astype(np.uint8)))
    # loadRes modifies the results with additional attributes.
    coco_results = coco.loadRes(results)
    cocoEval = COCOeval(coco, coco_results, eval_type)
    cocoEval.params.imgIds = coco_image_ids
    cocoEval.evaluate()
    cocoEval.accumulate()
    cocoEval.summarize()
    print("Prediction time: {}. Average {}/image".format(
        t_prediction, t_prediction / len(eval_ids)))
    print("Total time: ", time.time() - t_start)
############################################################
# Training
############################################################
if __name__ == '__main__':
    import argparse
    # Parse command line arguments
    parser = argparse.ArgumentParser(
        description='Train Mask R-CNN on MS COCO.')
    parser.add_argument("command",
                        metavar="<command>",
                        help="'train' or 'evaluate' on MS COCO")
    parser.add_argument('--dataset', required=True,
                        metavar="/path/to/coco/",
                        help='Directory of the MS-COCO dataset')
    parser.add_argument('--year', required=False,
                        default=DEFAULT_DATASET_YEAR,
                        metavar="<year>",
                        help='Year of the MS-COCO dataset (2014 or 2017) (default=2014)')
    parser.add_argument('--model', required=True,
                        metavar="/path/to/weights.h5",
                        help="Path to weights .h5 file or 'coco'")
    parser.add_argument('--logs', required=False,
                        default=DEFAULT_LOGS_DIR,
                        metavar="/path/to/logs/",
                        help='Logs and checkpoints directory (default=logs/)')
    parser.add_argument('--limit', required=False,
                        default=500,
                        metavar="<image count>",
                        help='Images to use for evaluation (default=500)')
    # NOTE(review): type=bool on an argparse option makes ANY non-empty
    # string (including "False") parse as True — confirm intended behavior.
    parser.add_argument('--download', required=False,
                        default=False,
                        metavar="<True|False>",
                        help='Automatically download and unzip MS-COCO files (default=False)',
                        type=bool)
    args = parser.parse_args()
    print("Command: ", args.command)
    print("Model: ", args.model)
    print("Dataset: ", args.dataset)
    print("Year: ", args.year)
    print("Logs: ", args.logs)
    print("Auto Download: ", args.download)
    # Configurations
    if args.command == "train":
        config = CocoConfig()
    else:
        class InferenceConfig(CocoConfig):
            # Set batch size to 1 since we'll be running inference on
            # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
            GPU_COUNT = 1
            IMAGES_PER_GPU = 1
            DETECTION_MIN_CONFIDENCE = 0
        config = InferenceConfig()
    config.display()
    # Create model
    if args.command == "train":
        model = modellib.MaskRCNN(mode="training", config=config,
                                  model_dir=args.logs)
    else:
        model = modellib.MaskRCNN(mode="inference", config=config,
                                  model_dir=args.logs)
    # Select weights file to load
    if args.model.lower() == "coco":
        model_path = COCO_MODEL_PATH
    elif args.model.lower() == "last":
        # Find last trained weights
        model_path = model.find_last()
    elif args.model.lower() == "imagenet":
        # Start from ImageNet trained weights
        model_path = model.get_imagenet_weights()
    else:
        model_path = args.model
    # Load weights
    print("Loading weights ", model_path)
    model.load_weights(model_path, by_name=True)
    # Train or evaluate
    if args.command == "train":
        # Training dataset. Use the training set and 35K from the
        # validation set, as in the Mask RCNN paper.
        dataset_train = CocoDataset()
        dataset_train.load_coco(args.dataset, "train", year=args.year, auto_download=args.download)
        # NOTE(review): `args.year in '2014'` is a substring test (so "2014",
        # "201", "4", ... all match), not an equality check — confirm.
        if args.year in '2014':
            dataset_train.load_coco(args.dataset, "valminusminival", year=args.year, auto_download=args.download)
        dataset_train.prepare()
        # Validation dataset
        dataset_val = CocoDataset()
        val_type = "val" if args.year in '2017' else "minival"
        dataset_val.load_coco(args.dataset, val_type, year=args.year, auto_download=args.download)
        dataset_val.prepare()
        # Image Augmentation
        # Right/Left flip 50% of the time
        augmentation = imgaug.augmenters.Fliplr(0.5)
        # *** This training schedule is an example. Update to your needs ***
        # Training - Stage 1
        print("Training network heads")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE,
                    epochs=40,
                    layers='heads',
                    augmentation=augmentation)
        # Training - Stage 2
        # Finetune layers from ResNet stage 4 and up
        print("Fine tune Resnet stage 4 and up")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE,
                    epochs=120,
                    layers='4+',
                    augmentation=augmentation)
        # Training - Stage 3
        # Fine tune all layers
        print("Fine tune all layers")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE / 10,
                    epochs=160,
                    layers='all',
                    augmentation=augmentation)
    elif args.command == "evaluate":
        # Validation dataset
        dataset_val = CocoDataset()
        val_type = "val" if args.year in '2017' else "minival"
        coco = dataset_val.load_coco(args.dataset, val_type, year=args.year, return_coco=True, auto_download=args.download)
        dataset_val.prepare()
        print("Running COCO evaluation on {} images.".format(args.limit))
        evaluate_coco(model, dataset_val, coco, "bbox", limit=int(args.limit))
    else:
        print("'{}' is not recognized. "
              "Use 'train' or 'evaluate'".format(args.command))
| [
"[email protected]"
] | |
3b41378302a107c20743347f7770a2781913138e | 2816e6e548ebe053a7cbf84d51f095a31faf4d1a | /apps/gallery/forms.py | c2a1d7ba8839ac8dd761e4fca3384469d4e4c7b4 | [
"MIT"
] | permissive | F483/bikesurf.org | 906b902d3f76a1a67fc8ad9c3b7852bf33f77bc6 | c85680f7443920a4dbf95e24f58e0a095c3da42f | refs/heads/master | 2020-05-22T07:02:29.699828 | 2017-12-06T14:41:37 | 2017-12-06T14:41:37 | 25,440,210 | 7 | 3 | MIT | 2017-12-06T14:41:38 | 2014-10-19T21:06:05 | HTML | UTF-8 | Python | false | false | 432 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Fabian Barkhau <[email protected]>
# License: MIT (see LICENSE.TXT file)
from django.utils.translation import ugettext as _
from django.forms import Form
from django.forms import ImageField
from django.forms import ModelChoiceField
class Create(Form):
    """Form for creating a gallery: a single required image upload."""
    image = ImageField(label=_("IMAGE"))
class Add(Form):
    """Form for adding one more image to an existing gallery."""
    image = ImageField(label=_("IMAGE"))
| [
"devnull@localhost"
] | devnull@localhost |
8464b0da53163520361858e1ce4ee1cf43b1ae29 | 1f1931b2d9d02b9b8dfa8b08883134f664e642fd | /celery_task.py | f86bc796de70367f2f206cebcdceea4a4c77eb59 | [] | no_license | saketkc/iitb-library-sms-interface | 5dd96efe0fd2adbe244226998f726ea604f3f9f9 | 90c99e785059d2101d1ea7f946653bf0852a5c4b | refs/heads/master | 2020-05-17T07:10:08.021416 | 2015-01-10T02:01:49 | 2015-01-10T02:01:49 | 2,713,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | from celery import Celery
# Celery application wired to a local RabbitMQ broker (default guest account).
celery = Celery('celery_task', broker='amqp://guest@localhost//')
@celery.task
def add(x, y):
    """Demo task executed by a Celery worker: return the sum of x and y."""
    return x + y
| [
"[email protected]"
] | |
ec62bf2f298a1cce55afc00cd8894a74153dace7 | 10ef21efddff149e061b179f7c4334da4053a560 | /16/b.py | 53934393c4f8e1b7c691d5f8102c71b74cb7f83f | [] | no_license | leninexe/aoc2020 | 0e6e95e65dbdf29d00375625d6d53002878113aa | 619ac47ae2a70a191d4dcec9dc817ad387ec7e9b | refs/heads/main | 2023-02-12T23:45:33.294200 | 2020-12-29T16:49:34 | 2020-12-29T16:49:34 | 317,458,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,988 | py | # Read Input
# Advent of Code 2020, day 16 part 2: deduce which rule corresponds to
# each ticket field, then multiply together my ticket's values for all
# fields whose rule name starts with "departure".

# Parser states for the three sections of the input file.
RULES = 0
MYTICKET = 1
NEARBYTICKETS = 2

rules = {}            # rule name -> list of [min, max] valid ranges
allRuleValues = []    # flat list of every [min, max] range from any rule
myTicket = []
nearbyTickets = []
state = RULES

# Parse the input.  `with` guarantees the file handle is closed (the
# original opened it and never closed it).
with open("input.txt", "r") as f:
    for line in f:
        if line.strip() == "":
            continue
        if line.strip() == "your ticket:":
            state = MYTICKET
            continue
        elif line.strip() == "nearby tickets:":
            state = NEARBYTICKETS
            continue
        if state == RULES:
            # e.g. "departure location: 31-538 or 546-960"
            splitter = line.split(":")
            pairs = splitter[1].split("or")
            values = []
            for pair in pairs:
                minValue = int(pair.strip().split("-")[0])
                maxValue = int(pair.strip().split("-")[1])
                values.append([minValue, maxValue])
                allRuleValues.append([minValue, maxValue])
            rules[splitter[0].strip()] = values
        elif state == MYTICKET:
            for i in line.split(","):
                myTicket.append(int(i.strip()))
        elif state == NEARBYTICKETS:
            ticket = []
            for i in line.split(","):
                ticket.append(int(i.strip()))
            nearbyTickets.append(ticket)

# Keep only nearby tickets where every field satisfies at least one
# range of at least one rule.
validNearbyTickets = []
for ticket in nearbyTickets:
    ticketValid = True
    for number in ticket:
        valid = False
        for ruleValues in allRuleValues:
            if number >= ruleValues[0] and number <= ruleValues[1]:
                valid = True
                break
        ticketValid &= valid
    if ticketValid: validNearbyTickets.append(ticket)

# Deduce the rule -> column assignment by elimination: repeatedly find a
# column with exactly one remaining candidate rule and lock it in.
ruleToIndex = {}
availableRules = list(rules.keys())
while len(availableRules) > 0:
    for i in range(len(myTicket)):
        possibleRules = []
        for ruleKey in availableRules:
            ruleValues = rules[ruleKey]
            ruleValid = True
            for ticket in validNearbyTickets:
                number = ticket[i]
                valid = False
                for ruleValue in ruleValues:
                    if number >= ruleValue[0] and number <= ruleValue[1]:
                        valid = True
                ruleValid &= valid
            if ruleValid:
                possibleRules.append(ruleKey)
        if len(possibleRules) == 1:
            ruleToIndex[possibleRules[0]] = i
            availableRules.remove(possibleRules[0])

# Answer: product of my ticket's values over all "departure" fields.
multiplier = 1
for ruleKey in ruleToIndex.keys():
    if ruleKey.startswith("departure"):
        multiplier *= myTicket[ruleToIndex[ruleKey]]
print(multiplier)
"[email protected]"
] | |
fa0a2f641f100abcaa6f9f73ad39bdbf64aa0dcb | 741a89b9dc609e288042c74b482dc30d84e302d6 | /pynance/quandl_codes.py | 56308c87bd88bbf266df9587f968bb90abac6c4f | [
"MIT"
] | permissive | stena/pynance | 60efa66cfc5bfea01cfe2ab5d18af8a0f2205dc5 | a56bbb9e4c8bf2f1cb3948370c8faa4b0027b1c8 | refs/heads/master | 2021-01-18T05:36:33.907772 | 2014-03-21T22:57:19 | 2014-03-21T22:57:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,461 | py |
import pandas as pd
from . settings import CODE_DIR
class CodeDownloader(object):
    '''
    Downloads the Quandl codes for updating local copies.
    params:
        drop_inactive: False to keep inactive stocks, default True
        update_all: True to automatically save all codes, default False
    '''
    # Base URLs of Quandl's static ticker lists hosted on S3.
    root = 'https://s3.amazonaws.com/quandl-static-content/Ticker+CSV%27s/'
    ext_root = root + 'Stock+Exchanges/'
    yahoo = root + 'Yahoo/'
    google = root + 'Google/'

    def __init__(self, update_all=False, drop_inactive=True):
        self.drop_inactive = drop_inactive
        if update_all:
            # Download every code list and persist it under CODE_DIR.
            self.detailed_info().to_csv(
                CODE_DIR + 'stockinfo.csv', index_label='Ticker'
            )
            self.indicies().to_csv(
                CODE_DIR + 'Indicies.csv', index_label='Ticker'
            )
            self.etfs().to_csv(
                CODE_DIR + 'ETFs.csv', index_label='Ticker'
            )
            self.mutual_funds().to_csv(
                CODE_DIR + 'funds.csv', index_label='Ticker'
            )
            self.nasdaq().to_csv(
                CODE_DIR + 'NASDAQ.csv', index_label='Ticker'
            )
            self.nyse().to_csv(
                CODE_DIR + 'NYSE.csv', index_label='Ticker'
            )
            self.nyse_amex().to_csv(
                CODE_DIR + 'AMEX.csv', index_label='Ticker'
            )
            self.pink_sheets().to_csv(
                CODE_DIR + 'PINK.csv', index_label='Ticker'
            )
            self.otc().to_csv(
                CODE_DIR + 'OTC.csv', index_label='Ticker'
            )
            self.fundamentals().to_csv(
                CODE_DIR + 'quandl-stock-code-list.csv', index_label='Ticker'
            )

    def _exchange_codes(self, filename):
        '''
        Shared helper for the per-exchange lists (NASDAQ/NYSE/AMEX/PINK/
        OTC): downloads ext_root + filename and normalizes the column
        labels to ['Quandl Code', 'Name'].  Replaces five copy-pasted
        method bodies.
        '''
        url = self.ext_root + filename
        codes = pd.read_csv(url, index_col='Ticker', skipinitialspace=True)
        codes.columns = pd.Index([u'Quandl Code', u'Name'], dtype=object)
        return codes

    def detailed_info(self):
        '''
        Per-ticker company / industry / exchange / SIC info, indexed by
        ticker.  Stocks that no longer trade are dropped when
        drop_inactive is True.
        '''
        url = self.ext_root + 'stockinfo.csv'
        cols = ['Quandl Code', 'Company Name',
                'Industry Name', 'Exchange Name', 'SIC Code']
        data = pd.read_csv(
            url, index_col=0, header=None, names=cols, skipinitialspace=True
        )
        if self.drop_inactive:
            # Boolean indexing keeps the same rows as the original
            # dict round-trip but preserves row order and dtypes.
            data = data[data['Exchange Name'] != 'Stock no longer trades']
        return data

    def indicies(self):
        '''Index ticker list, indexed by ticker.'''
        url = self.ext_root + 'Indicies.csv'
        return pd.read_csv(url, index_col='Ticker', skipinitialspace=True)

    def etfs(self):
        '''ETF ticker list, indexed by ticker.'''
        url = self.root + 'ETFs.csv'
        return pd.read_csv(url, index_col='Ticker', skipinitialspace=True)

    def mutual_funds(self):
        '''Mutual-fund ticker list, indexed by ticker.'''
        url = self.ext_root + 'funds.csv'
        return pd.read_csv(url, index_col='Ticker', skipinitialspace=True)

    def nasdaq(self):
        '''NASDAQ ticker list.'''
        return self._exchange_codes('NASDAQ.csv')

    def nyse(self):
        '''NYSE ticker list.'''
        return self._exchange_codes('NYSE.csv')

    def nyse_amex(self):
        '''NYSE AMEX ticker list.'''
        return self._exchange_codes('AMEX.csv')

    def pink_sheets(self):
        '''Pink-sheets ticker list.'''
        return self._exchange_codes('PINK.csv')

    def otc(self):
        '''OTC ticker list.'''
        return self._exchange_codes('OTC.csv')

    def fundamentals(self):
        '''
        Full Quandl stock-code list (fundamentals coverage).  Inactive
        tickers are dropped when drop_inactive is True.
        '''
        url = 'https://s3.amazonaws.com/quandl-static-content/quandl-stock-code-list.csv'
        data = pd.read_csv(url, index_col='Ticker', skipinitialspace=True)
        if self.drop_inactive:
            data = data[data['In Market?'] == 'Active']
        return data
| [
"[email protected]"
] | |
ec997f8e91dbe9a343222f2dd56270fefa1c7fe7 | f76644aaf809aed68005fb4982dc7f3c9a1c3245 | /src/synthcontrol/multisyntheticControl.py | 5d1c51ae8566e78a01f1d7743af0a06671b15dbd | [] | no_license | jehangiramjad/tslib | dfcce12fc07edcd0f81df891a0cefa0ba14bacfa | 3e50bc1fbe0178bf2c1b21b2ce9fd0f0ca2d5f76 | refs/heads/master | 2021-12-27T01:20:42.640103 | 2021-12-14T00:17:46 | 2021-12-14T00:17:46 | 125,408,481 | 43 | 29 | null | 2021-07-07T12:28:55 | 2018-03-15T18:14:20 | Python | UTF-8 | Python | false | false | 5,494 | py | ################################################################
#
# MultiDimensional Robust Synthetic Control (mRSC)
#
# Implementation based on:
# url forthcoming (Paper to appear in ACM Sigmetrics 2019)
# (http://dna-pubs.cs.columbia.edu/citation/paperfile/230/mRSC.pdf)
#
################################################################
import numpy as np
import pandas as pd
from tslib.src.synthcontrol.syntheticControl import RobustSyntheticControl
from tslib.src import tsUtils
class MultiRobustSyntheticControl(RobustSyntheticControl):
    """Multi-dimensional Robust Synthetic Control (mRSC).

    Stacks the donor matrices of nbrMetrics metrics side by side (each
    scaled by sqrt(weight)) and fits a single RSC model on the combined
    matrix.

    Reference: paper in ACM Sigmetrics 2019,
    http://dna-pubs.cs.columbia.edu/citation/paperfile/230/mRSC.pdf
    """

    # nbrMetrics: (int) the number of metrics of interest
    # weightsArray: (array) weight to scale each metric by; its length
    #     must equal nbrMetrics
    # seriesToPredictKey: (string) the series of interest (key)
    # kSingularValuesToKeep: (int) the number of singular values to retain
    # M: (int) the number of matrix columns for EACH metric
    #     (total matrix columns = nbrMetrics * M)
    # probObservation: (float) independent observation probability per entry
    # modelType: (string) SVD or ALS. Default is "SVD"
    # svdMethod: (string) the SVD method to use (optional)
    # otherSeriesKeysArray: (array) keys of the donor series used to predict
    def __init__(self, nbrMetrics, weightsArray, seriesToPredictKey,
                 kSingularValuesToKeep, M, probObservation=1.0,
                 modelType='svd', svdMethod='numpy',
                 otherSeriesKeysArray=None):
        # Avoid the mutable-default-argument pitfall of the original
        # (otherSeriesKeysArray=[]); None means "no donor series given".
        if otherSeriesKeysArray is None:
            otherSeriesKeysArray = []

        # mRSC-specific parameters.
        self.nbrMetrics = nbrMetrics
        self.weightsArray = weightsArray
        if (len(self.weightsArray) != self.nbrMetrics):
            raise Exception('len(weightsArray) must equal self.nbrMetrics')
        self.combinedM = self.nbrMetrics * M

        # Initialize the single-metric RSC base class on the combined width.
        super(MultiRobustSyntheticControl, self).__init__(
            seriesToPredictKey, kSingularValuesToKeep, self.combinedM,
            probObservation, modelType, svdMethod, otherSeriesKeysArray)

    def combineMetrics(self, arrayOfKeyToSeriesDF, isForTraining):
        """Concatenate the per-metric series, scaled by sqrt(weight),
        into one combined DataFrame.

        arrayOfKeyToSeriesDF: (array) one keyToSeriesDF per metric; its
            length must equal self.nbrMetrics and its order must stay
            consistent between fit() and predict().
        isForTraining: (bool) True during fit() (include the target
            series), False during predict() (donor series only).

        Fix vs. original: the caller's DataFrames are no longer mutated
        in place, so passing the same list to fit() and then predict()
        no longer scales the data twice.
        """
        if (len(arrayOfKeyToSeriesDF) != self.nbrMetrics):
            raise Exception('len(arrayOfKeyToSeriesDF) must equal self.nbrMetrics')

        if (isForTraining):
            allKeys = [self.seriesToPredictKey] + self.otherSeriesKeysArray
        else:
            allKeys = self.otherSeriesKeysArray

        # DataFrame.multiply returns a new frame, leaving the caller's
        # objects untouched.
        scaledDFs = [df.multiply(np.sqrt(self.weightsArray[i]))
                     for i, df in enumerate(arrayOfKeyToSeriesDF)]

        dataDict = {}
        for k in allKeys:
            dataArray = []
            for mInd in range(0, self.nbrMetrics):
                dataArray = dataArray + list(scaledDFs[mInd][k].values)
            dataDict.update({k: dataArray})

        return pd.DataFrame(data=dataDict)

    def fit(self, arrayOfKeyToSeriesDF):
        """Fit the model on the combined (stacked) metric matrix.

        All keys provided in the constructor MUST be present in each
        keyToSeriesDF; values must be numpy arrays of floats.
        """
        super(MultiRobustSyntheticControl, self).fit(
            self.combineMetrics(arrayOfKeyToSeriesDF, True))

    def predict(self, arrayOfKeyToSeriesDFNew):
        """Predict and return an array of prediction arrays, one per
        metric, in the same metric order as the input array.

        Each keyToSeriesDFNew must contain all donor keys; series must
        have length >= 1 (the most recent point is used).
        """
        allPredictions = super(MultiRobustSyntheticControl, self).predict(
            self.combineMetrics(arrayOfKeyToSeriesDFNew, False))

        # Split the concatenated predictions back into per-metric slices.
        # Integer division is exact here (the combined length is a
        # multiple of nbrMetrics), unlike the original float round-trip.
        sliceLen = len(allPredictions) // self.nbrMetrics
        return [allPredictions[i * sliceLen:(i + 1) * sliceLen]
                for i in range(self.nbrMetrics)]
| [
"[email protected]"
] | |
6722a9b9453e1622abe8e3e1a020e4c2fc321f6c | 5534f801c20dc23dcf9bd533c6143865724ced98 | /myspider/myspider/spiders/cnblog.py | e34b8d5b1f5295fec4cb1d039494cf92a5593922 | [] | no_license | bopopescu/CnblogSpider | 0d55bfedf307dd8fdbc93bcfe2bda0acf26bb422 | c5b6ca3d44003df44a65dae3bd80a566ac322eda | refs/heads/master | 2022-11-19T17:55:03.569494 | 2019-11-29T09:05:00 | 2019-11-29T09:05:00 | 281,827,422 | 0 | 0 | null | 2020-07-23T02:11:42 | 2020-07-23T02:11:41 | null | UTF-8 | Python | false | false | 3,700 | py | # -*- coding: utf-8 -*-
from urllib import parse
import re
import json
from ..until import common
from ..items import CnBlogSpiderItem
from ..items import BlogItemLoad
import scrapy
from scrapy import Request
class CnblogSpider(scrapy.Spider):
    """Crawl news.cnblogs.com: list pages -> article detail pages ->
    per-article AJAX counter endpoint, yielding one populated item per
    article."""
    name = 'cnblog'
    allowed_domains = ['news.cnblogs.com']
    start_urls = ['http://news.cnblogs.com/']

    def parse(self, response):
        """Scrape the news list page and follow pagination."""
        # Crawl the entries of the current list page.
        post_nodes = response.xpath('//div[@id="news_list"]/div[@class="news_block"]')
        for post_node in post_nodes:
            img_url = post_node.css('.entry_summary '
                                    'a img::attr(src)').extract_first("")
            # Protocol-relative URLs ("//host/...") need a scheme.
            if img_url.startswith('//'):
                img_url = 'https:' + img_url
            post_url = post_node.css('.news_entry a::attr(href)').extract_first("")
            # Hand each detail-page URL to Scrapy; parse_detail runs
            # asynchronously as responses arrive (after, or interleaved
            # with, this loop).
            yield Request(url=parse.urljoin(response.url, post_url),
                          meta={'front_img_url': img_url}, callback=self.parse_detail)

        # Follow the "Next >" link so parse() keeps crawling the listing.
        next_url = response.xpath('//div[@class="pager"]'
                                  '//a[contains(text(), "Next >")]/@href')\
            .extract_first("")
        yield Request(url=parse.urljoin(response.url, next_url), callback=self.parse)

    def parse_detail(self, response):
        """Scrape one article page into an item, then request the AJAX
        endpoint carrying its comment/view/digg counters."""
        # Raw string avoids the invalid-escape warning for "\d".
        match_re = re.match(r".*?(\d+)", response.url)
        if match_re:
            # ItemLoader keeps the many add_* extractions compact.
            item_loads = BlogItemLoad(CnBlogSpiderItem(), response=response)
            item_loads.add_css('title', '#news_title a::text')
            item_loads.add_css('content', '#news_content')
            item_loads.add_css('source', '#come_from a::text')
            item_loads.add_xpath('create_at', '//div[@id="news_info"]//*[@class="time"]/text()')
            item_loads.add_xpath('tags', '//div[@id="news_more_info"]/div[1]/a/text()')
            if response.meta.get('front_img_url', []):
                item_loads.add_value('front_img_url', response.meta.get('front_img_url', []))
            item_loads.add_value('url', response.url)
            content_id = match_re.group(1)
            # AJAX endpoint returning the digg / view / comment counters.
            post_url = parse.urljoin(response.url,
                                     '/NewsAjax/GetAjaxNewsInfo?contentId={}'
                                     .format(content_id))
            yield Request(url=post_url,
                          meta={'blog_item_load': item_loads, 'url': response.url},
                          callback=self.parse_nums)

    def parse_nums(self, response):
        """Parse the counter JSON and finalize the item."""
        blog_item_load = response.meta.get('blog_item_load', '')
        url = response.meta.get('url', '')
        # Fix vs. original: the parsed payload was bound to a local named
        # `re`, shadowing the re module inside this method.
        counters = json.loads(response.text)
        comment_count = counters['CommentCount']
        total_view = counters['TotalView']
        digg_count = counters['DiggCount']
        url_object_id = common.get_md5(url)
        blog_item_load.add_value('url_object_id', url_object_id)
        blog_item_load.add_value('comment_count', comment_count)
        blog_item_load.add_value('total_view', total_view)
        blog_item_load.add_value('digg_count', digg_count)
        cn_blog_item = blog_item_load.load_item()
        yield cn_blog_item
"[email protected]"
] | |
7c3ae826950b35181067f2b7290ddc9b2585c016 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03200/s585925926.py | c1062b8838cc3d962a99491bab1f1f068f90380d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | import sys
import heapq
import math
import fractions
import bisect
import itertools
from collections import Counter
from collections import deque
from operator import itemgetter
def input(): return sys.stdin.readline().strip()
def mp(): return map(int,input().split())
def lmp(): return list(map(int,input().split()))
s=input()
n=len(s)
l=[0]*n
c=0
for i in range(n):
if s[-1-i]=="W":
c+=1
l[-1-i]=c
ans=0
for i in range(n):
if s[i]=="B":
ans+=l[i]
print(ans) | [
"[email protected]"
] | |
d21cfa20224009963ac2e9606235c88c68bb82df | a3499ceb68a997e65fc72bac480435054b676d1c | /exercises/P-1.30.py | 33d3934ab5051e7044a0f039de4543936569878c | [] | no_license | fhqvst/practice-python | 8a74ecc469121fd6b7ffe4c774d3ef3e46a1a0b6 | ba9e40fdf5194313e2d5e52d42dfc17edfd94eb2 | refs/heads/master | 2021-03-30T16:51:11.179215 | 2019-02-12T16:42:45 | 2019-02-12T16:42:45 | 87,622,030 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | def times_until_max_two(n):
if n <= 2:
raise ValueException()
times = 0
while n >= 2:
n = n / 2
times += 1
return times
assert times_until_max_two(4) == 2
assert times_until_max_two(5) == 2
assert times_until_max_two(17) == 4
| [
"[email protected]"
] | |
171455922a6106760930cd7ff9cdb0462e58a76a | b6df1d42719a55c1d2158ca2d65f6deec6d463b3 | /changeservername.py | b8f0e492081d88e564276e9338cad2eb2d1b4d7b | [] | no_license | badmetacoder/trystack-api-examples-python | 7785a429bfe73009637c4f2c83ebef00c36ca85d | 261bf7fedc5ff26b8e2e9113f5bbdbc9778fc16c | refs/heads/master | 2021-01-20T01:37:50.354143 | 2012-03-19T23:46:53 | 2012-03-19T23:46:53 | 3,769,987 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,025 | py | #!/usr/bin/python
import base64
import httplib
import json
import urllib
from urlparse import urlparse
##
## arguments
##
## make sure that url is set to the actual hostname/IP address,
## port number
url = "nova-api.trystack.org:5443"
## make sure that osuser is set to your actual username, "admin"
## works for test installs on virtual machines, but it's a hack
osuser = "userName"
## use something else than "shhh" for you password
ospassword = "userPassword"
params = '{"auth":{"passwordCredentials":{"username":"%s", "password":"%s"}}}' % (osuser, ospassword)
headers = {"Content-Type": "application/json"}
##
## HTTPS connection no. 1
##
conn = httplib.HTTPSConnection(url)
conn.request("POST", "/v2.0/tokens", params, headers)
##
## response no. 1
##
response = conn.getresponse()
data = response.read()
dd = json.loads(data)
conn.close()
apitokenid = dd['access']['token']['id']
apitokentenantid = dd['access']['token']['tenant']['id']
apitokentenantname = dd['access']['token']['tenant']['name']
print json.dumps(dd, indent=2)
print "Your token ID is: %s" % apitokenid
print "Your token tenant ID is: %s" % apitokentenantid
print "Your token tenant name is: %s" % apitokentenantname
sc = dd['access']['serviceCatalog']
n = 0
m = range(len(sc))
foundNovaURL = False
for i in m:
ss = sc[i]['name']
if ss == 'nova':
apiurl = sc[i]['endpoints'][0]['publicURL']
print "Your Nova URL: %s" % apiurl
foundNovaURL = True
break
if foundNovaURL == False:
print "No Nova URL found!"
exit()
apiurlt = urlparse(apiurl)
##
## HTTPS connection no. 2
##
serverid = "1214"
params2 = json.dumps({"server":{"name":"webserv"}})
headers2 = { "X-Auth-Token":apitokenid, "Accept":"application/json", "Content-type":"application/json"}
conn2 = httplib.HTTPSConnection(apiurlt[1])
conn2.request("PUT", "%s/servers/%s" % (apiurlt[2], serverid), params2, headers2)
##
## response no. 2
##
response2 = conn2.getresponse()
data2 = response2.read()
dd2 = json.loads(data2)
print json.dumps(dd2, indent=2)
| [
"[email protected]"
] | |
50b037d5b78a8550e382a1c81995fbd90b6aa790 | 0e3516a8f2b2099c244058c98b3658a78e65f440 | /cart/cart.py | 7c008039ba3d1f8e4aa3e8c18d8a19f7ed1bfd21 | [] | no_license | hryhoryeu/BeautyShop3 | 01cedcf4bbf2b1e7658ca595ff0ee7652377856b | 34a9fd06f9d7269daad1dc1f0d7da7a94f38edea | refs/heads/master | 2023-08-23T11:00:55.706862 | 2021-10-20T14:34:05 | 2021-10-20T14:34:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,811 | py | from decimal import Decimal
from django.conf import settings
from shop.models import Product
class Cart(object):
    """Session-backed shopping cart.

    Contents are stored in request.session under settings.CART_SESSION_ID
    as {product_id(str): {'quantity': int, 'price': str}} so the payload
    stays JSON-serializable.
    """

    def __init__(self, request):
        """Bind the cart to the current session, creating an empty one
        on first use."""
        self.session = request.session
        cart = self.session.get(settings.CART_SESSION_ID)
        if not cart:
            cart = self.session[settings.CART_SESSION_ID] = {}
        self.cart = cart

    def add(self, product, quantity=1, update_quantity=False):
        """Add `quantity` units of `product`; with update_quantity=True
        the stored quantity is overwritten instead of incremented."""
        product_id = str(product.id)
        if product_id not in self.cart:
            # Price is stored as str so the session stays serializable.
            self.cart[product_id] = {
                'quantity': 0,
                'price': str(product.price)
            }
        if update_quantity:
            self.cart[product_id]['quantity'] = quantity
        else:
            self.cart[product_id]['quantity'] += quantity
        self.save()

    def save(self):
        """Persist the cart and mark the session as modified."""
        self.session[settings.CART_SESSION_ID] = self.cart
        self.session.modified = True

    def remove(self, product):
        """Remove `product` from the cart entirely, if present."""
        product_id = str(product.id)
        if product_id in self.cart:
            del self.cart[product_id]
            self.save()

    def __iter__(self):
        """Yield cart items enriched with the Product instance, a Decimal
        price and a computed total_price.

        Fix vs. original: iterates over copies of the stored entries
        instead of mutating self.cart, which used to inject model
        instances and Decimal objects into the session payload.
        """
        product_ids = self.cart.keys()
        products = Product.objects.filter(id__in=product_ids)
        # Shallow-copy each entry so session data is never mutated.
        items = {pid: dict(entry) for pid, entry in self.cart.items()}
        for product in products:
            items[str(product.id)]['product'] = product
        for item in items.values():
            item['price'] = Decimal(item['price'])
            item['total_price'] = item['price'] * item['quantity']
            yield item

    def __len__(self):
        """Total number of units (sum of quantities) in the cart."""
        return sum(item['quantity'] for item in self.cart.values())

    def get_total_price(self):
        """Grand total of the cart as a Decimal."""
        return sum(Decimal(item['price']) * item['quantity']
                   for item in self.cart.values())

    def clear(self):
        """Drop the whole cart from the session."""
        del self.session[settings.CART_SESSION_ID]
        self.session.modified = True
| [
"[email protected]"
] | |
8aaae136f8303ef17e647a1b01d30c5f3c33de70 | ddd1c991e50915bcb4395fdeb550ac64bce7ddb6 | /organisations/migrations/0001_initial.py | 99892b1f3a588401bb2165a29a98ec7da8d3c1e9 | [] | no_license | subhasish1/TrackDebtor | 9fdbf0fc42ed210d5ef3ef9a37b2463131ddcee5 | 15fb29df94daa5e47b91f407ee0ffed55bf28e0f | refs/heads/master | 2022-11-18T08:38:56.936893 | 2020-07-06T06:56:51 | 2020-07-06T06:56:51 | 277,460,343 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | # Generated by Django 2.1.5 on 2019-04-25 12:29
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Organisations table.
    # All fields are plain CharFields (no email/phone validation at the
    # schema level); orgcc at max_length=500 presumably holds a list of
    # CC addresses — confirm against the model definition.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Organisations',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('orgid', models.CharField(max_length=3)),
                ('orgname', models.CharField(max_length=50)),
                ('orgemail', models.CharField(max_length=100)),
                ('orgcc', models.CharField(max_length=500)),
                ('orgsendername', models.CharField(max_length=50)),
                ('orgsenderphn', models.CharField(max_length=13)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
1fc718965928f072879159faf870330a5c0a4927 | b144c5142226de4e6254e0044a1ca0fcd4c8bbc6 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/egtps5s8sgwendpoint_7cca43311ebbad01182a1518ea4a2358.py | b1d3f55dea1112d69b49f02307234b3af142f287 | [
"MIT"
] | permissive | iwanb/ixnetwork_restpy | fa8b885ea7a4179048ef2636c37ef7d3f6692e31 | c2cb68fee9f2cc2f86660760e9e07bd06c0013c2 | refs/heads/master | 2021-01-02T17:27:37.096268 | 2020-02-11T09:28:15 | 2020-02-11T09:28:15 | 239,721,780 | 0 | 0 | NOASSERTION | 2020-02-11T09:20:22 | 2020-02-11T09:20:21 | null | UTF-8 | Python | false | false | 72,885 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class EgtpS5S8SgwEndpoint(Base):
"""Range settings generated by SMPluginGen
The EgtpS5S8SgwEndpoint class encapsulates a list of egtpS5S8SgwEndpoint resources that is be managed by the user.
A list of resources can be retrieved from the server using the EgtpS5S8SgwEndpoint.find() method.
The list can be managed by the user by using the EgtpS5S8SgwEndpoint.add() and EgtpS5S8SgwEndpoint.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'egtpS5S8SgwEndpoint'
    def __init__(self, parent):
        # Delegates construction to Base; `parent` is the owning REST
        # node this endpoint hangs off of.
        super(EgtpS5S8SgwEndpoint, self).__init__(parent)
    @property
    def EnbS5S8SecondaryRange(self):
        """An instance of the EnbS5S8SecondaryRange class.
        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.enbs5s8secondaryrange_bc92ddd654ae0624d6dafc03ded7f11b.EnbS5S8SecondaryRange)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE(review): function-local import — presumably to avoid
        # import cycles among the generated sibling modules; confirm.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.enbs5s8secondaryrange_bc92ddd654ae0624d6dafc03ded7f11b import EnbS5S8SecondaryRange
        return EnbS5S8SecondaryRange(self)
    @property
    def MmeS5S8SecondaryRange(self):
        """An instance of the MmeS5S8SecondaryRange class.
        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.mmes5s8secondaryrange_ffd7722d7b239144b3eee48f12644f81.MmeS5S8SecondaryRange)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Lazy import, same pattern as EnbS5S8SecondaryRange above.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.mmes5s8secondaryrange_ffd7722d7b239144b3eee48f12644f81 import MmeS5S8SecondaryRange
        return MmeS5S8SecondaryRange(self)
    @property
    def Range(self):
        """An instance of the Range class.
        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.range_dc0692841071a5423b42625723da18b6.Range)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Lazy import, same pattern as EnbS5S8SecondaryRange above.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.range_dc0692841071a5423b42625723da18b6 import Range
        return Range(self)
    @property
    def UeS5S8SecondaryRange(self):
        """An instance of the UeS5S8SecondaryRange class.
        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.ues5s8secondaryrange_e22c1c3ae278f21bb536f2108c32ec7d.UeS5S8SecondaryRange)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Lazy import, same pattern as EnbS5S8SecondaryRange above.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.ues5s8secondaryrange_e22c1c3ae278f21bb536f2108c32ec7d import UeS5S8SecondaryRange
        return UeS5S8SecondaryRange(self)
    @property
    def Name(self):
        """Name of range
        Returns:
            str
        """
        # Proxies the REST attribute 'name' through the Base accessor.
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        # Writes go through Base as well; the change is pushed via update().
        self._set_attribute('name', value)
    @property
    def ObjectId(self):
        """Unique identifier for this object
        Returns:
            str
        """
        # Read-only: no setter is generated for objectId.
        return self._get_attribute('objectId')
    def update(self, Name=None):
        """Updates a child instance of egtpS5S8SgwEndpoint on the server.
        Args:
            Name (str): Name of range
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE: locals() forwards the keyword arguments as a dict, so the
        # parameter names are part of the wire contract — do not rename.
        self._update(locals())
    def add(self, Name=None):
        """Adds a new egtpS5S8SgwEndpoint node on the server and retrieves it in this instance.
        Args:
            Name (str): Name of range
        Returns:
            self: This instance with all currently retrieved egtpS5S8SgwEndpoint data using find and the newly added egtpS5S8SgwEndpoint data available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # Same locals() convention as update().
        return self._create(locals())
    def remove(self):
        """Deletes all the egtpS5S8SgwEndpoint data in this instance from server.
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()
    def find(self, Name=None, ObjectId=None):
        """Finds and retrieves egtpS5S8SgwEndpoint data from the server.
        All named parameters support regex and can be used to selectively retrieve egtpS5S8SgwEndpoint data from the server.
        By default the find method takes no parameters and will retrieve all egtpS5S8SgwEndpoint data from the server.
        Args:
            Name (str): Name of range
            ObjectId (str): Unique identifier for this object
        Returns:
            self: This instance with matching egtpS5S8SgwEndpoint data retrieved from the server available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() forwards the filter arguments; parameter names are
        # part of the wire contract — do not rename.
        return self._select(locals())
    def read(self, href):
        """Retrieves a single instance of egtpS5S8SgwEndpoint data from the server.
        Args:
            href (str): An href to the instance to be retrieved
        Returns:
            self: This instance with the egtpS5S8SgwEndpoint data from the server available through an iterator or index
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
def Abort(self, *args, **kwargs):
    """Executes the abort operation on the server.

    Aborts protocols on all selected plugins.

    The IxNetwork modeling infrastructure allows multiple method signatures
    with the same name, which python does not; the modeling signatures map to
    the python *args list as follows:
        abort()
        abort(Arg2:enum)

    Args:
        args[0] is Arg2 (str(async|sync)): selects synchronous or asynchronous execution

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Positional args become Arg2, Arg3, ...; keyword args pass through verbatim.
    payload = {"Arg1": self}
    for position, value in enumerate(args, start=2):
        payload['Arg%s' % position] = value
    payload.update(kwargs)
    return self._execute('abort', payload=payload, response_object=None)
def CustomProtocolStack(self, *args, **kwargs):
    """Executes the customProtocolStack operation on the server.

    Creates a custom protocol stack under /vport/protocolStack.

    Modeling signature:
        customProtocolStack(Arg2:list, Arg3:enum)

    Args:
        args[0] is Arg2 (list(str)): List of plugin types to be added in the new custom stack
        args[1] is Arg3 (str(kAppend|kMerge|kOverwrite)): Append, merge or overwrite existing protocol stack

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Build the exec payload: Arg1 is this node, positional args map to Arg2+.
    payload = {"Arg1": self}
    for index, arg in enumerate(args):
        payload['Arg%s' % (index + 2)] = arg
    payload.update(kwargs)
    return self._execute('customProtocolStack', payload=payload, response_object=None)
def DisableProtocolStack(self, *args, **kwargs):
    """Executes the disableProtocolStack operation on the server.

    Disables a protocol under protocolStack using the class name.

    Modeling signature:
        disableProtocolStack(Arg2:string)string

    Args:
        args[0] is Arg2 (str): Protocol class name to disable

    Returns:
        str: Status of the exec

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # This exec targets the node's href rather than the node object itself.
    payload = {"Arg1": self.href}
    for offset, arg in enumerate(args):
        payload['Arg%s' % (offset + 2)] = arg
    for name, value in kwargs.items():
        payload[name] = value
    return self._execute('disableProtocolStack', payload=payload, response_object=None)
def EnableProtocolStack(self, *args, **kwargs):
    """Executes the enableProtocolStack operation on the server.

    Enables a protocol under protocolStack using the class name.

    Modeling signature:
        enableProtocolStack(Arg2:string)string

    Args:
        args[0] is Arg2 (str): Protocol class name to enable

    Returns:
        str: Status of the exec

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # This exec targets the node's href rather than the node object itself.
    payload = {"Arg1": self.href}
    for offset, arg in enumerate(args):
        payload['Arg%s' % (offset + 2)] = arg
    payload.update(kwargs)
    return self._execute('enableProtocolStack', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Negotiate sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork modeling infrastructure allows for multiple method Signatures with the same name while python does not.
The following correlates the modeling Signatures to the python *args variable length list:
start()
start(Arg2:enum)
Args:
args[0] is Arg2 (str(async|sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpo
int,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLa
cEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vpor
t/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpEnbEndpoint,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/
atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeEndpoint,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampContr
olRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/
vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRout
er/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRou
ter/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/p
rotocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint,/vport/protocolStack/ethernet
/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeEndpoint,/vport/protocolStack/ethernet/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/
ethernet/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethe
rnet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpo
int,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
"""Executes the stop operation on the server.
Teardown sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork modeling infrastructure allows for multiple method Signatures with the same name while python does not.
The following correlates the modeling Signatures to the python *args variable length list:
stop()
stop(Arg2:enum)
Args:
args[0] is Arg2 (str(async|sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpo
int,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLa
cEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vpor
t/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpEnbEndpoint,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/
atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeEndpoint,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampContr
olRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/
vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRout
er/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRou
ter/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/p
rotocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint,/vport/protocolStack/ethernet
/ip/egtpEnbEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpEnbEndpoint/ueSecondaryRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpMmeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeEndpoint,/vport/protocolStack/ethernet/ip/egtpUeEndpoint/range/amtRange,/vport/protocolStack/
ethernet/ip/egtpUeEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethe
rnet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpo
int,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
| [
"[email protected]"
] | |
ee8226563ded79d046d52aabbd165af80529bdd3 | e39f2e519cd1c90c4b75d837c082c04d841f793b | /jerry1.py | 0dbe0c1eb4d23411786fd01ba87854af9b63d3ee | [] | no_license | jerinraj04/python-1 | 6429ef4c29655e5b960450ab5a03a33f5d0b6e31 | 715d4f9ccb6ad76f4a6072596cb399c58486997a | refs/heads/master | 2020-03-29T11:46:28.080615 | 2018-09-22T13:04:03 | 2018-09-22T13:04:03 | 149,870,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | string=raw_input("Enter string:")
# Character count is just the length of the input; the original loop
# incremented once per character, which is equivalent to len().
char = len(string)
# The original counted words as (number of spaces) + 1; str.count gives
# the same total in one pass.
word = string.count(' ') + 1
print("number of words in the string:")
print(word)
print("number of characters in the string:")
print(char)
| [
"[email protected]"
] | |
dbd8f57f1245d93c00dbd707ae6208c7a8cf22a0 | 383c7be9f33fccc810e61a569cfbde528c231504 | /pygman/__init__.py | fa828be2abafb4c1a3fe69d29314c922932d2969 | [
"MIT"
] | permissive | pygman/pygman | b913863f12267f3791f892df58330e489ef3d9a7 | ea2048d32b81c061054f46e6074fe3acca0e6921 | refs/heads/master | 2021-01-01T18:35:43.370299 | 2017-07-26T07:23:52 | 2017-07-26T07:23:52 | 98,371,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | from pygman.pygman import *
print("pygman")  # side effect on package import: announces the package was loaded
| [
"[email protected]"
] | |
98bfad8407fb960dd57b859c14d8758161bfa5cb | 9bc49b84ab0aa5c150475f72c622d7878fcadc1a | /Week2_Exercise.py | bdf0eaee800a69198e12335a04ced1a3bdb15c89 | [
"Apache-2.0"
] | permissive | eamonnofarrell/Python_exercises | 95cf962e555974d4664b2efaede88e18ade5dd1f | 0e37cb794b67c0b3142c19701fcc22c2a379b876 | refs/heads/master | 2021-05-03T04:39:51.183120 | 2018-04-04T15:32:42 | 2018-04-04T15:32:42 | 120,620,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | #Eamonn O'Farell
# Week2 Fibonacci excercise
#define function fib which returns the nth Fibonacci number
def fib(n):
    """Return the nth Fibonacci number (fib(1) == fib(2) == 1).

    For n <= 0 the running pair never advances and 0 is returned,
    matching the original implementation.
    """
    previous, current = 0, 1
    for _ in range(n):
        previous, current = current, previous + current
    return previous
# Derive a Fibonacci index from the surname: sum the code points of the
# first and last letters, then report the corresponding Fibonacci number.
name = "OFarrell"
first, last = name[0], name[-1]
firstno, lastno = ord(first), ord(last)
x = firstno + lastno
ans = fib(x)
print("My surname is", name)
print("The first letter", first, "is number", firstno)
print("The last letter", last, "is number", lastno)
print("Fibonacci number", x, "is", ans)
| [
"[email protected]"
] | |
fea4e556a5158fd5e70824c08cf412332c23c555 | 363f22d5d9437dc2114429a43c96ae321433e527 | /python/web_dev/udemy/python_and_django/practice/section_23_first_clone_project/mysite/blog/migrations/0002_auto_20200527_2147.py | f3779dff83ffe1564f37201b87bf3380a8e68368 | [] | no_license | bennergarrett/learning_resources | 763f6c808488541eb7a67551df7fa144d9bb97fe | 09232361b3e8286188c0bc91fd7a8a77dbfaaa04 | refs/heads/master | 2022-12-28T10:10:56.642848 | 2020-09-02T16:56:03 | 2020-09-02T16:56:03 | 259,700,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | # Generated by Django 3.0.3 on 2020-05-27 21:47
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated schema migration for the blog app.

    Alters the auto-now-style date fields on Comment and Post to carry a
    new default value.
    """
    # Must be applied after the app's initial migration.
    dependencies = [
        ('blog', '0001_initial'),
    ]
    operations = [
        # NOTE(review): the defaults below are timestamps frozen at
        # makemigrations time -- presumably the models use a callable such
        # as timezone.now; harmless inside a migration, but worth confirming
        # against the model definitions.
        migrations.AlterField(
            model_name='comment',
            name='created_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 5, 27, 21, 47, 37, 946280, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='post',
            name='create_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 5, 27, 21, 47, 37, 945647, tzinfo=utc)),
        ),
    ]
| [
"[email protected]"
] | |
4b6d92200a691c871f465f85d680742b1a8d5b6c | 2ed0ab730b62665b3a36841ab006eea961116f87 | /Graph/Dijkstra/ShortestPathToGetAllKeys.py | 6981d8c9e48e408da298a2d155b43757028e1190 | [] | no_license | scarlettlite/hackathon | 0f0a345d867b9e52823f10fe67c6ec210a40945f | 179ba9038bbed4d48cb2f044fd8430cf2be2bab3 | refs/heads/master | 2021-07-04T00:55:17.665292 | 2019-03-04T09:10:59 | 2019-03-04T09:10:59 | 141,269,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,495 | py | """https://leetcode.com/problems/shortest-path-to-get-all-keys/"""
from collections import deque
import heapq
class Solution:
    def getstartingpoint(self, grid):
        """Locate the entrance and count the keys in the grid.

        Args:
            grid: list of equal-length strings; '@' marks the start,
                'a'-'f' are keys, 'A'-'F' doors, '#' walls, '.' floor.

        Returns:
            (row, col, key_count) of the '@' cell; row/col are -1 when
            no '@' is present.
        """
        sr, sc = -1, -1
        ck = 0
        for i, row in enumerate(grid):
            for j, cell in enumerate(row):
                if cell == '@':
                    sr, sc = i, j
                elif cell.isalpha() and cell.islower():
                    ck += 1
        return sr, sc, ck

    def shortestPathAllKeys(self, grid):
        """Return the length of the shortest walk collecting every key.

        Breadth-first search over (row, col, keys-held) states. Fixes over
        the previous revision:
          * keys are tracked as a bitmask instead of an order-dependent
            string, so states holding the same key set are deduplicated
            (at most 2**6 masks vs up to 6! collection orders);
          * the visited set is seeded with the same 3-tuple shape it is
            queried with (the old 4-tuple seed could never match);
          * a grid with no keys now returns 0 (nothing to collect) instead
            of -1.

        Returns -1 when the keys cannot all be reached.
        """
        if not grid:
            return -1
        sr, sc, ck = self.getstartingpoint(grid)
        # assumes the grid contains an '@' cell -- TODO confirm caller contract
        if ck == 0:
            return 0
        # Bitmask with one bit per key actually present in the grid.
        target = 0
        for row in grid:
            for cell in row:
                if 'a' <= cell <= 'f':
                    target |= 1 << (ord(cell) - ord('a'))
        m, n = len(grid), len(grid[0])
        queue = deque([(sr, sc, 0, 0)])  # (row, col, distance, keys held)
        seen = {(sr, sc, 0)}
        while queue:
            r, c, d, keys = queue.popleft()
            cell = grid[r][c]
            if 'a' <= cell <= 'f':
                # Pick up the key on this cell before expanding neighbours.
                keys |= 1 << (ord(cell) - ord('a'))
                if keys == target:
                    return d
            for dr, dc in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                nr, nc = r + dr, c + dc
                if not (0 <= nr < m and 0 <= nc < n):
                    continue
                nxt = grid[nr][nc]
                if nxt == '#':
                    continue
                # A door is passable only once its matching key is held.
                if 'A' <= nxt <= 'F' and not keys & (1 << (ord(nxt) - ord('A'))):
                    continue
                state = (nr, nc, keys)
                if state not in seen:
                    seen.add(state)
                    queue.append((nr, nc, d + 1, keys))
        return -1
print(Solution().shortestPathAllKeys(["@.CaA","..B#.",".c..b"]))
| [
"[email protected]"
] | |
80aacac32448d446c63021de1e47d6798d1414e1 | a9f4434d3b410886ffc10aa5aede3634692152b6 | /0218/ex2-3 copy.py | dbc837d5c71d31b90f8ecfffbca531181a8e84ae | [] | no_license | parka01/python_ex | d3690dcd8753864c335bf7782553719a072bd01d | a5811487516eb9ef86d5ae93e9060cac267b87ce | refs/heads/main | 2023-03-13T08:35:03.837790 | 2021-02-26T03:40:41 | 2021-02-26T03:40:41 | 339,892,972 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | num='1234'
num2=int(num)
i1=num2//1000
num3=num2%1000
i2=num3//100
num4=num3%100
i3=num4//10
i4=num4%10
sum=i1+i2+i3+i4
print(sum)
| [
"[email protected]"
] | |
a2b33f7d08037a1065cdb10f64850c8a9d0eea23 | a37505371408e327fd3b0f9ffccc9f39d913788f | /recursion/factorial.py | 3d2d0ac3b3815ebf9ee40fc86b8efefbec7835b2 | [] | no_license | jcohen66/python-sorting | ccbe54e8913a85a4f2c6edc04f1e19ca3c439cf0 | d5ef4572e8f94edc6e4d871702605a9e1b7ae61d | refs/heads/master | 2023-03-17T08:33:23.982201 | 2021-03-11T22:06:32 | 2021-03-11T22:06:32 | 332,294,311 | 0 | 0 | null | 2021-02-16T23:19:09 | 2021-01-23T19:45:05 | Python | UTF-8 | Python | false | false | 123 | py | '''
>>> fact(5)
120
'''
def fact(n):
    """Return the factorial of ``n``, computed iteratively.

    Any ``n <= 1`` (including 0 and negatives) yields 1, matching the
    recursive base case.
    """
    result = 1
    while n > 1:
        result *= n
        n -= 1
    return result
"[email protected]"
] | |
7278ad1c0c712bdec3506217781e18d26dce8907 | 21eac518308dd2f2865db1fa5fd685260ea7b792 | /src/metapack/constants.py | 83ec54d23b65a8ef6bff4c8f43c8ba1b77bfa327 | [
"MIT"
] | permissive | Metatab/metapack | 22d3dabd1520d48bf4e032502580f611cd7c7c1a | f3d5d8402bd506f1018d17e690f3104835c924d4 | refs/heads/master | 2023-04-06T10:12:26.323488 | 2023-03-24T21:07:50 | 2023-03-24T21:07:50 | 78,872,898 | 6 | 1 | BSD-3-Clause | 2019-05-12T16:47:47 | 2017-01-13T17:59:08 | Python | UTF-8 | Python | false | false | 223 | py | # Copyright (c) 2017 Civic Knowledge. This file is licensed under the terms of the
# Revised BSD License, included in this distribution as LICENSE
PACKAGE_PREFIX = '_packages'
MATERIALIZED_DATA_PREFIX='_materialized_data'
| [
"[email protected]"
] | |
c80e7ad091fb326eecafb01fa88b3103b3bb9e2b | fa52ee094776f69f0214ffbe24281a9736eaaa40 | /solutions/129_Sum_Root_to_Leaf_Numbers_4.py | f9caec53630913df1e0dcc822819af46a3f47902 | [] | no_license | hank08tw/CodeFromLeetcode | 57898916c2b903b1ecbc3d0ed063b36531d74e93 | 41b2936600dd392627a4f6e146559739bb88da45 | refs/heads/master | 2021-05-03T04:26:28.239904 | 2015-06-07T17:31:06 | 2015-06-07T17:31:06 | 120,615,373 | 1 | 0 | null | 2018-02-07T12:58:40 | 2018-02-07T12:58:39 | null | UTF-8 | Python | false | false | 681 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # @param {TreeNode} root
    # @return {integer}
    def sumNumbers(self, root):
        """Sum every root-to-leaf number (LeetCode 129), iteratively via BFS.

        Each node's value is folded into the running number as
        parent * 10 + child; values of leaf nodes are accumulated into the
        total.  Returns 0 for an empty tree (the original crashed on None).

        Note: mutates the node values in place, as the original did.
        """
        if not root:
            return 0
        queue = [root]
        total = 0
        while queue:
            node = queue.pop(0)
            if node.left:
                # Bug fix: the running number for the LEFT child must use the
                # left child's own value (the original used node.right.val).
                node.left.val = node.val * 10 + node.left.val
                queue.append(node.left)
            if node.right:
                node.right.val = node.val * 10 + node.right.val
                # Bug fix: the original never enqueued the right child, so
                # right subtrees below depth 1 were silently dropped.
                queue.append(node.right)
            if not node.left and not node.right:
                total += node.val
        return total
"[email protected]"
] | |
3f5a8e94679d23696673ba1c45fe49c2c256b76b | 25682654993a0e94c31c4d8367f7ba919696341d | /althy/settings.py | 4b2ef057f9cdc0521a582e03bc0411c777d1f445 | [] | no_license | cjcls-isctept/Althy | 8b73de2b03ef050b5d8aa21538ae9d477c9dcb7c | f39ceb1a910fc222cbd7e36b9e6f276b270a88fd | refs/heads/master | 2021-06-05T18:28:30.075900 | 2016-10-24T13:42:06 | 2016-10-24T13:42:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,310 | py | """
Django settings for althy project.
Generated by 'django-admin startproject' using Django 1.9.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sd%rk2q^fgfd4#j-647%am)9yz#ll7e+41zb+g1ec@=1t_gg&s'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'website.apps.WebsiteConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'althy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'althy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3','NAME': os.path.join(BASE_DIR,'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/website/static/'
MEDIA_URL = '/website/static/website/images/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'website/static/website/images/')
| [
"[email protected]"
] | |
1a5cdbcbfa4f1a4eb4512056da10c12cdb4f6fa6 | fc13a625638bd671c9384f6abd4b67190a766dc9 | /python/controlled-cart-pendulum.py | 5deea05d85069083c47b54ea9bb4eed767a8634a | [] | no_license | Martinsoyyo/inverted-pendulum | 04ed6c271d2b376c44d74e4d9f53a99b2d32f17b | 114e5881d5d42f897228091013fced8d26882c59 | refs/heads/master | 2023-08-21T16:32:50.692889 | 2021-09-23T15:39:58 | 2021-09-23T15:39:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,990 | py | """
Simulation of controlling the inverted pendulum on a cart with state controller.
Equations:
th'' = (g * sin(th) - u * cos(th)) / L,
u = Kp_th * th + Kd_th * th' + Kp_x * (x - x0) + Kd_x * x'
System:
th' = Y,
Y' = (g * sin(th) - u * cos(th)) / L,
x' = Z,
Z' = u = Kp_th * th + Kd_th * Y + Kp_x * (x - x0) + Kd_x * Z,
State:
[th, Y, x, Z]
"""
import numpy as np
import matplotlib
matplotlib.use('TKAgg')
import matplotlib.pyplot as pp
import scipy.integrate as integrate
import matplotlib.animation as animation
from matplotlib.patches import Rectangle
from math import pi, trunc
from numpy import sin, cos
def trim(x, step):
    """Quantize ``x`` toward zero to an integer multiple of ``step``."""
    return step * trunc(x / step)
# physical constants
g = 9.8
L = 1.5
m = 0.5
# simulation time
dt = 0.05
Tmax = 5
t = np.arange(0.0, Tmax, dt)
# initial conditions
Y = .0 # pendulum angular velocity
th = pi/10 # pendulum angle
x = .0 # cart position
x0 = 0 # desired cart position
Z = .0 # cart velocity
precision = 0.006
k = 1000.0 # Kalman filter coefficient
Kp_th = 50
Kd_th = 15
Kp_x = 3.1
Kd_x = 4.8
state = np.array([th, Y, x, Z, trim(th, precision), .0])
def step(t):
    """Piecewise-constant reference signal for the cart set-point.

    0 before t=5, then 1 until t=10, then -0.5 until t=15, then 0 again.
    (Currently unused: the ``x0 = step(t)`` call in ``derivatives`` is
    commented out.)
    """
    if t < 5:
        return .0
    if t < 10:
        return 1.
    if t < 15:
        return -0.5
    return .0
def derivatives(state, t):
    """Right-hand side of the closed-loop cart-pendulum ODE, for odeint.

    State layout: [theta, theta', x, x'].  The control input ``u`` is a PD
    law on both the pendulum angle and the cart position error, and is
    applied directly as the cart acceleration (x'' = u), matching the
    equations in the module docstring.

    Reads module-level globals: g, L (plant), Kp_th, Kd_th, Kp_x, Kd_x
    (controller gains) and x0 (cart set-point).
    """
    ds = np.zeros_like(state)
    _th = state[0]  # pendulum angle theta
    _Y = state[1]   # angular velocity theta'
    _x = state[2]   # cart position
    _Z = state[3]   # cart velocity
    # x0 = step(t)  # (disabled) time-varying set-point experiment
    u = Kp_th * _th + Kd_th * _Y + Kp_x * (_x - x0) + Kd_x * _Z
    ds[0] = state[1]
    ds[1] = (g * sin(_th) - u * cos(_th)) / L  # theta'' from the header equations
    ds[2] = state[3]
    ds[3] = u
    return ds
print("Integrating...")
# integrate your ODE using scipy.integrate.
solution = integrate.odeint(derivatives, state, t)
print("Done")
ths = solution[:, 0]
xs = solution[:, 2]
pxs = L * sin(ths) + xs
pys = L * cos(ths)
fig = pp.figure()
ax = fig.add_subplot(111, autoscale_on=False, xlim=(-1.5, 1.5), ylim=(-0.5, 2))
ax.set_aspect('equal')
ax.grid()
patch = ax.add_patch(Rectangle((0, 0), 0, 0, linewidth=1, edgecolor='k', facecolor='g'))
line, = ax.plot([], [], 'o-', lw=2)
time_template = 'time = %.1fs'
time_text = ax.text(0.05, 0.9, '', transform=ax.transAxes)
cart_width = 0.3
cart_height = 0.2
def init():
    """FuncAnimation init callback: clear the pendulum line/time label and
    size the cart rectangle.  Returns the artists for blitting."""
    line.set_data([], [])
    time_text.set_text('')
    patch.set_xy((-cart_width/2, -cart_height/2))
    patch.set_width(cart_width)
    patch.set_height(cart_height)
    return line, time_text, patch
def animate(i):
    """FuncAnimation frame callback: draw frame ``i`` of the simulation.

    Moves the cart rectangle to xs[i] and draws the pendulum rod from the
    cart (y=0) to the bob position (pxs[i], pys[i]).  Returns the artists
    for blitting.
    """
    thisx = [xs[i], pxs[i]]  # cart x and pendulum bob x
    thisy = [0, pys[i]]      # cart pivot at y=0, bob at pys[i]
    line.set_data(thisx, thisy)
    time_text.set_text(time_template % (i*dt))
    patch.set_x(xs[i] - cart_width/2)
    return line, time_text, patch
ani = animation.FuncAnimation(fig, animate, np.arange(1, len(solution)),
interval=25, blit=True, init_func=init)
pp.show()
# Set up formatting for the movie files
print("Writing video...")
Writer = animation.writers['imagemagick']
writer = Writer(fps=25, metadata=dict(artist='Sergey Royz'), bitrate=1800)
ani.save('controlled-cart.gif', writer=writer)
| [
"[email protected]"
] | |
18dce05a5bae85d8fef9e6175d630565c34fe43c | 84cdfb70998f3aed9061319f40bb36c0fc1b05a4 | /Project3/p3.py | f4572edb6d0f5be4acdba18f235a3c7534d07e40 | [] | no_license | mattsneaks/Computationally-Modelling-Resoning | 203ea7ebe10bf47cf050a9669d20f96773f0363a | 2312f6b9793774699922d809e809202ab7ace0ab | refs/heads/master | 2020-06-17T08:57:31.754250 | 2019-07-08T19:08:33 | 2019-07-08T19:08:33 | 195,870,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,615 | py | """Program by Blanette Baltimore, Matthew Kramer, Matthew Luker, Daniel Rocchi """
import random
import re
import itertools
import string
import time
from itertools import chain
import collections
class node:
def __init__(self):
self.function = str
self.inside = list
self.negated = False
self.roster = str
self.mattRoster = list
class EC2:
def init(self, event, time, identify):
self.event = event
self.time = time
self.identify = identify
class EC3:
def init(self, condition, event, time, identify):
self.condition = condition
self.event = event
self.time = time
self.identify = identify
def hobbs(p):
    """Scan a list of event-calculus problems and test two heuristics.

    Each problem ``prob`` is expected to be a sequence whose last element is
    the solution formula (e.g. ``"... HoldsAt(F T5)"``) and whose first
    element (``prob[0]``) is the list of formula lines.

    NOTE(review): the result flag ``correct`` is computed but never returned
    or stored, and ``fluents``/``fluent2`` are built and then discarded, so
    this function currently has no observable effect -- it looks unfinished.
    """
    for prob in p:
        solution = prob[-1]
        mod_solution = solution.replace(')', '')
        mod_solution = mod_solution.split()
        solution_time = mod_solution[-1]
        # Time tokens look like "T12"; strip the leading letter.
        actual_time = int(solution_time[1:])
        solution_fluent = mod_solution[-2]
        fluents = []
        correct = False
        for line in prob[0]:
            # check if tense of solution fluent is present in Happens formulae
            if 'Happens' in line:
                mod_line = line.split()
                mod_time = mod_line[-1]
                mod_time = mod_time[:-1]  # drop trailing ')'
                actual_mod_time = int(mod_time[1:])
                if mod_line[1] in solution_fluent:
                    if actual_mod_time <= actual_time:
                        correct = True
        # check if second to last event and last event terminate at the same time
        for line in prob[0]:
            if 'Happens' in line:
                mod_line = line.split()
                mod_fluent = mod_line[1]
                fluents.append(mod_fluent)
        for line in prob[0]:
            if 'Terminates' in line:
                split_line = line.split()
                fluent1 = split_line[-3]
                fluent2 = split_line[-2]  # NOTE(review): assigned but unused
                # NOTE(review): this inner loop shadows the outer ``line``.
                for line in prob[0]:
                    if 'Happens' in line:
                        if fluent1 in line:
                            mod_line = line.split()
                            mod_time = mod_line[-1]
                            mod_time = mod_time[:-1]
                            actual_mod_time = int(mod_time[1:])
                            if actual_mod_time < actual_time:
                                correct = True
        fluents.clear()
def proveFEC(A,c):
def fix_parentheses(P):
stack = []
left = []
right = []
remove = []
Q = []
flag = True
P = P.replace("(", "( ")
P = P.replace(")", " )")
P = P.split()
for p in range(len(P)):
if P[p] == '(':
left.append(p)
elif P[p] == ')':
right.append(p)
if len(left) > 0 and len(right) > 0:
stack.append([left.pop(), right.pop()])
elif len(left) == 0 and len(right) > 0:
remove.append(right.pop())
if len(right) == 0 and len(left) > 0:
remove += left
for i in range(len(P)):
for r in remove:
if i == r:
flag = False
break
if flag != False:
Q.append(P[i])
else:
flag = True
P = Q
P = ' '.join(P)
P = P.replace("( ", "(")
P = P.replace(" )", ")")
return P
def neg_normal(fSets):
parenthesis = 0
new = []
for i in range(len(fSets)):
for j in range(len(fSets[i])):
if "NOT (NOT" in fSets[i][j]:
fSets[i][j] = (fSets[i][j]).replace("NOT (NOT", "(")
new = fSets[i][j]
new = new.replace("(", "( ")
new = new.replace(")", " )")
new = new.split()
for k in range(len(new)):
if new[k] == "IMPLIES":
parenthesis = k
while not new[parenthesis] == ')':
parenthesis += 1
new[parenthesis] = "))"
new[k] = "OR (NOT"
new = ' '.join(new)
new = new.replace("( ", "(")
new = new.replace(" )", ")")
fSets[i][j] = new
if "NOT (NOT" in fSets[i][j]:
fSets[i][j] = (fSets[i][j]).replace("NOT (NOT", "(")
parenthesis = 0
for l in range(len(fSets)):
for m in range(len(fSets[l])):
if "(NOT (AND" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(NOT (AND", "(OR(NOT")
new = fSets[l][m]
new = new.replace("(", "( ")
new = new.replace(")", " )")
new = new.split()
for n in range(len(new)):
if new[n] == "OR(":
parenthesis = n
while not new[parenthesis] == ')':
parenthesis += 1
new[parenthesis] = "))"
new[parenthesis+1] = "(NOT " + new[parenthesis+1]
new = ' '.join(new)
new = new.replace("( ", "(")
new = new.replace(" )", ")")
new = new.replace("OR(", "OR (")
fSets[l][m] = new
elif "(NOT (OR" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(NOT (OR", "(AND(NOT")
new = fSets[l][m]
new = new.replace("(", "( ")
new = new.replace(")", " )")
new = new.split()
for o in range(len(new)):
if new[o] == "AND(":
parenthesis = o
while not new[parenthesis] == ')':
parenthesis += 1
new[parenthesis] = "))"
new[parenthesis+1] = "(NOT " + new[parenthesis+1]
new = ' '.join(new)
new = new.replace("( ", "(")
new = new.replace(" )", ")")
new = new.replace("AND(", "AND (")
fSets[l][m] = new
if "(NOT (FORALL" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(NOT (FORALL", "(EXISTS")
fSets[l][m] = fix_parentheses(fSets[l][m])
elif "(NOT (EXISTS" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(NOT (EXISTS", "(FORALL")
fSets[l][m] = fix_parentheses(fSets[l][m])
if "(OR (OR" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(OR (OR", "(OR")
fSets[l][m] = fix_parentheses(fSets[l][m])
if "(AND (AND" in fSets[l][m]:
fSets[l][m] = fSets[l][m].replace("(AND (AND", "(AND")
fSets[l][m] = fix_parentheses(fSets[l][m])
# print("NEGATED NORMAL:")
# print(fSets)
return fSets
times = [5, 10, 15]
timeout = time.time() + random.choice(times)
while time.time() < timeout:
continue
if time.time() >= timeout:
return True
return True
call1 = neg_normal(A)
def get_vars(string):
literals = []
#vars = []
string = string.replace('(', "( ")
string = string.replace(')', " )")
G = string.split()
for item in range(len(G)):
if item >= len(G) - 1:
break
if G[item] == 'f':
while G[item] != ')':
item += 1
if ('a' <= G[item] <= 'z') and not (G[item] == 'big_f') and not G[item] in literals:
literals.append(G[item])
return literals
def standardize(fSets):
symbols = []
variables = []
letter = "f"
n = 0
for i in range(len(fSets)):
for j in range(len(fSets[i])):
variables.append(get_vars(fSets[i][j]))
i = 0
j = 0
for k in range(len(variables)):
for l in range(len(variables[k])):
while letter in variables[k] or letter == 'f' or letter in fSets[i][j] or letter in symbols:
letter = random.choice(string.ascii_lowercase)
symbols.append(letter)
fSets[i][j] = (fSets[i][j]).replace(variables[k][l], letter)
if j == len(fSets[i]) - 1 and i < len(fSets):
i += 1
j = 0
elif j < len(fSets[i]):
j += 1
# print("STANDARDIZED:")
# print(fSets)
return fSets
call2 = standardize(call1)
#print('\n')
def tokenize(string):
    """Split ``string`` into tokens, keeping non-word delimiters (parens,
    operators) but dropping every whitespace/empty token.

    Bug fix: the original made three remove-while-iterating passes; each
    removal shifted the list under the running index so elements were
    skipped, and for long runs of whitespace three passes could still leave
    whitespace tokens behind.  A single filtering pass is exact.
    """
    return [tok for tok in re.split(r'(\W)', string)
            if tok not in (" ", "", "\t", "\n")]
def prenex(fSets):
tokens = ""
hold = []
for i in range(len(fSets)):
for j in range(len(fSets[i])):
tokens = tokenize(fSets[i][j])
for k in range(len(tokens)):
if tokens[k] == "FORALL":
hold.append("(" + tokens[k]+ " " + tokens[k+1] + " ")
elif tokens[k] == "EXISTS":
hold.append("(" + tokens[k]+ " " + tokens[k+1] + " ")
if len(hold) > 0:
for l in reversed(hold):
fSets[i][j] = (fSets[i][j]).replace(l, "")
fSets[i][j] = l + fSets[i][j]
hold.clear()
# print("PRENEX:")
# print(fSets)
return fSets
call3 = prenex(call2)
#print('\n')
def skolemize(fSets):
existential_vars = []
string = ""
var = ""
replacement_var = 'f' + '(' + var + ')'
count = 0
i = j = 0
for problem in fSets:
for part in problem:
if 'EXISTS' in part:
# counts the number of existential quantifiers
count = part.count('EXISTS')
string = part.split()
# removes existential quantifiers and their variables
for x in range(0, count):
string.pop(0)
# save the variable before removing it
existential_vars.append(string[0])
string.pop(0)
fSets[i][j] = " ".join(string)
# removes excess ending parentheses
for y in range(0, count):
fSets[i][j] = fSets[i][j][:-1]
j += 1
j = 0
i += 1
i = 0
# replace existential variables
for problem in fSets:
for part in problem:
for v in existential_vars:
if v in part:
fSets[i][j] = fSets[i][j].replace(v, '(f ' + v + ')')
j += 1
j = 0
i += 1
i = 0
# print("SKOLEMIZE:")
# print(fSets)
return fSets
call4 = skolemize(call3)
#print('\n')
def dropUniversals(fSets):
universal_vars = []
string = ""
count = 0
i = j = 0
for problem in fSets:
for part in problem:
if 'FORALL' in part:
# counts the number of universal quantifiers
count = part.count('FORALL')
remove_count = count * 2
string = part.split()
# removes universal quantifiers and their variables
for x in range(0, remove_count):
string.pop(0)
fSets[i][j] = " ".join(string)
# removes excess ending parentheses
for y in range(0, count):
fSets[i][j] = fSets[i][j][:-1]
j += 1
j = 0
i += 1
i = 0
# print("UNIVERSALS:")
# print(fSets)
return fSets
call5 = dropUniversals(call4)
#print('\n')
#function to take in array, tokenize it, then create nested array
def createNested(str):
#tokenize string
def tokenize(string):
    """Split ``string`` into tokens, keeping non-word delimiters but
    dropping every whitespace/empty token.

    Bug fix (same as the sibling tokenize in proveFEC): the original's
    remove-while-iterating passes skipped elements and could leave
    whitespace tokens for long whitespace runs; one filtering pass is exact.
    """
    return [tok for tok in re.split(r'(\W)', string)
            if tok not in (" ", "", "\t", "\n")]
#call tokenize to create tokenized string
tokenized = tokenize(str)
#create nested array based on tokenized string
def nestArr(array):
def nestHelper(level=0):
try:
token = next(tokens)
except StopIteration:
return []
if token == ')':
return []
elif token == '(':
return [nestHelper(level+1)] + nestHelper(level)
else:
return [token] + nestHelper(level)
tokens = iter(array)
return nestHelper()
#clean up nested array and convert to list
nestedArr = nestArr(tokenized)
nestedArr = list(chain.from_iterable(nestedArr))
#print(f"Nested : {nestedArr}")
return nestedArr
#Loop to create nested array by calling createNested() on each element in F
nestedF = []
nestLevel1 = []
for i in range(len(A)):
for j in range(len(A[i])):
#create array from one array in F
nestLevel1.append(createNested(A[i][j]))
nestedF.append(nestLevel1)
nestLevel1 = []
#Testing nestedF to compare to original input of F
# for i in F:
# print(f"F : {i}")
# for i in nestedF:
# print(f"nestedF : {i}")
#Use nestedF from now on
#START
variables = ['x', 'y', 'z']
def isVariable(x):
if x in variables:
#print(f"isVariable : {x}")
return True
return False
def unifyVar(var, val, repl):
if var in repl:
return unify(repl[var], val, repl)
elif isinstance(val, str) and val in repl :
return unify(var, repl[val], repl)
#elif (var occurs anywhere in x):
elif var in val:
# print("occurs in")
# print(f"var: {var}")
# print(f"val: {val}")
# print(f"repl: {repl}")
return False
else:
repl[var] = val
return repl
def unify(x, y, repl):
#if either is neagated
if isinstance(x, list) and len(x) > 0 and x[0] == 'NOT':
x = x[1]
if isinstance(y, list) and len(y) > 0 and y[0] == 'NOT':
y = y[1]
if repl is False:
return False
#if predicates contain different amount of values
if isinstance(x, list) and isinstance(y, list) and len(x) != len(y):
# print("here2")
# print(f"x: {x}")
# print(f"y: {y}")
# print(f"repl: {repl}")
return False
#if both predicates match
elif x == y and isinstance(x, str) and isinstance(y, str):
return repl
#variable?
elif isVariable(x) and isinstance(x, str):
return unifyVar(x, y, repl)
elif isVariable(y) and isinstance(y, str):
return unifyVar(y, x, repl)
#predicate/"compound"?
elif isinstance(x, dict) and isinstance(y, dict):
if len(x) == 0 and len(y) == 0:
return repl
#Check if functors of predicates match
if x[0] != y[0] and isinstance(x[0], str) and isinstance(y[0],str) and not (isVariable(x[0]) or isVariable(y[0])):
# print("here3")
# print(f"x: {x}")
# print(f"y: {y}")
# print(f"repl: {repl}")
return False
return unify(x[1:],y[1:], unify(x[0], y[0], repl))
#list?
elif isinstance(x, list) and isinstance(y, list):
if len(x) == 0 and len(y) == 0:
return repl
return unify(x[1:],y[1:], unify(x[0], y[0], repl))
else:
return False
#END
def resolve(A):
def swap(a, b):
newList = []
for i in range(len(a)):
for j in range(len(b)):
if (a[i].function == b[j].function and a[i].negated != b[j].negated and unify(a[i].mattRoster, b[j].mattRoster, {}) != False):
#unify(a[i].roster, b[j].mattRoster, {}) != False
if len(a) == 1 and len(b) == 1:
sub = unify(a[i].mattRoster, b[j].mattRoster, {})
# # print(sub)
#print("\nvictory from merging:")
# # print(a[i].roster)
# # print(b[j].roster)
# # print("return {}")
return 3
exit()
else:
#ewIn = unify(a[i].mattRoster, b[j].mattRoster, {})
#print(newIn)
for k in range(len(a)):
if a[k] != a[i]:
found = False
for l in range(len(newList)):
if a[k].roster == newList[l].roster:
found = True
if found == False:
newList.append(a[k])
for k in range(len(b)):
if b[k] != b[j]:
found = False
for l in range(len(newList)):
if b[k].roster == newList[l].roster:
found = True
if found == False:
newList.append(b[k])
if newList in A:
break
found = False
for c in range(len(A)):
if len(A[c]) == len(newList):
list1 = []
list2 = []
for u in range(len(newList)):
list1.append(newList[u].roster)
for u in range(len(A[c])):
list2.append(A[c][u].roster)
if collections.Counter(list1) == collections.Counter(list2):
#thwarts repeats
found = True
if found == True:
break
sub = unify(a[i].mattRoster, b[j].mattRoster, {})
##print(sub)
##print("\nCombining Clauses")
# # for l in range(len(a)):
# # print(a[l].roster)
# # ##print("and")
# # for l in range(len(b)):
# # print(b[l].roster)
#print("made it here")
if not sub:
continue
else:
for p in range(len(newList)):
##print()
##print(newList[p].inside)
#print("made it here")
for q in range(len(newList[p].inside)):
#print("made it here")
if (newList[p].inside[q] in sub.keys()):
#print("made it here")
newList[p].inside[q] = sub.get(newList[p].inside[q])
##print(newList[p].inside)
newList[p].mattRoster = newList[p].inside
#print("made it here")
for r in range(1, len(newList[p].inside)):
newRost = newList[p].inside[r]
newList[p].roster = {f"{newList[p].function}({newRost})"}
##print(f"\nNEW SET {len(F) }:")
# # for m in range(len(newList)):
# # print(newList[m].roster)
A.append(newList)
return 0
##print("\nORIGINAL SETS:")
# for i in range(len(F)):
# ##print(f"\nset {i}")
# #print(type(F[i]))
# for j in range(len(F[i])):
# print(F[i][j])
# print("Heree")
#F[i][j].mattRoster.insert(0,F[i][j].function)
##print(f"matt roster: {F[i][j].mattRoster}\n")
i = 0
while(i < len(A)):
for j in range( len(A)):
if swap(A[i],A[j]) == 3:
return 1
if swap(A[i],A[j]) == 0:
i = 0
i+=1
##print("\n\nNO RESOLUTION \nWHAT WAS LEFT:")
##print("==" *30)
##for i in range(len(F)):
##print(f"\nset {i}")
##for j in range(len(F[i])):
##print(F[i][j].roster)
##print(F[i][j].mattRoster)
#print("No resolution found")
return 0
def readForm2(clause):
new = node()
#print(clause)
#NEGATION
if clause[0] == "NOT":
new.negated = True
del clause[0]
clause = clause[0]
#new.funciton = clause[0]
#print(clause)
new.mattRoster = clause
#print(new.mattRoster)
#FUNCTION
new.function = clause[0]
del clause[0]
#INSIDE
new.inside = clause
#print(f"\n{clause}\n")
#ROSTER
if new.negated == True:
new.roster = (f"NOT {new.function}({new.inside})")
else: new.roster = (f"{new.function}({new.inside})")
# print(new.negated)
# print(new.funciton)
# print(new.inside)
# print("\n")
return new
#END
def standardCNF(x):
#remove and
if x[0] == 'AND':
newX = []
for i in x[1:]:
newX.append(i)
#print(newX)
newCNF = []
for i in range(len(newX)):
if newX[i][0] == 'OR':
for j in newX[i][1:]:
newCNF.append(j)
return newCNF
#remove or
newCNF = []
if x[0] == 'OR':
for i in x[1:]:
newCNF.append(i)
return newCNF
return x
#print("NESTED CNF:")
for i in range(len(nestedF)):
nestedF[i] = standardCNF(nestedF[i])
#print(nestedF[i])
if(type(A[i][0]) is not list):
listy = []
A[i] = readForm2(nestedF[i])
#print(F[i].mattRoster)
listy.append(A[i])
A[i] = listy
else:
for j in range(len(nestedF[i])):
A[i][j] = readForm2(A[i][j])
resolve(nestedF)
return True | [
"[email protected]"
] | |
9004146680ed2a457234a58e5caeda6a640d5fc6 | 9106aa2648b8afe571a5d3bba2c24c055ab31002 | /{{ cookiecutter.project_slug }}/api/{{ cookiecutter.project_slug }}/apps/accounts/api/v1/views/password.py | 2c32d968d87463d10aa2d12902b4b9d7b412b4b9 | [] | no_license | ArgDevs/backend-skeleton | 3dd318fb14b261514b83203eefa01ec5a95c2c0e | ebaac3e4be3b7c38131e4967c462ef2f84940a4d | refs/heads/master | 2022-11-13T12:16:30.743888 | 2020-05-20T22:00:10 | 2020-05-20T22:00:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,652 | py | from rest_framework import status
from rest_framework.generics import CreateAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from {{ cookiecutter.project_slug }}.apps.accounts.api.v1.serializers.password import (
ChangePasswordSerializer,
ConfirmResetPasswordSerializer,
ResetPasswordSerializer,
)
class ChangePasswordAPIView(CreateAPIView):
    """POST endpoint for an authenticated user to change their password.

    The serializer performs the actual update; the parent's 201 response is
    discarded and a bare 204 No Content is returned instead.
    """
    serializer_class = ChangePasswordSerializer
    permission_classes = (IsAuthenticated,)

    @swagger_auto_schema(responses={status.HTTP_204_NO_CONTENT: openapi.Response("")})
    def post(self, request, *args, **kwargs):  # pragma: no cover
        # Run the normal create flow for its side effects only.
        super(ChangePasswordAPIView, self).post(request, *args, **kwargs)
        return Response(status=status.HTTP_204_NO_CONTENT)
class ResetPasswordAPIView(CreateAPIView):
    """POST endpoint that starts a password reset (no authentication
    required).  Returns 204 No Content instead of the default 201."""
    serializer_class = ResetPasswordSerializer

    @swagger_auto_schema(responses={status.HTTP_204_NO_CONTENT: openapi.Response("")})
    def post(self, request, *args, **kwargs):  # pragma: no cover
        # Run the normal create flow for its side effects only.
        super(ResetPasswordAPIView, self).post(request, *args, **kwargs)
        return Response(status=status.HTTP_204_NO_CONTENT)
class ConfirmResetPasswordAPIView(CreateAPIView):
    """POST endpoint that completes a password reset using the token the
    serializer validates.  Returns 204 No Content instead of 201."""
    serializer_class = ConfirmResetPasswordSerializer

    @swagger_auto_schema(responses={status.HTTP_204_NO_CONTENT: openapi.Response("")})
    def post(self, request, *args, **kwargs):  # pragma: no cover
        # Run the normal create flow for its side effects only.
        super(ConfirmResetPasswordAPIView, self).post(request, *args, **kwargs)
        return Response(status=status.HTTP_204_NO_CONTENT)
| [
"[email protected]"
] | |
5478f50a6646c2401c5d1ef56f1c287c28ffdbe1 | 7513e1b2a15fc43df03ac67c117b77b8ff5c982a | /customers_segmentation.py | c2dcf120ad7b898a39726cf40b14dbd32fdab266 | [] | no_license | kevin880987/DSAI-HW4-2021 | dd0f936cb53c7fc61c67baf8cf28be153b1b7b9b | ae4343d828eb6320d34750bf5dabd5292fa6e448 | refs/heads/main | 2023-05-30T17:20:52.546053 | 2021-06-14T17:12:39 | 2021-06-14T17:12:39 | 368,394,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,039 | py | #!/usr/bin/env python
# coding: utf-8
import numpy as np
import pandas as pd
pd.set_option('max_columns', 150)
import gc
import os
# matplotlib and seaborn for plotting
import matplotlib
matplotlib.rcParams['figure.dpi'] = 144 #resolution
matplotlib.rcParams['figure.figsize'] = (8,6) #figure size
import matplotlib.pyplot as plt
plt.rcParams["font.family"] = "serif"
import seaborn as sns
# sns.set_style('white')
color = sns.color_palette()
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score
from sklearn.decomposition import PCA
root = os.getcwd() + os.sep + 'data' + os.sep
image_fp = os.getcwd() + os.sep + 'image' + os.sep
#### Data
aisles = pd.read_csv(root + 'aisles.csv')
departments = pd.read_csv(root + 'departments.csv')
orders = pd.read_csv(root + 'orders.csv')
order_products_prior = pd.read_csv(root + 'order_products__prior.csv')
order_products_train = pd.read_csv(root + 'order_products__train.csv')
products = pd.read_csv(root + 'products.csv')
# For segmentation, consider users from prior set only
order_products = order_products_prior.merge(products, on ='product_id', how='left')
order_products = order_products.merge(aisles, on ='aisle_id', how='left')
order_products = order_products.merge(departments, on ='department_id', how='left')
order_products = order_products.merge(orders, on='order_id', how='left')
#### Segmentation
# Since there are thousands of products in the dataset, we rely on aisles,
# which represent categories of products.
# Use Principal Component Analysis to find new dimensions along which clustering will be easier.
cross_df = pd.crosstab(order_products.user_id, order_products.aisle)
# Normalize each row
df = cross_df.div(cross_df.sum(axis=1), axis=0)
##### PCA and K-Means Clustering
# Reducing this dataframe to only 10 dimensions as KMeans does not work properly in higher dimension.
pca = PCA(n_components=10)
df_pca = pca.fit_transform(df)
df_pca = pd.DataFrame(df_pca)
Sum_of_squared_distances = []
K = range(1,10)
for k in K:
km = KMeans(n_clusters=k)
km = km.fit(df_pca)
Sum_of_squared_distances.append(km.inertia_)
plt.subplots(figsize = (8, 5))
plt.plot(K, Sum_of_squared_distances, '.-')
plt.xlabel('k')
plt.ylabel('Sum_of_squared_distances')
plt.title('Elbow Method for Optimal k')
plt.gcf().tight_layout()
plt.gcf().savefig(image_fp+'Optimal k.png', dpi=144, transparent=True)
plt.show()
# From above plot we can choose optimal K as 5
k = 5 # 6 #
clusterer = KMeans(n_clusters=k,random_state=42).fit(df_pca)
centers = clusterer.cluster_centers_
c_preds = clusterer.predict(df_pca)
# # Visualizing clustering among first two principal components
# temp_df = df_pca.iloc[:, 0:2]
# temp_df.columns = ["pc1", "pc2"]
# temp_df['cluster'] = c_preds
# --- Cluster visualisation on the first two principal components (disabled) ---
# fig, ax = plt.subplots(figsize = (8, 5))
# ax = sns.scatterplot(data = temp_df, x = "pc1", y = "pc2", hue = "cluster")
# ax.set_xlabel("Principal Component 1")
# ax.set_ylabel("Principal Component 2")
# ax.set_title("Cluster Visualization")
# plt.show()

#### Top products per cluster
# Attach each user's cluster label to the user x aisle cross table.
# NOTE(review): assumes c_preds is row-aligned with cross_df — confirm upstream.
cross_df['cluster'] = c_preds

# Customer Segmentation Results:
# Bar chart of the number of users assigned to each cluster; saved as a PNG.
fig, ax = plt.subplots(figsize = (4,3))
ax = sns.countplot(cross_df['cluster'], color = color[0])
ax.set_xlabel('Cluster', size = 10)
ax.set_ylabel('Number of Users', size = 10)
ax.tick_params(axis = 'both', labelsize = 8)
ax.set_title('Total Users of each Cluster')
fig.tight_layout()
fig.savefig(image_fp+'Total Users of each Cluster.png', transparent=True)
plt.show()

# Per-cluster top-10 aisle inspection (disabled; findings summarised below).
# cluster1 = cross_df[cross_df.cluster == 0]
# cluster2 = cross_df[cross_df.cluster == 1]
# cluster3 = cross_df[cross_df.cluster == 2]
# cluster4 = cross_df[cross_df.cluster == 3]
# cluster5 = cross_df[cross_df.cluster == 4]
# cluster1.drop('cluster',axis=1).mean().sort_values(ascending=False)[0:10]
# cluster2.drop('cluster',axis=1).mean().sort_values(ascending=False)[0:10]
# cluster3.drop('cluster',axis=1).mean().sort_values(ascending=False)[0:10]
# cluster4.drop('cluster',axis=1).mean().sort_values(ascending=False)[0:10]
# cluster5.drop('cluster',axis=1).mean().sort_values(ascending=False)[0:10]

# - Cluster 1 results into 5428 consumers having a very strong preference for water seltzer sparkling water aisle.
# - Cluster 2 results into 55784 consumers who mostly order fresh vegetables followed by fruits.
# - Cluster 3 results into 7948 consumers who buy packaged produce and fresh fruits mostly.
# - Cluster 4 results into 37949 consumers who have a very strong preference for fruits followed by fresh vegetables.
# - Cluster 5 results into 99100 consumers who orders products from many aisles. Their mean orders are low compared to other clusters which tells us that either they are not frequent users of Instacart or they are new users and do not have many orders yet.

# Encode the labels and save
# Binary-encode the cluster id (log2(k) columns instead of k one-hot columns).
import category_encoders as ce
encoder= ce.BinaryEncoder(cols=['cluster'], return_df=True)
clus_feats = encoder.fit_transform(cross_df['cluster'])
clus_feats.isnull().any().any()  # sanity check: encoding introduced no NaNs
clus_feats.to_pickle(root + 'cluster.pkl')  # persisted for downstream models
| [
"[email protected]"
] | |
5d20c2948fd8fe1976ae14a3119aac206b4aba7c | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/bgp/neighbors/neighbor/logging_options/state/__init__.py | 4d4cdf7200e60a249374f5fbc86441e2a186c980 | [] | no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,694 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common-multiprotocol - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common-structure - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-peer-group - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-neighbor - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
# NOTE: auto-generated by pyangbind from the OpenConfig BGP YANG model;
# hand edits will be lost when the bindings are regenerated.
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-global - based on the path /bgp/neighbors/neighbor/logging-options/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State information relating to logging for the BGP neighbor
  or group
  """
  __slots__ = ('_path_helper', '_extmethods', '__log_neighbor_state_changes',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Operational-state leaf (is_config=False); defaults to true per the model.
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: accept one object carrying every pybind element.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          # Skip elements that are still at their default value.
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent container when attached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'neighbors', 'neighbor', 'logging-options', 'state']

  def _get_log_neighbor_state_changes(self):
    """
    Getter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    return self.__log_neighbor_state_changes

  def _set_log_neighbor_state_changes(self, v, load=False):
    """
    Setter method for log_neighbor_state_changes, mapped from YANG variable /bgp/neighbors/neighbor/logging_options/state/log_neighbor_state_changes (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_log_neighbor_state_changes is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_log_neighbor_state_changes() directly.

    YANG Description: Configure logging of peer state changes.  Default is
    to enable logging of peer state changes.
    """
    if hasattr(v, "_utype"):
      # Unwrap a previously-wrapped value back to its underlying type.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """log_neighbor_state_changes must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)""",
        })

    self.__log_neighbor_state_changes = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_log_neighbor_state_changes(self):
    # Restore the leaf to its model default (true).
    self.__log_neighbor_state_changes = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="log-neighbor-state-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='boolean', is_config=False)

  # Read-only property: state leaves expose a getter only (config: false).
  log_neighbor_state_changes = __builtin__.property(_get_log_neighbor_state_changes)

  _pyangbind_elements = OrderedDict([('log_neighbor_state_changes', log_neighbor_state_changes), ])
| [
"[email protected]"
] | |
1aa3650b2c0f19b8d9061846b1cf347e70c14894 | aad06300a291f5dce352ed898b8b48cb59ad8f4c | /datalogger/migrations/0003_auto_20210513_0927.py | 51029f88f3ecfdf9928adbfb230526f9ec2f1adf | [] | no_license | heisenbug101/electric-vehicle-telematics | 75887985c05297575bd1135a164aa075e34ac28d | 774af2de62a6b91d2d71cc61534de9103989eaf5 | refs/heads/master | 2023-06-03T18:48:00.096434 | 2021-06-14T02:55:17 | 2021-06-14T02:55:17 | 374,525,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # Generated by Django 3.2.2 on 2021-05-13 09:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.2.2, 2021-05-13).

    Removes ``Vehicle.vehicleDetails`` and changes ``VehicleRegister.id``
    to a 64-bit ``BigAutoField`` primary key.
    """

    # Must be applied after the previous datalogger migration.
    dependencies = [
        ('datalogger', '0002_auto_20210513_0747'),
    ]

    operations = [
        # Drop the vehicleDetails field from the Vehicle model.
        migrations.RemoveField(
            model_name='vehicle',
            name='vehicleDetails',
        ),
        # Switch the VehicleRegister primary key to BigAutoField.
        migrations.AlterField(
            model_name='vehicleregister',
            name='id',
            field=models.BigAutoField(primary_key=True, serialize=False),
        ),
    ]
| [
"[email protected]"
] | |
25ba0d36b3a14e41714c4a4330878b4251bff0f1 | 338f35a5b1e556cba067bc6ffa3778332267fcdd | /niji/api.py | 8f6c02a3f9b5597ec74178e58d8d2e5748bd9df5 | [
"LicenseRef-scancode-sata"
] | permissive | fdl66/OnlineJudge | 50f3273b9d967ca870175a2edf9cb99cedaba6e4 | 4f0ae896694c93788bbb42eddb509fd6fc7aa41a | refs/heads/master | 2020-12-30T13:46:11.188596 | 2017-12-12T10:25:09 | 2017-12-12T10:25:09 | 91,249,578 | 0 | 1 | null | 2017-05-14T14:39:26 | 2017-05-14T14:39:26 | null | UTF-8 | Python | false | false | 888 | py | from rest_framework import viewsets
from rest_framework.authentication import SessionAuthentication
from niji.models import Topic, Post, Node
from niji.serializers import TopicSerializer, PostSerializer#, NodeSerializer
class SessionAuthenticationExemptCSRF(SessionAuthentication):
def enforce_csrf(self, request):
return
class TopicApiView(viewsets.ModelViewSet):
authentication_classes = (SessionAuthenticationExemptCSRF,)
queryset = Topic.objects.all()
serializer_class = TopicSerializer
class PostApiView(viewsets.ModelViewSet):
authentication_classes = (SessionAuthenticationExemptCSRF,)
queryset = Post.objects.all()
serializer_class = PostSerializer
'''
class NodeApiView(viewsets.ModelViewSet):
authentication_classes = (SessionAuthenticationExemptCSRF,)
queryset = Node.objects.all()
serializer_class = NodeSerializer
'''
| [
"[email protected]"
] | |
8556ef3d90daf3f78f63913c7c669ab2f0e1dd88 | d91a0186cec0452a8eb54fd6fabe0ef9e75cd738 | /chapter_9/exercise_9.5.py | ed039bea3104f20878cadf9031c3f0f3f438f6d4 | [] | no_license | MaximZolotukhin/erik_metiz | 31a6f5146b8bb58b8f04a6b9635b36a67830e52a | 8afde60aa2bddd6858a5f7a7189169a82bde4322 | refs/heads/main | 2023-05-03T07:39:06.731413 | 2021-05-30T19:04:31 | 2021-05-30T19:04:31 | 361,544,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,478 | py | """
9.5. Попытки входа:
добавьте атрибут login_attempts в класс User из упражнения 9.3 (с. 175).
Напишите метод increment_login_attempts(), увеличивающий значение login_attempts на 1.
Напишите другой метод с именем reset_login_attempts(), обнуляющий значение login_attempts.
Создайте экземпляр класса User и вызовите increment_login_attempts() несколько раз.
Выведите значение login_attempts, чтобы убедиться в том, что значение было изменено правильно,
а затем вызовите reset_login_attempts(). Снова выведите login_attempts и убедитесь в том,
что значение обнулилось.
"""
class User():
"""Описание пользователя"""
def __init__(self, first_name, last_name, age, work="Секретный агент"):
self.first_name = first_name
self.last_name = last_name
self.age = age
self.work = work
self.login_attempts = 0
def describe_user(self):
"""Вывод полной информации о пользователе"""
print(f"Краткая информация о новом агенте 007:")
print(f"Имя {self.first_name}")
print(f"Фамилия {self.last_name}")
print(f"Возраст {self.age}")
print(f"Работа {self.work}")
def greet_user(self):
"""Вывод сообщения"""
print(f"Привет {self.first_name} {self.last_name}, вы завербованы!")
def increment_login_attempts(self):
"""При каждом вызове увеличивает количество зарегистрировавшихся человек на 1"""
self.login_attempts += 1
def reset_login_attempts(self):
"""Обнуляет количество зарегестрированных пользователей"""
self.login_attempts = 0
user_0 = User("Шен", "Коннери", 37)
user_0.increment_login_attempts()
user_0.increment_login_attempts()
user_0.increment_login_attempts()
print(user_0.login_attempts)
user_0.increment_login_attempts()
user_0.increment_login_attempts()
print(user_0.login_attempts)
user_0.reset_login_attempts()
print(user_0.login_attempts)
| [
"[email protected]"
] | |
1f1bf8e01776f82022bd765f27498da2461587be | c2a3e985a78638a5a5cee4ba3b52c6a502d54c04 | /venv/bin/easy_install | 61a523ec50414e61e24b884922bb763f7c716cda | [] | no_license | agrimreaper49/TempConverter | 3dafca3826bfa19701ac2ba65a51dab9aba72be0 | 8b2959ce7ed62d8d9e85935d6e7088abc1449d57 | refs/heads/master | 2022-06-28T01:23:15.722239 | 2020-05-12T18:48:51 | 2020-05-12T18:48:51 | 263,421,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | #!/Users/asharma/PycharmProjects/GUIFramework/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
a3cb4aa4a28f93cc844bd34674b023c572fa4653 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/cv/semantic_segmentation/BiseNetV1_for_PyTorch/configs/psanet/psanet_r101-d8_769x769_40k_cityscapes.py | bca631b125b687496c35ebd72ed8bbdd7a83d112 | [
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 835 | py | # Copyright (c) Facebook, Inc. and its affiliates.
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------
_base_ = './psanet_r50-d8_769x769_40k_cityscapes.py'
model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
| [
"[email protected]"
] | |
407ed935b7befe3e67c9b9ebc91f5d9d3a254c28 | fa07d3cd4ca0614b995f2a0ce95f6134989cdc25 | /33-moduller-02-os.py | cacb6c0b8edb810202a5dd1ac60c47064150e332 | [] | no_license | incememed0/python-notes | 5728380acab37108f4874beb54dcb9a6ccf9679e | 61c558792b2ca03179fe5520b076089bda675f8e | refs/heads/master | 2023-04-19T11:11:14.806268 | 2021-05-13T09:09:39 | 2021-05-13T09:09:39 | 316,295,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | # author: Ercan Atar
# linkedin.com/in/ercanatar/
################################################
| [
"[email protected]"
] | |
efde190bbeeec0f9893074e1d14d220175a000d9 | b6e6d32e4e054cf37a33bf8f2f9ad07263c3091f | /__main__.py | 1c8ad9e8e7f4045286f43343eee012d9871513c6 | [] | no_license | nyufac/dg | e9d3b90efdc26c4ad55e3350246fa5ff453cc461 | 3ea804e712c71eafc061f4fa3c7eca634b1d76bb | refs/heads/master | 2016-09-06T01:43:48.252201 | 2013-02-20T19:19:30 | 2013-02-20T19:19:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | from .run import __main__
| [
"[email protected]"
] | |
10c5eebe730943de327aa580f93157ac88c2d4b5 | 405fd21063cb46a7a2385a1b98bcccda0bebf575 | /stock_exchange_django/urls.py | 4be0d8562d44bde693f1f9f8c31f51fa236dc860 | [] | no_license | marco-calderon/stock_exchange_django | 6563ff76de38bc3613614909efb95c544c0e797f | 0e6d5e27ca0f263dcf378d5b30f45b6142354138 | refs/heads/master | 2023-08-18T03:23:54.563810 | 2021-09-10T18:58:55 | 2021-09-10T18:58:55 | 383,635,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | """stock_exchange_django URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from stock_app.views import keep_awake, scrap
from django.contrib import admin
from django.urls import path
from graphene_django.views import GraphQLView
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path('admin/', admin.site.urls),
path('graphql', csrf_exempt(GraphQLView.as_view(graphiql=True))),
path('scrap/', scrap, name='entries_scrap'),
path('keep_awake/', keep_awake, name='keep_awake'),
]
| [
"[email protected]"
] | |
87a6000c573a2876ad76c03c1303e2e9e818ca62 | 98fcafc464ea64d2a4a32d8f1b3bfc4cf693feda | /scripts/1_script_basic.py | a075506348c1114e97b3d3f35aba913f23f41104 | [] | no_license | mirabdulHaseeb/ETHPool | ce8ed692582ffbe3076d0079ee077fdc7552061a | 6d59c96c31c1e351df4cc7cc4306ec01fda16454 | refs/heads/master | 2023-07-09T01:53:59.959101 | 2021-08-18T18:27:51 | 2021-08-18T18:27:51 | 397,698,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,071 | py | from brownie import accounts, MockERC20, ETHPoolFarm, ETHPoolToken
def main():
owner, alice, bob = accounts[:3]
mock_dai = MockERC20.deploy("MockDai", "mDAI", {'from': owner})
mock_dai.mint(owner, 25000, {'from': owner})
mock_dai.mint(alice, 25000, {'from': owner})
mock_dai.mint(bob, 25000, {'from': owner})
ethPool_tkn = ETHPoolToken.deploy({'from': owner})
ethPool_frm = ETHPoolFarm.deploy(mock_dai.address, ethPool_tkn.address, {'from': owner})
print("Owner's dai: ", mock_dai.balanceOf(owner))
print("Alice's dai: ",mock_dai.balanceOf(alice))
print("Bob's dai: ",mock_dai.balanceOf(bob))
mock_dai.approve(ethPool_frm.address, 100, {'from': alice})
mock_dai.approve(ethPool_frm.address, 200, {'from': bob})
ethPool_frm.stake(100, {'from': alice})
ethPool_frm.stake(200, {'from': bob})
print("Staking Balance - Alice: ", ethPool_frm.stakingBalance(alice))
print("Staking Balance - Bob: ", ethPool_frm.stakingBalance(bob))
print("Contract Balance: ", mock_dai.balanceOf(ethPool_frm.address))
| [
"[email protected]"
] | |
add404cf837b77ea0909afc878c43ad55290eb6f | fb7bea3118d0cf50c280b6123c94747d48af977d | /montecarlo.py | b0a4801afafd01b25506a02611171321bb7905fa | [] | no_license | revifikry/Montecarlo | fa0dcf7effbcdc61ed6d442bc3d68e8a83cb350f | 067f74943fe53cf42db9ba6408b79783b1562f59 | refs/heads/main | 2023-03-12T23:50:31.765845 | 2021-03-08T15:15:44 | 2021-03-08T15:15:44 | 345,697,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,362 | py | print("Nama : Revi Mochamad F")
print("NRP : 152017071")
print("Montecarlo")
import pandas as pd
import numpy as np
import random
data = np.array([[0, 20], [1, 40], [2, 20], [3, 10], [4, 10]])
pd.DataFrame(data, columns=["Minggu Ke", "Frekuensi"])
mingguke = data[:0]
frekuensi = data[:,1]
sigma_f = 0
for i in range(len(frekuensi)):
sigma_f = sigma_f + frekuensi[i]
print("ΣFrekuensi:",sigma_f)
prob =[]
sum_f=0
for a in range(len(frekuensi)):
sum_f = frekuensi[a]/sigma_f
print("probabilitas minggu ke-",a,"=",sum_f)
prob.append(sum_f)
prob_d=np.array([prob])
data = np.concatenate((data, prob_d.T),axis=1)
pd.DataFrame(data, columns=["Minggu Ke", "Frekuensi","Probabilitas"])
prob_k =[]
sum_p=0
for a in range(len(frekuensi)):
sum_p = sum_p + prob[a]
print("probabilitas kumulatif minggu ke-",a,"=",sum_p)
prob_k.append(sum_p)
prob_kd=np.array([prob_k])
data = np.concatenate((data, prob_kd.T),axis=1)
pd.DataFrame(data, columns=["Minggu Ke", "Frekuensi","Probabilitas","Probabilitas Kumulatif"])
interval_min = []
min_v=0
for a in range(len(frekuensi)):
if(a==0):
interval_min.append(min_v)
print("Interval Minggu ke-",a," = ",min_v,"-",prob_k[a])
else:
min_v = prob_k[a-1]+0.001
interval_min.append(min_v)
print("Interval Minggu ke-",a," = ",min_v,"-",prob_k[a])
interval_mind=np.array([interval_min])
data = np.concatenate((data, interval_mind.T),axis=1)
interval_maxd=np.array([prob_k])
data = np.concatenate((data, interval_maxd.T),axis=1)
pd.DataFrame(data, columns=["Minggu Ke", "Frekuensi","Probabilitas","Probabilitas Kumulatif","Interval Batas Bawah","Interval Batas Atas"])
minggu_baru=101
p_minggu = []
angka_acak = []
permintaan = []
for a in range(16):
p_minggu.append(minggu_baru)
acak = random.random()
angka_acak.append(acak)
if(acak<0.2):
jenis = 0
permintaan.append(jenis)
elif(acak<0.6):
jenis = 1
permintaan.append(jenis)
elif(acak<0.8):
jenis = 2
permintaan.append(jenis)
elif(acak<0.9):
jenis = 3
permintaan.append(jenis)
elif(acak<=1):
jenis = 4
permintaan.append(jenis)
minggu_baru+=1
print("Minggu Ke-","|","Angka Acak","|","Permintaan")
for a in range(16):
print(p_minggu[a],"|",angka_acak[a],"|",permintaan[a]) | [
"[email protected]"
] | |
d2dc6f2998a9798b03fba27d48cf8c18db844614 | 8494b750aa0754d4b2ca7033bcab47164c9eee4a | /backend/src/web/home.py | deb17513508467e3a979bd4500c53e01e8a6f645 | [
"MIT"
] | permissive | almeida-marcusaf/cursos-apppy | 2654ef8410abd3d73ea690d9b190102186e13556 | 97349bc56409ef25b902d3c0b6a61615c627a7ba | refs/heads/master | 2016-09-06T10:19:18.130220 | 2014-05-29T10:49:35 | 2014-05-29T10:49:35 | 19,961,630 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from web import my_form
from zen import router
def index(_write_tmpl):
url = router.to_path(my_form)
_write_tmpl('templates/home.html', {'form_url': url})
| [
"[email protected]"
] | |
68748669b0946e5f4ed423f386e9de6f4c044a00 | 578027a231114d47269fe161e190816bb3198698 | /blog/models.py | 2c7fec426e8d4fb746551672df6a14a41ed00e10 | [] | no_license | jakebasel/blog | 67228e4df3d4c07ca8c52a8205d5bdb45905045b | e8f2ae5186b0b8ca4781b2895330838a35572a04 | refs/heads/main | 2023-07-13T08:39:30.285046 | 2021-08-22T16:58:06 | 2021-08-22T16:58:06 | 397,456,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | from django.db import models
from django.urls import reverse
class Post(models.Model):
title = models.CharField(max_length=200)
author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
body = models.TextField()
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("post_detail", args=[str(self.id)])
| [
"[email protected]"
] | |
a9243f5627e3e0a79fd6b593da8034ea26322a13 | dd27d8c13db12b01866c69bfe47654a4f678dd6d | /mysite/annotatorapp/migrations/0007_exsentences.py | 549bef576e73adf7d79c8936cec91610d29b608a | [
"MIT"
] | permissive | hareeshbabu82ns/smart-sanskrit-annotator | a8dfad175ce87f5ea4b919925e12352039e5cb0d | 763e1dd067107334761ba66796a559c18c2a2807 | refs/heads/master | 2022-09-14T15:27:58.030332 | 2020-06-03T13:05:51 | 2020-06-03T13:05:51 | 269,089,257 | 0 | 0 | MIT | 2020-06-03T13:00:35 | 2020-06-03T13:00:34 | null | UTF-8 | Python | false | false | 682 | py | # Generated by Django 2.0.4 on 2018-06-26 09:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('annotatorapp', '0006_merge_20180613_1419'),
]
operations = [
migrations.CreateModel(
name='Exsentences',
fields=[
('xsent_id', models.AutoField(primary_key=True, serialize=False)),
('line', models.CharField(max_length=200)),
('chunks', models.CharField(max_length=200)),
('lemmas', models.CharField(max_length=200)),
('morph_cng', models.CharField(max_length=200)),
],
),
]
| [
"[email protected]"
] | |
4d8ce538c74fee1c0cb6caba23a5e814dd6861cf | f911eb72744e2f8c0c874e0047a3f2c7cb4dee07 | /inventory/settings.py | fd5c9cad5ff9fea5367b61d7174f57a53d8f712f | [] | no_license | Silencesoulz/kevinwebsite | 33281c236899088f4f98d176c6d410d55d9459a1 | 1089a90c815ff0e4cb814c3678a19726680378e0 | refs/heads/master | 2023-01-18T16:33:29.680563 | 2020-11-22T09:40:22 | 2020-11-22T09:40:22 | 301,690,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,307 | py | """
Django settings for inventory project.
Generated by 'django-admin startproject' using Django 2.2.16.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import django_heroku
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 've=u(ec1^1%napqw+0#+y*bilpx9_!w_m8vr6%v5y+7frkazm8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'stock',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'inventory.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'inventory.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR,"static"),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
django_heroku.settings(locals()) | [
"[email protected]"
] | |
7e34988b3dd951dc468f8a74b8da463df74f8725 | 5916383e8d3df886edd20ac00ce9706a78078f56 | /飞机大战/v03/sky.py | b7a8df9d36b5868edb42c30dc3fe89c36224a6d1 | [] | no_license | sczhan/wode | 556154e8ccaa9192ea257bc88df3c5e4b268f88e | af4c721d0cedfdd2fe01dd681539724d1d64c378 | refs/heads/master | 2021-07-06T22:26:34.465708 | 2020-09-04T18:56:38 | 2020-09-04T18:56:38 | 181,295,279 | 1 | 0 | null | 2019-09-09T16:30:00 | 2019-04-14T10:53:57 | Python | UTF-8 | Python | false | false | 972 | py |
import tkinter
import actor
import config
class Sky(actor.Actor):
"""
移动的天空背景
"""
def __init__(self, root, canvas, position, x, y, tags):
super().__init__(root, canvas, position, x, y, tags, config.image_sky_width,
config.image_sky_height, False)
# 移动者的移动步长
self.steps = [config.step_length_sky_x, config.step_length_sky_y]
# 移动方向 - 向下
self.move_direction = [0, 1]
# 移动者加载背景图片
self.bg_image_fullname = config.image_path + config.filename_sky + config.filename_suffix
self.bg_image = tkinter.PhotoImage(file=self.bg_image_fullname)
self.bg_image_tags = tags
def exec_move(self):
"""
:return: none
"""
x = self.steps[0] * self.move_direction[0]
y = self.steps[1] * self.move_direction[1]
self.base_move(self.bg_image_tags, x, y)
| [
"[email protected]"
] | |
cda5f7df5163c1e7b44142a7bb892f48b7e56af3 | 6712c5257a28bbe518f314119e5f5cf4b7d5e08c | /main.py | ad8f495e1900d531f6ada50ec1e1e45979ddbf20 | [] | no_license | evgeneh/pinger_back_py | d9f197fc42b7f81f6c4834bc2d31cf321e0349ed | 0545fdc34f6e3b2a4d4bc20ba4b088562056c06e | refs/heads/master | 2021-06-10T20:18:37.803731 | 2020-04-10T12:57:28 | 2020-04-10T12:57:28 | 254,348,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py |
from app import app
import view
from scheduling import th
th.start()
if __name__ == '__main__':
app.run() | [
"[email protected]"
] | |
bd7e618babf29b88e5d5b427fc7f98bd92ecbfb6 | da7fc0abbb151cc5610294a00d339874ab4bdbc1 | /lecture_3_Processes_Fork_IO_Files/source_codes/signal_handler.py | 7bb12a4ac536467e257dfad88db575808bbf781a | [] | no_license | kenanniyazov/OS-class-materials | 163df135a2244df3ae8b17100a147335a5eb7ddc | 920e074abfa15864eaceed728adb0bcdc4048e09 | refs/heads/master | 2021-01-11T00:21:43.174718 | 2016-10-07T08:34:45 | 2016-10-07T08:34:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | #! /usr/bin/python
import signal
import sys
import time
def signal_handler(signal, frame):
print "Hello Class!"
print 'You pressed CTRL+C!'
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
print 'It is CTRL+C handler code, press CTRL+C'
while True:
time.sleep(1)
| [
"[email protected]"
] | |
b6ae5c6ac133bc2310314c915e3de4f1fd90f8bc | 876d44ca9d5a5f5c260f0759de5d548b1363437b | /turismo/settings.py | 1612fc97cac7d3b408499e2bd6628e8f66a8260c | [] | no_license | darwin-vladimir/turismo_tanicuchi | a59b0e00a5380a6bd6ee64930b4f5a9486b2fd46 | 52bd62f7dda38a2e165aafa3a33ea95156b7125b | refs/heads/master | 2023-02-06T17:59:39.929749 | 2020-12-30T22:10:06 | 2020-12-30T22:10:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,228 | py | import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7h0yn(#+qr#)0+0vfgaae3b_xuv+hke6&mv2#jl=+1tp%3arjj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'parroquias',
'usuario',
'atractivos_naturales',
'atractivos_culturales',
'alojamiento',
'transporte',
'empresa',
'interfaz_turista',
'restaurante',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'turismo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'turismo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'turismo',
'USER': 'postgres',
'PASSWORD':'basi',
'HOST':'localhost',
'PORT': 5432,
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTH_USER_MODEL ='usuario.Usuario'
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'es-ec'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
| [
"[email protected]"
] | |
7bdacdd2f6cd8fcbcd2b34591d045990f32ac4f0 | 4c2c1775b6b319ae07155f46e70a6726ab0980c2 | /algo/algo_code/personal/cvr_space/shitu_generate_exp/script/stat_app_state.py | 5d2a4f71bf26c5d8ac64d8969140db1c5e8da278 | [] | no_license | kiminh/util | 8e4b204849a57941120e37c9330772f03c8892d0 | 763a71031d9c0ef207b87dc03ebc55208a2dd5ad | refs/heads/master | 2022-06-09T06:09:13.221754 | 2020-04-27T04:23:00 | 2020-04-27T04:23:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,795 | py | import json
import sys
stat_dict = {}
for raw in sys.stdin:
line_json = json.loads(raw.strip("\n\r "))
promoted_app = line_json['promoted_app']
time_ = int(line_json['time'])
click = line_json['click']
trans = line_json['trans']
if promoted_app not in stat_dict:
stat_dict[promoted_app] = {}
if time_ not in stat_dict[promoted_app]:
stat_dict[promoted_app][time_] = {}
stat_dict[promoted_app][time_]['click'] = click
stat_dict[promoted_app][time_]['trans'] = trans
res_dict = {}
for app, value in stat_dict.items():
value_sort = sorted(value.items(), key=lambda d:d[0], reverse=True)
if app not in res_dict:
res_dict[app] = {}
for i, item in enumerate(value_sort):
before7days_click = sum([ x[1]['click'] for x in value_sort[i+1:i+8] ])
before3days_click = sum([ x[1]['click'] for x in value_sort[i+1:i+4] ])
before1days_click = sum([ x[1]['click'] for x in value_sort[i+1:i+2] ])
before7days_trans = sum([ x[1]['trans'] for x in value_sort[i+1:i+8] ])
before3days_trans = sum([ x[1]['trans'] for x in value_sort[i+1:i+4] ])
before1days_trans = sum([ x[1]['trans'] for x in value_sort[i+1:i+2] ])
time_ = item[0]
if time_ not in res_dict:
res_dict[app][time_] = {}
res_dict[app][time_]['before7days_click'] = before7days_click
res_dict[app][time_]['before3days_click'] = before3days_click
res_dict[app][time_]['before1days_click'] = before1days_click
res_dict[app][time_]['before7days_trans'] = before7days_trans
res_dict[app][time_]['before3days_trans'] = before3days_trans
res_dict[app][time_]['before1days_trans'] = before1days_trans
json.dump(res_dict, open('app_stat.json', 'w'), indent=4)
| [
"[email protected]"
] | |
c6a2ae11e018c4fe3fc46b440948729170288cef | b660b61f2d82a221360a79e2dcbafdf151496cd1 | /store_data_in_database.py | a23ad43213efcbc27e5f61dd99b2a34ee7415bb5 | [] | no_license | aldotoci/automated_data_extracter | d74a28eda0d59208a1c016c32b09b559669e4aa1 | dcd2068ae010069ccd23d6b3344e5e4fbfec82ce | refs/heads/master | 2023-09-05T23:42:24.090834 | 2021-11-18T10:07:43 | 2021-11-18T10:07:43 | 429,380,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,609 | py | import requests
from bs4 import BeautifulSoup
from pymongo import MongoClient
import threading
class store_info_in_database:
def get_info_per_anime(self, title, number_of_episodes, link):
anime_data = []
anime_list_link = 'https://gogoanime.ai/category/' + link
#Getting the anime_list page
anime_page = requests.get(anime_list_link)
anime_soup = BeautifulSoup(anime_page.text, 'html.parser')
#Getting info about the anime
data = anime_soup.find('div', class_="anime_info_body_bg")
thumbnail = data.find('img').get('src')
data = data.find_all('p')
type = data[1].find('a').get_text()
plot = data[2].get_text()[14:]
genre = []
for gen in data[3].find_all('a'):
genre.append(gen.get('title'))
realease = data[4].get_text()[10:]
status = data[5].get_text()[9:len(data[5].get_text())-1]
other_names = []
names = data[6].get_text()[12:]
name = ""
for char in names:
if char == ",":
other_names.append(name)
name = ""
continue
elif name == "":
if char == " ":
continue
else:
name += char
else:
name += char
if char == names[len(names)-1]:
other_names.append(name)
data = {'title': title,'number_of_episodes':number_of_episodes,'url':link,'thumbnail': thumbnail,'type':type,'plot': plot,'genre':genre,'realease':realease,'status':status,'othernames':other_names}
return data
def store_data(self, anime, prog, anime_list,db):
data = self.get_info_per_anime(anime['original_title'],anime['episodes number'],anime['url'])
prog += 1
try:
db.anime_data_list.insert_one(data)
except:
print("An error occurred")
print('Progress: ' + str(prog) + " out of " + str(len(anime_list)))
def store_data_in_database(self):
client = MongoClient('mongodb+srv://admin:[email protected]/myFirstDatabase?retryWrites=true&w=majority')
db = client.animes
anime_list = []
prog = 0
db.anime_data_list.drop()
for anime in db.anime_list.find():
anime_list.append(anime)
for anime in anime_list: #Fix latter
t = threading.Thread(target=self.store_data, args=(anime,prog,anime_list,db))
t.start()
test = store_info_in_database()
test.store_data_in_database() | [
"[email protected]"
] | |
95a32b2a5bbfae0660d048c78d8d01b7feb5c48e | 591a05e50f2515f6bd4605de6ed9ed7d3936ad9d | /welib/BEM/examples/Example_BEM_1.py | ce403a98f34cecf7b29e89966a23ad351491e42a | [
"MIT"
] | permissive | ebranlard/welib | 679edeec85feb629dc27047a62422d469c6e0081 | 3486e87c6348e9580099fe5c360138e762ab3ea9 | refs/heads/main | 2023-08-09T13:31:40.253283 | 2023-06-16T18:17:09 | 2023-06-16T18:17:09 | 153,533,129 | 50 | 25 | MIT | 2023-06-16T18:17:11 | 2018-10-17T22:47:46 | Python | UTF-8 | Python | false | false | 2,015 | py | """
Performs simple BEM simulations of the NREL 5MW turbine for two operating conditions.
"""
# --- Common libraries
from welib.BEM.steadyBEM import calcSteadyBEM, FASTFile2SteadyBEM
import os
import glob
import matplotlib.pyplot as plt
MyDir=os.path.dirname(__file__)
def main(test=False):
# --- Read a FAST model to get the main parameters needed
nB,cone,r,chord,twist,polars,rho,KinVisc = FASTFile2SteadyBEM(os.path.join(MyDir,'../../../data/NREL5MW/Main_Onshore.fst'))
# --- Run BEM on a set of operating points
if test:
WS = [5,10]
RPM = [7,12]
PITCH = [0,0]
else:
WS = [4 ,6 ,8 ,10 ,12 ,14 ,16 ,18 ,20 ,22 ,24 ,26]
RPM = [2.38,7.92,9.13,11.3,12.1,12.1,12.1,12.1,12.1,12.0,12.0,12.1]
PITCH = [0 ,0 ,0 ,0 ,3.4 ,8.4 ,11.8,14.7,17.3,19.6,21.9,23.9 ]
a0, ap0 = None,None # inductions, used to speed up parametric study
for i,ws in enumerate(WS):
V0 = WS[i] # [m/s]
Omega = RPM[i] # [rpm]
pitch = PITCH[i] # [deg]
xdot = 0 # [m/s]
u_turb = 0 # [m/s]
BEM=calcSteadyBEM(Omega,pitch,V0,xdot,u_turb,
nB,cone,r,chord,twist,polars,
rho=rho,KinVisc=KinVisc,bTIDrag=False,bAIDrag=True,
a_init =a0,
ap_init=ap0
)
a0, ap0 = BEM.a, BEM.aprime
# --- Save radial distribution to a csv file
filename='_BEM_ws{}_radial.csv'.format(V0)
if not test:
print('WS ',V0, 'Power',BEM.Power,'Thrust',BEM.Thrust)
BEM.WriteRadialFile(filename)
if __name__=="__main__":
main()
if __name__=="__test__":
main(test=True)
[os.remove(f) for f in glob.glob(os.path.join(MyDir,'_*.csv'))]
if __name__=="__export__":
pass
#main()
#from welib.tools.repo import export_figs_callback
#export_figs_callback(__file__)
| [
"[email protected]"
] | |
f6c28743af596b2f3a6eb5510fd037d64ba61fd2 | b5afe10bc5fcddc95b96d8fbfa2e1f27fe55cc17 | /app.py | 34d57788d8ddb5b285f5e557c2cd3a47b0513105 | [] | no_license | jmutakura/firstFlaskApp | 688edd6bd9f331f2d7af4132b4998fe72b20a799 | ecac6b7fe0afc147a2f0c0d0b0615f35e9fbb469 | refs/heads/master | 2020-09-29T08:42:08.507091 | 2019-12-10T01:23:08 | 2019-12-10T01:23:08 | 227,002,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | from flask import Flask, render_template, request
from data import Articles
app = Flask(__name__)
Articles = Articles()
# Routes
@app.route("/")
def index():
return render_template("home.html")
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/articles')
def articles():
return render_template('articles.html', articles=Articles)
@app.route('/article/<string:id>/')
def article(id):
    """Show one article selected by its id.

    The parameter name mirrors the URL converter variable and therefore
    intentionally shadows the ``id`` builtin; renaming it would break routing.
    """
    context = {'id': id}
    return render_template('article.html', **context)
# Run the app
if __name__ == '__main__':
    # debug=True enables the reloader and the interactive traceback page;
    # intended for development only.
    app.run(debug=True)
| [
"[email protected]"
] | |
df7264eceeee50d4cf513657e6b7c6379a222927 | eb9dde3315aa3b48325830a7f375ac1f301e402c | /environment.py | e7d27849486de101d41932b4414433ac716fdd4c | [] | no_license | CristyanGil/Project2-DRL-Udacity | d554d219f13864a72c8d56d473997fb7100e1b67 | cd20ada0f43b99761a3e740260c0a49e4751cb6e | refs/heads/master | 2021-01-06T23:48:49.159724 | 2020-02-21T04:11:39 | 2020-02-21T04:11:39 | 241,517,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,267 | py | from unityagents import UnityEnvironment
class Env():
    """Thin wrapper that simplifies interaction with a Unity ML-Agents
    environment, using the environment's default (first) brain."""
    def __init__(self, file_name="Banana_Windows_x86_64/Banana.app"):
        """Initialize a new Unity environment.
        Params
        ======
            file_name (str): the location of the environment to load
        """
        self.env = UnityEnvironment(file_name=file_name)
        # get the default brain
        self.brain_name = self.env.brain_names[0]
        self.brain = self.env.brains[self.brain_name]
        env_info = self.env.reset(train_mode=False)[self.brain_name]
        #get the action space
        self.action_size = self.brain.vector_action_space_size
        #get the state space (length of the first agent's observation vector)
        state = env_info.vector_observations[0]
        self.state_size = len(state)
        print("state_size:", self.state_size, " action_size:", self.action_size )
    def reset(self, train_mode=False):
        """Reset the Unity environment and return the current states
        (one observation vector per agent).
        Params
        ======
        train_mode (bool): Whether you want to set training mode or not
        """
        # reset the environment
        env_info = self.env.reset(train_mode=train_mode)[self.brain_name]
        states = env_info.vector_observations
        return states
    def execute(self, actions):
        """Execute a step on the environment corresponding to the action received.
        Returns the next states, the rewards received and boolean "done" flags
        indicating whether each agent has reached a terminal state.
        Params
        ======
        actions: the action(s) to perform, one per agent
        """
        #action = int(action)
        env_info = self.env.step(actions)[self.brain_name] # send the action to the environment
        next_states = env_info.vector_observations # get the next state
        rewards = env_info.rewards # get the reward
        dones = env_info.local_done # get the flag of done
        return next_states, rewards, dones
    def close(self):
        """ Closes the environment """
        self.env.close()
"[email protected]"
] | |
8ecc3974806003a22c415c8e504372168a6114f2 | 72e11a80587342b3f278d4df18406cd4ce7531e8 | /mercurial/copies.py | ee103105920f23154f756850d0aa4a15d9d61a72 | [] | no_license | EnjoyLifeFund/Debian_py36_packages | 740666f290cef73a4f634558ccf3fd4926addeda | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | refs/heads/master | 2021-08-24T02:17:24.349195 | 2017-12-06T06:18:35 | 2017-12-06T06:18:35 | 113,167,612 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,068 | py | # copies.py - copy detection for Mercurial
#
# Copyright 2008 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import collections
import heapq
import os
from .i18n import _
from . import (
match as matchmod,
node,
pathutil,
scmutil,
util,
)
def _findlimit(repo, a, b):
    """
    Find the last revision that needs to be checked to ensure that a full
    transitive closure for file copies can be properly calculated.
    Generally, this means finding the earliest revision number that's an
    ancestor of a or b but not both, except when a or b is a direct descendent
    of the other, in which case we can return the minimum revnum of a and b.
    None if no such revision exists.

    a and b are revision numbers; None stands for the working directory.
    """
    # basic idea:
    # - mark a and b with different sides
    # - if a parent's children are all on the same side, the parent is
    # on that side, otherwise it is on no side
    # - walk the graph in topological order with the help of a heap;
    # - add unseen parents to side map
    # - clear side of any parent that has children on different sides
    # - track number of interesting revs that might still be on a side
    # - track the lowest interesting rev seen
    # - quit when interesting revs is zero
    cl = repo.changelog
    working = len(cl) # pseudo rev for the working directory
    if a is None:
        a = working
    if b is None:
        b = working
    # side[r]: -1 = reachable only from a (so far), 1 = only from b, 0 = both
    side = {a: -1, b: 1}
    # negated revnums turn Python's min-heap into a max-heap: newest first
    visit = [-a, -b]
    heapq.heapify(visit)
    interesting = len(visit)
    hascommonancestor = False
    limit = working
    while interesting:
        r = -heapq.heappop(visit)
        if r == working:
            # the working directory's parents come from the dirstate
            parents = [cl.rev(p) for p in repo.dirstate.parents()]
        else:
            parents = cl.parentrevs(r)
        for p in parents:
            if p < 0:
                continue
            if p not in side:
                # first time we see p; add it to visit
                side[p] = side[r]
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                # p was interesting but now we know better
                side[p] = 0
                interesting -= 1
                hascommonancestor = True
        if side[r]:
            limit = r # lowest rev visited
            interesting -= 1
    if not hascommonancestor:
        return None
    # Consider the following flow (see test-commit-amend.t under issue4405):
    # 1/ File 'a0' committed
    # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
    # 3/ Move back to first commit
    # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
    # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
    #
    # During the amend in step five, we will be in this state:
    #
    # @ 3 temporary amend commit for a1-amend
    # |
    # o 2 a1-amend
    # |
    # | o 1 a1
    # |/
    # o 0 a0
    #
    # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
    # yet the filelog has the copy information in rev 1 and we will not look
    # back far enough unless we also look at the a and b as candidates.
    # This only occurs when a is a descendent of b or visa-versa.
    return min(limit, a, b)
def _chain(src, dst, a, b):
'''chain two sets of copies a->b'''
t = a.copy()
for k, v in b.iteritems():
if v in t:
# found a chain
if t[v] != k:
# file wasn't renamed back to itself
t[k] = t[v]
if v not in dst:
# chain was a rename, not a copy
del t[v]
if v in src:
# file is a copy of an existing file
t[k] = v
# remove criss-crossed copies
for k, v in t.items():
if k in src and v in dst:
del t[k]
return t
def _tracefile(fctx, am, limit=-1):
'''return file context that is the ancestor of fctx present in ancestor
manifest am, stopping after the first ancestor lower than limit'''
for f in fctx.ancestors():
if am.get(f.path(), None) == f.filenode():
return f
if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
return None
def _dirstatecopies(d):
ds = d._repo.dirstate
c = ds.copies().copy()
for k in list(c):
if ds[k] not in 'anm':
del c[k]
return c
def _computeforwardmissing(a, b, match=None):
"""Computes which files are in b but not a.
This is its own function so extensions can easily wrap this call to see what
files _forwardcopies is about to process.
"""
ma = a.manifest()
mb = b.manifest()
return mb.filesnotin(ma, match=match)
def _forwardcopies(a, b, match=None):
    '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
    # check for working copy
    w = None
    if b.rev() is None:
        # reason about b's first parent and fold in dirstate copies at the end
        w = b
        b = w.p1()
        if a == b:
            # short-circuit to avoid issues with merge states
            return _dirstatecopies(w)
    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    limit = _findlimit(a._repo, a.rev(), b.rev())
    if limit is None:
        limit = -1
    am = a.manifest()
    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    cm = {}
    # Computing the forward missing is quite expensive on large manifests, since
    # it compares the entire manifests. We can optimize it in the common use
    # case of computing what copies are in a commit versus its parent (like
    # during a rebase or histedit). Note, we exclude merge commits from this
    # optimization, since the ctx.files() for a merge commit is not correct for
    # this comparison.
    forwardmissingmatch = match
    if b.p1() == a and b.p2().node() == node.nullid:
        filesmatcher = scmutil.matchfiles(a._repo, b.files())
        forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
    missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
    # share one ancestry context across all traced files to keep linkrev
    # adjustment cheap (see _makegetfctx for the rationale)
    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
    for f in missing:
        fctx = b[f]
        fctx._ancestrycontext = ancestrycontext
        ofctx = _tracefile(fctx, am, limit)
        if ofctx:
            cm[f] = ofctx.path()
    # combine copies from dirstate if necessary
    if w is not None:
        cm = _chain(a, w, cm, _dirstatecopies(w))
    return cm
def _backwardrenames(a, b):
    """Return {src@b: dst@a} for files renamed between ``b`` and ``a``,
    walking the forward copies from ``b`` to ``a`` in reverse."""
    if a._repo.ui.config('experimental', 'copytrace') == 'off':
        return {}
    # Even though we're not taking copies into account, 1:n rename situations
    # can still exist (e.g. hg cp a b; hg mv a c); sorting makes the
    # arbitrary pick among them stable.
    forward = _forwardcopies(b, a)
    renames = {}
    for dest, source in sorted(forward.iteritems()):
        if source in a:
            # the source still exists in 'a': that was a copy, not a rename
            continue
        renames[source] = dest
    return renames
def pathcopies(x, y, match=None):
    '''find {dst@y: src@x} copy mapping for directed compare'''
    # degenerate comparisons carry no copy information
    if x == y or not x or not y:
        return {}
    anc = y.ancestor(x)
    if anc == x:
        # y descends from x: a plain forward walk suffices
        return _forwardcopies(x, y, match=match)
    if anc == y:
        # x descends from y: walk the renames backwards
        return _backwardrenames(x, y)
    # otherwise walk back down to the common ancestor, then forward up to y
    backward = _backwardrenames(x, anc)
    forward = _forwardcopies(anc, y, match=match)
    return _chain(x, y, backward, forward)
def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
"""Computes, based on addedinm1 and addedinm2, the files exclusive to c1
and c2. This is its own function so extensions can easily wrap this call
to see what files mergecopies is about to process.
Even though c1 and c2 are not used in this function, they are useful in
other extensions for being able to read the file nodes of the changed files.
"baselabel" can be passed to help distinguish the multiple computations
done in the graft case.
"""
u1 = sorted(addedinm1 - addedinm2)
u2 = sorted(addedinm2 - addedinm1)
header = " unmatched files in %s"
if baselabel:
header += ' (from %s)' % baselabel
if u1:
repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
if u2:
repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
return u1, u2
def _makegetfctx(ctx):
    """return a 'getfctx' function suitable for _checkcopies usage

    We have to re-setup the function building 'filectx' for each
    '_checkcopies' to ensure the linkrev adjustment is properly setup for
    each. Linkrev adjustment is important to avoid bug in rename
    detection. Moreover, having a proper '_ancestrycontext' setup ensures
    the performance impact of this adjustment is kept limited. Without it,
    each file could do a full dag traversal making the time complexity of
    the operation explode (see issue4537).

    This function exists here mostly to limit the impact on stable. Feel
    free to refactor on default.
    """
    rev = ctx.rev()
    repo = ctx._repo
    ac = getattr(ctx, '_ancestrycontext', None)
    if ac is None:
        # build the ancestry context once and cache it on the changectx
        revs = [rev]
        if rev is None:
            # working context: use its parents' revisions instead
            revs = [p.rev() for p in ctx.parents()]
        ac = repo.changelog.ancestors(revs, inclusive=True)
        ctx._ancestrycontext = ac
    def makectx(f, n):
        # build the filectx for file f at filenode n
        if n in node.wdirnodes: # in a working context?
            if ctx.rev() is None:
                return ctx.filectx(f)
            return repo[None][f]
        fctx = repo.filectx(f, fileid=n)
        # setup only needed for filectx not create from a changectx
        fctx._ancestrycontext = ac
        fctx._descendantrev = rev
        return fctx
    # memoize: _checkcopies may request the same (f, n) pair repeatedly
    return util.lrucachefunc(makectx)
def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
"""combine partial copy paths"""
remainder = {}
for f in copyfrom:
if f in copyto:
finalcopy[copyto[f]] = copyfrom[f]
del copyto[f]
for f in incompletediverge:
assert f not in diverge
ic = incompletediverge[f]
if ic[0] in copyto:
diverge[f] = [copyto[ic[0]], ic[1]]
else:
remainder[f] = ic
return remainder
def mergecopies(repo, c1, c2, base):
    """
    The function calling different copytracing algorithms on the basis of config
    which find moves and copies between context c1 and c2 that are relevant for
    merging. 'base' will be used as the merge base.

    Copytracing is used in commands like rebase, merge, unshelve, etc to merge
    files that were moved/ copied in one merge parent and modified in another.
    For example:

    o ---> 4 another commit
    |
    | o ---> 3 commit that modifies a.txt
    | /
    o / ---> 2 commit that moves a.txt to b.txt
    |/
    o ---> 1 merge base

    If we try to rebase revision 3 on revision 4, since there is no a.txt in
    revision 4, and if user have copytrace disabled, we prints the following
    message:

    ```other changed <file> which local deleted```

    Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
    "dirmove".

    "copy" is a mapping from destination name -> source name,
    where source is in c1 and destination is in c2 or vice-versa.

    "movewithdir" is a mapping from source name -> destination name,
    where the file at source present in one context but not the other
    needs to be moved to destination by the merge process, because the
    other context moved the directory it is in.

    "diverge" is a mapping of source name -> list of destination names
    for divergent renames.

    "renamedelete" is a mapping of source name -> list of destination
    names for files deleted in c1 that were renamed in c2 or vice-versa.

    "dirmove" is a mapping of detected source dir -> destination dir renames.
    This is needed for handling changes to new files previously grafted into
    renamed directories.
    """
    # avoid silly behavior for update from empty dir
    if not c1 or not c2 or c1 == c2:
        return {}, {}, {}, {}, {}
    # avoid silly behavior for parent -> working dir
    if c2.node() is None and c1.node() == repo.dirstate.p1():
        # fast path: the dirstate already records these copies
        return repo.dirstate.copies(), {}, {}, {}, {}
    copytracing = repo.ui.config('experimental', 'copytrace')
    # Copy trace disabling is explicitly below the node == p1 logic above
    # because the logic above is required for a simple copy to be kept across a
    # rebase.
    if copytracing == 'off':
        return {}, {}, {}, {}, {}
    elif copytracing == 'heuristics':
        # Do full copytracing if only non-public revisions are involved as
        # that will be fast enough and will also cover the copies which could
        # be missed by heuristics
        if _isfullcopytraceable(repo, c1, base):
            return _fullcopytracing(repo, c1, c2, base)
        return _heuristicscopytracing(repo, c1, c2, base)
    else:
        return _fullcopytracing(repo, c1, c2, base)
def _isfullcopytraceable(repo, c1, base):
""" Checks that if base, source and destination are all no-public branches,
if yes let's use the full copytrace algorithm for increased capabilities
since it will be fast enough.
`experimental.copytrace.sourcecommitlimit` can be used to set a limit for
number of changesets from c1 to base such that if number of changesets are
more than the limit, full copytracing algorithm won't be used.
"""
if c1.rev() is None:
c1 = c1.p1()
if c1.mutable() and base.mutable():
sourcecommitlimit = repo.ui.configint('experimental',
'copytrace.sourcecommitlimit')
commits = len(repo.revs('%d::%d', base.rev(), c1.rev()))
return commits < sourcecommitlimit
return False
def _fullcopytracing(repo, c1, c2, base):
    """ The full copytracing algorithm which finds all the new files that were
    added from merge base up to the top commit and for each file it checks if
    this file was copied from another file.

    This is pretty slow when a lot of changesets are involved but will track all
    the copies.

    Returns the same five dicts as mergecopies():
    (copy, movewithdir, diverge, renamedelete, dirmove).
    """
    # In certain scenarios (e.g. graft, update or rebase), base can be
    # overridden We still need to know a real common ancestor in this case We
    # can't just compute _c1.ancestor(_c2) and compare it to ca, because there
    # can be multiple common ancestors, e.g. in case of bidmerge. Because our
    # caller may not know if the revision passed in lieu of the CA is a genuine
    # common ancestor or not without explicitly checking it, it's better to
    # determine that here.
    #
    # base.descendant(wc) and base.descendant(base) are False, work around that
    _c1 = c1.p1() if c1.rev() is None else c1
    _c2 = c2.p1() if c2.rev() is None else c2
    # an endpoint is "dirty" if it isn't a descendant of the merge base
    # if we have a dirty endpoint, we need to trigger graft logic, and also
    # keep track of which endpoint is dirty
    dirtyc1 = not (base == _c1 or base.descendant(_c1))
    dirtyc2 = not (base == _c2 or base.descendant(_c2))
    graft = dirtyc1 or dirtyc2
    tca = base
    if graft:
        tca = _c1.ancestor(_c2)
    limit = _findlimit(repo, c1.rev(), c2.rev())
    if limit is None:
        # no common ancestor, no copies
        return {}, {}, {}, {}, {}
    repo.ui.debug(" searching for copies back to rev %d\n" % limit)
    m1 = c1.manifest()
    m2 = c2.manifest()
    mb = base.manifest()
    # gather data from _checkcopies:
    # - diverge = record all diverges in this dict
    # - copy = record all non-divergent copies in this dict
    # - fullcopy = record all copies in this dict
    # - incomplete = record non-divergent partial copies here
    # - incompletediverge = record divergent partial copies here
    diverge = {} # divergence data is shared
    incompletediverge = {}
    data1 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': diverge,
             'incompletediverge': incompletediverge,
            }
    data2 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': diverge,
             'incompletediverge': incompletediverge,
            }
    # find interesting file sets from manifests
    addedinm1 = m1.filesnotin(mb)
    addedinm2 = m2.filesnotin(mb)
    bothnew = sorted(addedinm1 & addedinm2)
    if tca == base:
        # unmatched file from base
        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
        u1u, u2u = u1r, u2r
    else:
        # unmatched file from base (DAG rotation in the graft case)
        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
                                      baselabel='base')
        # unmatched file from topological common ancestors (no DAG rotation)
        # need to recompute this for directory move handling when grafting
        mta = tca.manifest()
        u1u, u2u = _computenonoverlap(repo, c1, c2, m1.filesnotin(mta),
                                      m2.filesnotin(mta),
                                      baselabel='topological common ancestor')
    # trace each one-sided file on its own side
    for f in u1u:
        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
    for f in u2u:
        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
    copy = dict(data1['copy'])
    copy.update(data2['copy'])
    fullcopy = dict(data1['fullcopy'])
    fullcopy.update(data2['fullcopy'])
    if dirtyc1:
        _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
                       incompletediverge)
    else:
        _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
                       incompletediverge)
    renamedelete = {}
    renamedeleteset = set()
    divergeset = set()
    # separate true divergences from rename+delete situations
    for of, fl in list(diverge.items()):
        if len(fl) == 1 or of in c1 or of in c2:
            del diverge[of] # not actually divergent, or not a rename
            if of not in c1 and of not in c2:
                # renamed on one side, deleted on the other side, but filter
                # out files that have been renamed and then deleted
                renamedelete[of] = [f for f in fl if f in c1 or f in c2]
                renamedeleteset.update(fl) # reverse map for below
        else:
            divergeset.update(fl) # reverse map for below
    if bothnew:
        repo.ui.debug(" unmatched files new in both:\n %s\n"
                      % "\n ".join(bothnew))
    bothdiverge = {}
    bothincompletediverge = {}
    remainder = {}
    both1 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': bothdiverge,
             'incompletediverge': bothincompletediverge
            }
    both2 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': bothdiverge,
             'incompletediverge': bothincompletediverge
            }
    for f in bothnew:
        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
    if dirtyc1:
        # incomplete copies may only be found on the "dirty" side for bothnew
        assert not both2['incomplete']
        remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
                                   bothincompletediverge)
    elif dirtyc2:
        assert not both1['incomplete']
        remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
                                   bothincompletediverge)
    else:
        # incomplete copies and divergences can't happen outside grafts
        assert not both1['incomplete']
        assert not both2['incomplete']
        assert not bothincompletediverge
    for f in remainder:
        assert f not in bothdiverge
        ic = remainder[f]
        if ic[0] in (m1 if dirtyc1 else m2):
            # backed-out rename on one side, but watch out for deleted files
            bothdiverge[f] = ic
    for of, fl in bothdiverge.items():
        if len(fl) == 2 and fl[0] == fl[1]:
            copy[fl[0]] = of # not actually divergent, just matching renames
    if fullcopy and repo.ui.debugflag:
        repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
                      "% = renamed and deleted):\n")
        for f in sorted(fullcopy):
            note = ""
            if f in copy:
                note += "*"
            if f in divergeset:
                note += "!"
            if f in renamedeleteset:
                note += "%"
            repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
                                                            note))
    del divergeset
    if not fullcopy:
        return copy, {}, diverge, renamedelete, {}
    repo.ui.debug(" checking for directory renames\n")
    # generate a directory move map
    d1, d2 = c1.dirs(), c2.dirs()
    # Hack for adding '', which is not otherwise added, to d1 and d2
    d1.addpath('/')
    d2.addpath('/')
    invalid = set()
    dirmove = {}
    # examine each file copy for a potential directory move, which is
    # when all the files in a directory are moved to a new directory
    for dst, src in fullcopy.iteritems():
        dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
        if dsrc in invalid:
            # already seen to be uninteresting
            continue
        elif dsrc in d1 and ddst in d1:
            # directory wasn't entirely moved locally
            invalid.add(dsrc + "/")
        elif dsrc in d2 and ddst in d2:
            # directory wasn't entirely moved remotely
            invalid.add(dsrc + "/")
        elif dsrc + "/" in dirmove and dirmove[dsrc + "/"] != ddst + "/":
            # files from the same directory moved to two different places
            invalid.add(dsrc + "/")
        else:
            # looks good so far
            dirmove[dsrc + "/"] = ddst + "/"
    for i in invalid:
        if i in dirmove:
            del dirmove[i]
    del d1, d2, invalid
    if not dirmove:
        return copy, {}, diverge, renamedelete, {}
    for d in dirmove:
        repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
                      (d, dirmove[d]))
    movewithdir = {}
    # check unaccounted nonoverlapping files against directory moves
    for f in u1r + u2r:
        if f not in fullcopy:
            for d in dirmove:
                if f.startswith(d):
                    # new file added in a directory that was moved, move it
                    df = dirmove[d] + f[len(d):]
                    if df not in copy:
                        movewithdir[f] = df
                        repo.ui.debug((" pending file src: '%s' -> "
                                       "dst: '%s'\n") % (f, df))
                    break
    return copy, movewithdir, diverge, renamedelete, dirmove
def _heuristicscopytracing(repo, c1, c2, base):
    """ Fast copytracing using filename heuristics

    Assumes that moves or renames are of following two types:

    1) Inside a directory only (same directory name but different filenames)
    2) Move from one directory to another
    (same filenames but different directory names)

    Works only when there are no merge commits in the "source branch".
    Source branch is commits from base up to c2 not including base.

    If merge is involved it fallbacks to _fullcopytracing().

    Can be used by setting the following config:

    [experimental]
    copytrace = heuristics

    In some cases the copy/move candidates found by heuristics can be very large
    in number and that will make the algorithm slow. The number of possible
    candidates to check can be limited by using the config
    `experimental.copytrace.movecandidateslimit` which defaults to 100.

    Returns the same five dicts as mergecopies(); only the first ("copy") is
    ever populated by this algorithm.
    """
    if c1.rev() is None:
        c1 = c1.p1()
    if c2.rev() is None:
        c2 = c2.p1()
    copies = {}
    changedfiles = set()
    m1 = c1.manifest()
    if not repo.revs('%d::%d', base.rev(), c2.rev()):
        # If base is not in c2 branch, we switch to fullcopytracing
        repo.ui.debug("switching to full copytracing as base is not "
                      "an ancestor of c2\n")
        return _fullcopytracing(repo, c1, c2, base)
    ctx = c2
    while ctx != base:
        if len(ctx.parents()) == 2:
            # To keep things simple let's not handle merges
            repo.ui.debug("switching to full copytracing because of merges\n")
            return _fullcopytracing(repo, c1, c2, base)
        changedfiles.update(ctx.files())
        ctx = ctx.p1()
    cp = _forwardcopies(base, c2)
    for dst, src in cp.iteritems():
        if src in m1:
            copies[dst] = src
    # file is missing if it isn't present in the destination, but is present in
    # the base and present in the source.
    # Presence in the base is important to exclude added files, presence in the
    # source is important to exclude removed files.
    # BUGFIX: use a list comprehension instead of filter(); under Python 3
    # filter() returns a lazy object that is always truthy, which would make
    # the `if missingfiles:` check below succeed even with no missing files.
    missingfiles = [f for f in changedfiles
                    if f not in m1 and f in base and f in c2]
    if missingfiles:
        basenametofilename = collections.defaultdict(list)
        dirnametofilename = collections.defaultdict(list)
        # index the files added in m1 by basename and by directory
        for f in m1.filesnotin(base.manifest()):
            basename = os.path.basename(f)
            dirname = os.path.dirname(f)
            basenametofilename[basename].append(f)
            dirnametofilename[dirname].append(f)
        # in case of a rebase/graft, base may not be a common ancestor
        anc = c1.ancestor(c2)
        for f in missingfiles:
            basename = os.path.basename(f)
            dirname = os.path.dirname(f)
            samebasename = basenametofilename[basename]
            samedirname = dirnametofilename[dirname]
            movecandidates = samebasename + samedirname
            # f is guaranteed to be present in c2, that's why
            # c2.filectx(f) won't fail
            f2 = c2.filectx(f)
            # we can have a lot of candidates which can slow down the heuristics
            # config value to limit the number of candidates moves to check
            maxcandidates = repo.ui.configint('experimental',
                                              'copytrace.movecandidateslimit')
            if len(movecandidates) > maxcandidates:
                repo.ui.status(_("skipping copytracing for '%s', more "
                                 "candidates than the limit: %d\n")
                               % (f, len(movecandidates)))
                continue
            for candidate in movecandidates:
                f1 = c1.filectx(candidate)
                if _related(f1, f2, anc.rev()):
                    # if there are a few related copies then we'll merge
                    # changes into all of them. This matches the behaviour
                    # of upstream copytracing
                    copies[candidate] = f
    return copies, {}, {}, {}, {}
def _related(f1, f2, limit):
    """return True if f1 and f2 filectx have a common ancestor

    Walk back to common ancestor to see if the two files originate
    from the same file. Since workingfilectx's rev() is None it messes
    up the integer comparison logic, hence the pre-step check for
    None (f1 and f2 can only be workingfilectx's initially).

    NOTE: despite the summary line, on a match this actually returns the
    common filectx (which is truthy), not the literal True.
    """
    if f1 == f2:
        return f1 # a match
    g1, g2 = f1.ancestors(), f2.ancestors()
    try:
        f1r, f2r = f1.linkrev(), f2.linkrev()
        # working contexts have linkrev None: step to the first real ancestor
        if f1r is None:
            f1 = next(g1)
        if f2r is None:
            f2 = next(g2)
        # walk both ancestor chains in lockstep, always advancing whichever
        # side currently has the higher linkrev, until they meet or diverge
        while True:
            f1r, f2r = f1.linkrev(), f2.linkrev()
            if f1r > f2r:
                f1 = next(g1)
            elif f2r > f1r:
                f2 = next(g2)
            elif f1 == f2:
                return f1 # a match
            elif f1r == f2r or f1r < limit or f2r < limit:
                return False # copy no longer relevant
    except StopIteration:
        # one side ran out of ancestors: the files are unrelated
        return False
def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
    """
    check possible copies of f from msrc to mdst

    srcctx = starting context for f in msrc
    dstctx = destination context for f in mdst
    f = the filename to check (as in msrc)
    base = the changectx used as a merge base
    tca = topological common ancestor for graft-like scenarios
    remotebase = True if base is outside tca::srcctx, False otherwise
    limit = the rev number to not search beyond
    data = dictionary of dictionary to store copy data. (see mergecopies)

    note: limit is only an optimization, and provides no guarantee that
    irrelevant revisions will not be visited
    there is no easy way to make this algorithm stop in a guaranteed way
    once it "goes behind a certain revision".
    """
    msrc = srcctx.manifest()
    mdst = dstctx.manifest()
    mb = base.manifest()
    mta = tca.manifest()
    # Might be true if this call is about finding backward renames,
    # This happens in the case of grafts because the DAG is then rotated.
    # If the file exists in both the base and the source, we are not looking
    # for a rename on the source side, but on the part of the DAG that is
    # traversed backwards.
    #
    # In the case there is both backward and forward renames (before and after
    # the base) this is more complicated as we must detect a divergence.
    # We use 'backwards = False' in that case.
    backwards = not remotebase and base != tca and f in mb
    getsrcfctx = _makegetfctx(srcctx)
    getdstfctx = _makegetfctx(dstctx)
    if msrc[f] == mb.get(f) and not remotebase:
        # Nothing to merge
        return
    of = None
    seen = {f}
    # walk f's ancestry in the source side, recording each name it had
    for oc in getsrcfctx(f, msrc[f]).ancestors():
        ocr = oc.linkrev()
        of = oc.path()
        if of in seen:
            # check limit late - grab last rename before
            if ocr < limit:
                break
            continue
        seen.add(of)
        # remember for dir rename detection
        if backwards:
            data['fullcopy'][of] = f # grafting backwards through renames
        else:
            data['fullcopy'][f] = of
        if of not in mdst:
            continue # no match, keep looking
        if mdst[of] == mb.get(of):
            return # no merge needed, quit early
        c2 = getdstfctx(of, mdst[of])
        # c2 might be a plain new file on added on destination side that is
        # unrelated to the droids we are looking for.
        cr = _related(oc, c2, tca.rev())
        if cr and (of == f or of == c2.path()): # non-divergent
            if backwards:
                data['copy'][of] = f
            elif of in mb:
                data['copy'][f] = of
            elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
                data['copy'][of] = f
                del data['fullcopy'][f]
                data['fullcopy'][of] = f
            else: # divergence w.r.t. graft CA on one side of topological CA
                for sf in seen:
                    if sf in mb:
                        assert sf not in data['diverge']
                        data['diverge'][sf] = [f, of]
                        break
            return
    # no complete copy chain found; record a partial one if the last name
    # reached exists in the topological common ancestor
    if of in mta:
        if backwards or remotebase:
            data['incomplete'][of] = f
        else:
            for sf in seen:
                if sf in mb:
                    if tca == base:
                        data['diverge'].setdefault(sf, []).append(f)
                    else:
                        data['incompletediverge'][sf] = [of, f]
                    return
def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
    '''reproduce copies from fromrev to rev in the dirstate

    If skiprev is specified, it's a revision that should be used to
    filter copy records. Any copies that occur between fromrev and
    skiprev will not be duplicated, even if they appear in the set of
    copies between fromrev and rev.

    wctx is the working context whose file contexts receive the copy marks.
    '''
    exclude = {}
    if (skiprev is not None and
        repo.ui.config('experimental', 'copytrace') != 'off'):
        # copytrace='off' skips this line, but not the entire function because
        # the line below is O(size of the repo) during a rebase, while the rest
        # of the function is much faster (and is required for carrying copy
        # metadata across the rebase anyway).
        exclude = pathcopies(repo[fromrev], repo[skiprev])
    for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
        # copies.pathcopies returns backward renames, so dst might not
        # actually be in the dirstate
        if dst in exclude:
            continue
        wctx[dst].markcopied(src)
| [
"[email protected]"
] | |
2076e87530efc1a612a2e913284bf9236f2eb9f1 | 79bb25fbd8f94ea8448d15f4c3f472866656c2b3 | /pylung/libinfo.py | 6a96a92c438dc35dc6e9041c3b2332e2c84d270b | [] | no_license | dact931/test-alexnet-nodule-detection | 621e08600df5a4113f6ff927199ca4800d8fe355 | 09ae4ea7969b3136761a1ce90ba9cb26bfc1ff29 | refs/heads/master | 2023-03-16T07:26:52.292359 | 2020-03-25T09:17:18 | 2020-03-25T09:17:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,663 | py | # coding: utf-8
"""Information about pylung."""
from __future__ import absolute_import
import os
import platform
def find_lib_path():
    """Locate the PyLung dynamic library on disk.

    Returns
    -------
    lib_path : list(string)
        Every candidate path at which the library file actually exists.

    Raises
    ------
    RuntimeError
        If the library exists in none of the candidate directories.
    """
    curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    # default search order: package dir, source-tree lib/, CMake build output
    search_dirs = [
        curr_path,
        os.path.join(curr_path, '../../lib/'),
        os.path.join(curr_path, '../../build/Release/'),
    ]
    if os.name == 'nt':
        vs_configuration = 'Release'
        # both architectures look in the build tree first
        search_dirs.append(os.path.join(curr_path, '../../build', vs_configuration))
        if platform.architecture()[0] == '64bit':
            search_dirs.append(os.path.join(curr_path, '../../windows/x64', vs_configuration))
        else:
            search_dirs.append(os.path.join(curr_path, '../../windows', vs_configuration))
    elif os.name == "posix" and os.environ.get('LD_LIBRARY_PATH', None):
        search_dirs.extend(p.strip() for p in os.environ['LD_LIBRARY_PATH'].split(":"))
    libname = 'libpylung.dll' if os.name == 'nt' else 'libpylung.so'
    candidates = [os.path.join(d, libname) for d in search_dirs]
    lib_path = [p for p in candidates if os.path.exists(p) and os.path.isfile(p)]
    if len(lib_path) == 0:
        raise RuntimeError('Cannot find the files.\n' +
                           'List of candidates:\n' + str('\n'.join(candidates)))
    return lib_path
# current package version, exposed as the conventional module attribute
__version__ = "0.0.1"
| [
"[email protected]"
] | |
f5b6c75f58424d4a61a85593047d73da700b6966 | 9f206c2741163881f37a8123d0bedc34359da4ca | /tests/test_comment.py | e3c11b296057b0413d10ed4c8e6993a008e38f19 | [
"MIT"
] | permissive | OsmanMariam/On-the-blog | 6eaa41a04b330669fe4535b52668cb9dd227e400 | 455bdb22d6768ca607afaff3b8587e710738e85d | refs/heads/master | 2022-10-17T20:07:15.233294 | 2019-12-03T08:38:25 | 2019-12-03T08:38:25 | 224,815,084 | 0 | 0 | null | 2022-09-16T18:13:55 | 2019-11-29T08:54:23 | Python | UTF-8 | Python | false | false | 1,490 | py | import unittest
from app.models import Comment,User
from flask_login import current_user
from app import db
class TestComment(unittest.TestCase):
def setUp(self):
self.user_Mariam = User(username = 'Mariam',
password = 'jasonmusa8',
email = '[email protected]')
self.new_comment = Comment(id=12345,
post_comment="great post!",
category_id='funny',
blogposts="great post,
user_id = self.user_Mariam)
def tearDown(self):
Comment.query.delete()
User.query.delete()
def test_instance(self):
self.assertTrue(isinstance(self.new_comment,Comment))
def test_check_instance_variables(self):
self.assertEquals(self.new_comment.id,12345)
self.assertEquals(self.new_comment.post_comment,"great psot!")
self.assertEquals(self.new_comment.category_id,'funny')
self.assertEquals(self.new_comment.blogposts, 'great post')
self.assertEquals(self.new_comment.user,self.user_Mariam)
def test_save_comment(self):
self.new_comment.save_comment()
self.assertTrue(len(Comment.query.all())>0)
def test_get_comment_by_id(self):
self.new_comment.save_comment()
got_comments = Comment.get_comments(12345)
self.assertTrue(len(got_comments) == 1) | [
"[email protected]"
] | |
b9e0fecc192934b729bd9d3dcdb10babe1e359d4 | 633944f913050debf0764c2a29cf3e88f912670e | /v8/depot_tools/bootstrap-3.8.0b1.chromium.1_bin/python3/lib/python3.8/idlelib/idle_test/test_autoexpand.py | 894e53a824f59742554580e124a0407a4653b948 | [
"BSD-3-Clause",
"bzip2-1.0.6",
"SunPro",
"Apache-2.0"
] | permissive | bopopescu/V8-lgtm | 0474c2ff39baf754f556ef57619ceae93e7320fd | da307e2f7abfca5fa0e860a809de6cd07fd1b72b | refs/heads/master | 2022-02-16T19:10:54.008520 | 2019-09-25T07:51:13 | 2019-09-25T07:51:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | ../../../../../.cipd/pkgs/2/_current/lib/python3.8/idlelib/idle_test/test_autoexpand.py | [
"[email protected]"
] | |
fdf2ebb912149a60688c5be344b9b422603aa93d | 20f4a06139d9e0b43d5a93f1955b4064ca08a966 | /Pymol/scripts/nVisualize-Pymol-PDB.py | ec5b3bc7570b4da92ea1f88cb3be3d65e94b5daa | [
"BSD-3-Clause"
] | permissive | tjmustard/Eta_Scripts | ca4c9e18f6c4bb6676e3672e77cb36a3173205cc | 7aa44cd8ce9648e27db62d58d683705d8fcb4120 | refs/heads/master | 2021-05-21T06:25:31.757798 | 2020-10-15T15:31:12 | 2020-10-15T15:31:12 | 252,584,079 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,517 | py | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Thomas J. L. Mustard, O. Maduka Ogba, Paul Ha-Yeon Cheong
#
# PHYC Group
# Oregon State University
# College of Science
# Department of Chemistry
# 153 Gilbert Hall
# Corvallis OR, 97331
# E-mail: [email protected]
# Ph. (541)-737-2081
# http://phyc.chem.oregonstate.edu/
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the {organization} nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from sys import *
import sys
import getopt
def make_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
return
def getbasename(name, extension):
basename = name[::-1].replace(extension[::-1], '')[::-1]
return basename
### --- Arguments --- ###
program = 'nVisualize-Pymol-PDB.py'
dist = False
EtaDir = os.environ['ETADIR']
build = 'Pymolv1.3-Build.pml'
visual = 'Pymolv1.3-Visualize.pml'
### Read command line args
try:
(myopts, args) = getopt.getopt(sys.argv[1:], 'b:v:dh')
except getopt.GetoptError:
print(program + ' -d (generate distances) -h')
sys.exit(2)
###############################
# o == option
# a == argument passed to the o
###############################
for (o, a) in myopts:
if o == '-d':
dist = True
elif o == '-v':
visual = a
elif o == '-b':
build = a
elif o == '-h':
print(program + ' -h')
sys.exit(0)
else:
print('Usage: %s -h' % sys.argv[0])
sys.exit(0)
### --- Make the Pymol folder --- ###
make_dir('Pymol-Picture')
### --- Grab the build and visualize configuration file --- ###
f = open(EtaDir + '/Pymol/snippets/' + build, 'r')
buildlines = f.readlines()
f.close()
f = open(EtaDir + '/Pymol/snippets/' + visual, 'r')
vislines = f.readlines()
f.close()
os.system('cp ' + EtaDir
+ '/Pymol/snippets/Pymolv1.3-Visualize.pml Pymol-Picture/')
### --- Creat the Visual-Script.pml file --- ###
f = open('Pymol-Picture/Visual-Script.pml', 'w')
f.write('''#==========================================
#Start
''')
### --- Iterating through a folder of files --- ###
for i in os.listdir(os.getcwd()):
if i.endswith('.pdb'):
ifile = i
basename = getbasename(ifile, '.pdb')
os.system('cp ' + basename + '.pdb Pymol-Picture/' + basename
+ '_orig.pdb')
os.system('cp ' + basename + '.pdb Pymol-Picture/' + basename
+ '_high.pdb')
os.system('cp ' + basename + '.pdb Pymol-Picture/' + basename
+ '_med.pdb')
os.system('cp ' + basename + '.pdb Pymol-Picture/' + basename
+ '_low.pdb')
os.system('cp ' + basename + '.pdb Pymol-Picture/' + basename
+ '_dat.pdb')
f.write('load ' + basename + '_orig.pdb\n')
f.write('load ' + basename + '_high.pdb\n')
f.write('load ' + basename + '_med.pdb\n')
f.write('load ' + basename + '_low.pdb\n')
f.write('load ' + basename + '_dat.pdb\n')
f.write('#grouping objects\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('group ' + basename + ', ' + basename + '_orig\n')
f.write('group ' + basename + ', ' + basename + '_high\n')
f.write('group ' + basename + ', ' + basename + '_med\n')
f.write('group ' + basename + ', ' + basename + '_low\n')
f.write('group ' + basename + ', ' + basename + '_dat\n')
f.write('\n')
if dist:
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#DISTANCE, DISTANCES\n')
f.write('#Defining distances (dist)\n')
f.write('distance Distance_HX_' + basename
+ '_DIST = (elem H) and ' + basename
+ '_high, (neighbor elem H) and ' + basename
+ '_high, 2.0;color Grey, Distance_HX\n')
f.write('distance Distance_CX_' + basename
+ '_DIST = (elem C) and ' + basename
+ '_high, (neighbor elem C) and ' + basename
+ '_high, 2.0;color Grey, Distance_CX\n')
f.write('distance Distance_HB_' + basename
+ '_DIST = (elem B) and ' + basename
+ '_high, (elem H and neighbor elem B) and '
+ basename
+ '_high, 2.0;color purple, Distance_HB\n')
f.write('distance Distance_HC_' + basename
+ '_DIST = (elem C) and ' + basename
+ '_high, (elem H and neighbor elem C) and '
+ basename + '_high, 2.0;color Black, Distance_HC\n'
)
f.write('distance Distance_HN_' + basename
+ '_DIST = (elem N) and ' + basename
+ '_high, (elem H and neighbor elem N) and '
+ basename + '_high, 2.0;color Blue, Distance_HN\n')
f.write('distance Distance_HO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem H and neighbor elem O) and '
+ basename + '_high, 2.0;color Red, Distance_HO\n')
f.write('distance Distance_HSi_' + basename
+ '_DIST = (name Si) and ' + basename
+ '_high, (elem H and neighbor elem Si) and '
+ basename
+ '_high, 2.0;color Orange, Distance_HSi\n')
f.write('distance Distance_HP_' + basename
+ '_DIST = (elem P) and ' + basename
+ '_high, (elem H and neighbor elem P) and '
+ basename
+ '_high, 2.0;color Orange, Distance_HP\n')
f.write('distance Distance_HS_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem H and neighbor elem S) and '
+ basename
+ '_high, 2.0;color Orange, Distance_HS\n')
f.write('distance Distance_BC_' + basename
+ '_DIST = (elem B) and ' + basename
+ '_high, (elem C and neighbor elem B) and '
+ basename + '_high, 2.0;color Black, Distance_BC\n'
)
f.write('distance Distance_BN_' + basename
+ '_DIST = (elem N) and ' + basename
+ '_high, (elem B and neighbor elem N) and '
+ basename + '_high, 2.0;color Blue, Distance_BN\n')
f.write('distance Distance_BO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem B and neighbor elem O) and '
+ basename + '_high, 2.0;color Red, Distance_BO\n')
f.write('distance Distance_CC_' + basename
+ '_DIST = (elem C) and ' + basename
+ '_high, (elem C) and ' + basename
+ '_high, 1.6;color Black, Distance_CC\n')
f.write('distance Distance_CN_' + basename
+ '_DIST = (elem N) and ' + basename
+ '_high, (elem C and neighbor elem N) and '
+ basename + '_high, 2.0;color Blue, Distance_CN\n')
f.write('distance Distance_CO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem C and neighbor elem O) and '
+ basename + '_high, 2.0;color Red, Distance_CO\n')
f.write('distance Distance_CSi_' + basename
+ '_DIST = (name Si) and ' + basename
+ '_high, (elem C and neighbor elem Si) and '
+ basename
+ '_high, 2.0;color Orange, Distance_CSi\n')
f.write('distance Distance_CP_' + basename
+ '_DIST = (elem P) and ' + basename
+ '_high, (elem C and neighbor elem P) and '
+ basename
+ '_high, 2.0;color Orange, Distance_CP\n')
f.write('distance Distance_CS_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem C and neighbor elem S) and '
+ basename
+ '_high, 2.0;color Orange, Distance_CS\n')
f.write('distance Distance_NN_' + basename
+ '_DIST = (elem N) and ' + basename
+ '_high, (elem N and neighbor elem N) and '
+ basename + '_high, 2.0;color Blue, Distance_NN\n')
f.write('distance Distance_NO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem N neighbor elem O) and ' + basename
+ '_high, 2.0;color Blue, Distance_NO\n')
f.write('distance Distance_NSi_' + basename
+ '_DIST = (name Si) and ' + basename
+ '_high, (elem N and neighbor elem Si) and '
+ basename
+ '_high, 2.0;color Orange, Distance_NSi\n')
f.write('distance Distance_NP_' + basename
+ '_DIST = (elem P) and ' + basename
+ '_high, (elem N and neighbor elem P) and '
+ basename
+ '_high, 2.0;color Orange, Distance_NP\n')
f.write('distance Distance_NS_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem N and neighbor elem S) and '
+ basename
+ '_high, 2.0;color Orange, Distance_NS\n')
f.write('distance Distance_OO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem O and neighbor elem O) and '
+ basename + '_high, 2.0;color Red, Distance_OO\n')
f.write('distance Distance_OSi_' + basename
+ '_DIST = (name Si) and ' + basename
+ '_high, (elem O and neighbor elem Si) and '
+ basename
+ '_high, 2.0;color Orange, Distance_OSi\n')
f.write('distance Distance_OP_' + basename
+ '_DIST = (elem P) and ' + basename
+ '_high, (elem O and neighbor elem P) and '
+ basename
+ '_high, 2.0;color Orange, Distance_OP\n')
f.write('distance Distance_OS_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem O and neighbor elem S) and '
+ basename
+ '_high, 2.0;color Orange, Distance_OS\n')
f.write('distance Distance_PRh_' + basename
+ '_DIST = (elem P) and ' + basename
+ '_high, (name Rh and neighbor elem P) and '
+ basename
+ '_high, 2.5;color Orange, Distance_PRh\n')
f.write('\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#HYDROGEN-BONDING, H-BOND, H-BONDING\n')
f.write('#Defining hydrogen bonding distances (h_bond, hbond)\n'
)
f.write('set h_bond_cutoff_center, 3.6\n')
f.write('set h_bond_cutoff_edge, 3.2\n')
f.write('\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#TRANSITION STATE BONDS (TS, TSS, TS-BOND, TS-BONDS, TSBOND, TSBONDS)\n'
)
f.write('#Defining ts bonds (ts,ts_bond)\n')
f.write('distance TS_C-C_' + basename
+ '_DIST = (/////C) and ' + basename
+ '_high, (/////C) and ' + basename + '_high, 3.0\n'
)
f.write('\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#STERIC (STERIC, STERICS)\n')
f.write('#Defining steric interactions between hydrogens (hh, steric, sterics)\n'
)
f.write('distance Steric_HH_21_' + basename
+ '_DIST = (elem H) and ' + basename
+ '_high, (elem H) and ' + basename
+ '_high, 2.1; color Grey, Steric_HH_21\n')
f.write('distance Steric_HH_22_' + basename
+ '_DIST = (elem H) and ' + basename
+ '_high, (elem H) and ' + basename
+ '_high, 2.2; color Grey, Steric_HH_22\n')
f.write('distance Steric_HH_23_' + basename
+ '_DIST = (elem H) and ' + basename
+ '_high, (elem H) and ' + basename
+ '_high, 2.3; color Grey, Steric_HH_23\n')
f.write('distance Steric_HH_24_' + basename
+ '_DIST = (elem H) and ' + basename
+ '_high, (elem H) and ' + basename
+ '_high, 2.4; color Grey, Steric_HH_24\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#ELECTROSTATIC (ELECTROSTATIC, ELECTROSTATICS, ESP, ESPS)\n'
)
f.write('#Defining electrostatic interactions and contacts (electrostatic, esp, esc)\n'
)
f.write('select XH_32, /////H and (neighbor /////N or neighbor /////O or neighbor /////S)\n'
)
f.write('distance Classic_ESP_NH_32_' + basename
+ '_DIST = /////N, XH_32, 3.2; color Navy_Blue, Classic_ESP_NH_*\n'
)
f.write('distance Classic_ESP_OH_32_' + basename
+ '_DIST = /////O, XH_32, 3.2; color Red, Classic_ESP_OH_*\n'
)
f.write('distance Classic_ESP_HX_32_' + basename
+ '_DIST = XH_32 and ' + basename
+ '_high, (/////N or /////O or /////S) and '
+ basename
+ '_high, 3.2; color Grey, Classic_ESP_HX_*; delete XH_32\n'
)
f.write('distance ESP_CH_32_' + basename
+ '_DIST = (elem C) and ' + basename
+ '_high, ((elem H) and not (neighbor (elem H))) and '
+ basename + '_high, 3.2; color Grey, ESP_CH_*\n')
f.write('distance ESP_NH_32_' + basename
+ '_DIST = (elem N) and ' + basename
+ '_high, ((elem H) and not (neighbor (elem H))) and '
+ basename
+ '_high, 3.2; color Navy_Blue, ESP_NH_*\n')
f.write('distance ESP_OH_32_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, ((elem H) and not (neighbor (elem H))) and '
+ basename + '_high, 3.2; color Pink, ESP_OH_*\n')
f.write('distance ESP_SH_32_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, ((elem H) and not (neighbor (elem H))) and '
+ basename + '_high, 3.2; color Yellow, ESP_SH_*\n'
)
f.write('distance ESP_BN_32_' + basename
+ '_DIST = (elem B) and ' + basename
+ '_high, (elem N) and ' + basename
+ '_high, 3.2; color Pink, ESP_BN_*\n')
f.write('distance ESP_OO_32_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem O) and ' + basename
+ '_high, 3.2; color Red, ESP_OO_*\n')
f.write('distance ESP_ON_32_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem N) and ' + basename
+ '_high, 3.2; color purple, ESP_ON_*\n')
f.write('distance ESP_SS_32_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem S) and ' + basename
+ '_high, 3.2; color sulfur, ESP_SS_*\n')
f.write('distance ESP_SO_32_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem O) and ' + basename
+ '_high, 3.2; color violet, ESP_SO_*\n')
f.write('distance ESP_SN_32_' + basename
+ '_DIST = (elem S) and ' + basename
+ '_high, (elem N) and ' + basename
+ '_high, 3.2; color purple, ESP_SN_*\n')
f.write('distance ESP_ClH_32_' + basename
+ '_DIST = (name Cl) and ' + basename
+ '_high, (elem H) and ' + basename
+ '_high, 3.2; color Green, ESP_ClH_*\n')
f.write('\n')
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('distance TMBond_SiO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem S) and ' + basename
+ '_high, 2.5; Bond_SiO\n')
f.write('distance TMBond_SnO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem S) and ' + basename
+ '_high, 2.5; Bond_SnO\n')
f.write('distance TMBond_ZnO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem Zn) and ' + basename
+ '_high, 2.5; Bond_ZnO\n')
f.write('distance TMBond_InO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem In) and ' + basename
+ '_high, 2.5; Bond_InO\n')
f.write('distance TMBond_GaO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem Ga) and ' + basename
+ '_high, 2.5; Bond_GaO\n')
f.write('distance TMBond_AlO_' + basename
+ '_DIST = (elem O) and ' + basename
+ '_high, (elem Al) and ' + basename
+ '_high, 2.5; Bond_AlO\n')
f.write('\n')
f.write('group ' + basename + ', *_' + basename + '_DIST\n')
if dist:
f.write('#------------------------------------------------------------------------------------------------------------------------------------------\n'
)
f.write('#Defining all distances\n')
f.write('distance ALL_MainGroup_Distance = *, *, 1.75, 1; color Black, ALL_MainGroup_Distance\n'
)
f.write('distance ALL_TMGroup_Distance = *, *, 2.6, 1; color Black, ALL_TMGroup_Distance\n'
)
f.write('distance ALL_Distance= *, neighbor, 3.0, 1; color Black, ALL_Distance\n'
)
for line in buildlines:
f.write(line)
for line in vislines:
f.write(line)
f.close()
print("Run \"pymol Visual-Script.pml\", or your pymol program name, "\
"in the created Pymol-Picture directory")
os.system('open Pymol-Picture/Visual-Script.pml')
######################################################################
### END OF SCRIPT
######################################################################
| [
"[email protected]"
] | |
1cd7f9e0f3c4df2c36183550bab4529fd3cc1c11 | 4a9035a8c74fa3d12b6717d94dbb2fa9727e4d29 | /doc/LectureNotes/_build/jupyter_execute/chapter3.py | 5623c11694a390ef49ce4501193e706e2fe268a5 | [
"CC0-1.0"
] | permissive | smithis7/Physics321 | 6f877fc89f6a99a8bea1022551e6e878e9902881 | 406297c2a87b10557075435e38e8cb7f38088229 | refs/heads/master | 2023-04-11T12:38:26.201740 | 2021-04-23T20:53:16 | 2021-04-23T20:53:16 | 331,421,603 | 0 | 0 | CC0-1.0 | 2021-04-23T20:53:16 | 2021-01-20T20:19:36 | HTML | UTF-8 | Python | false | false | 69,243 | py | # Simple Motion problems and Reminder on Newton's Laws
## Basic Steps of Scientific Investigations
An overarching aim in this course is to give you a deeper
understanding of the scientific method. The problems we study will all
involve cases where we can apply classical mechanics. In our previous
material we already assumed that we had a model for the motion of an
object. Alternatively we could have data from experiment (like Usain
Bolt's 100m world record run in 2008). Or we could have performed
ourselves an experiment and we want to understand which forces are at
play and whether these forces can be understood in terms of
fundamental forces.
Our first step consists in identifying the problem. What we sketch
here may include a mix of experiment and theoretical simulations, or
just experiment or only theory.
### Identifying our System
Here we can ask questions like
1. What kind of object is moving
2. What kind of data do we have
3. How do we measure position, velocity, acceleration etc
4. Which initial conditions influence our system
5. Other aspects which allow us to identify the system
### Defining a Model
With our eventual data and observations we would now like to develop a
model for the system. In the end we want obviously to be able to
understand which forces are at play and how they influence our
specific system. That is, can we extract some deeper insights about a
system?
We need then to
1. Find the forces that act on our system
2. Introduce models for the forces
3. Identify the equations which can govern the system (Newton's second law for example)
4. More elements we deem important for defining our model
### Solving the Equations
With the model at hand, we can then solve the equations. In classical mechanics we normally end up with solving sets of coupled ordinary differential equations or partial differential equations.
1. Using Newton's second law we have equations of the type $\boldsymbol{F}=m\boldsymbol{a}=md\boldsymbol{v}/dt$
2. We need to define the initial conditions (typically the initial velocity and position as functions of time) and/or initial conditions and boundary conditions
3. The solution of the equations give us then the position, the velocity and other time-dependent quantities which may specify the motion of a given object.
We are not yet done. With our lovely solvers, we need to start thinking.
### Analyze
Now it is time to ask the big questions. What do our results mean? Can we give a simple interpretation in terms of fundamental laws? What do our results mean? Are they correct?
Thus, typical questions we may ask are
1. Are our results for say $\boldsymbol{r}(t)$ valid? Do we trust what we did? Can you validate and verify the correctness of your results?
2. Evaluate the answers and their implications
3. Compare with experimental data if possible. Does our model make sense?
4. and obviously many other questions.
The analysis stage feeds back to the first stage. It may happen that
the data we had were not good enough, there could be large statistical
uncertainties. We may need to collect more data or perhaps we did a
sloppy job in identifying the degrees of freedom.
All these steps are essential elements in a scientific
enquiry. Hopefully, through a mix of numerical simulations, analytical
calculations and experiments we may gain a deeper insight about the
physics of a specific system.
## Newton's Laws
Let us now remind ourselves of Newton's laws, since these are the laws of motion we will study in this course.
When analyzing a physical system we normally start with distinguishing between the object we are studying (we will label this in more general terms as our **system**) and how this system interacts with the environment (which often means everything else!)
In our investigations we will thus analyze a specific physics problem in terms of the system and the environment.
In doing so we need to identify the forces that act on the system and assume that the
forces acting on the system must have a source, an identifiable cause in
the environment.
A force acting on for example a falling object must be related to an interaction with something in the environment.
This also means that we do not consider internal forces. The latter are forces between
one part of the object and another part. In this course we will mainly focus on external forces.
Forces are either contact forces or long-range forces.
Contact forces, as evident from the name, are forces that occur at the contact between
the system and the environment. Well-known long-range forces are the gravitional force and the electromagnetic force.
In order to set up the forces which act on an object, the following steps may be useful
1. Divide the problem into system and environment.
2. Draw a figure of the object and everything in contact with the object.
3. Draw a closed curve around the system.
4. Find contact points—these are the points where contact forces may act.
5. Give names and symbols to all the contact forces.
6. Identify the long-range forces.
7. Make a drawing of the object. Draw the forces as arrows, vectors, starting from where the force is acting. The direction of the vector(s) indicates the (positive) direction of the force. Try to make the length of the arrow indicate the relative magnitude of the forces.
8. Draw in the axes of the coordinate system. It is often convenient to make one axis parallel to the direction of motion. When you choose the direction of the axis you also choose the positive direction for the axis.
Newton’s second law of motion: The force $\boldsymbol{F}$ on an object of inertial mass $m$
is related to the acceleration a of the object through
$$
\boldsymbol{F} = m\boldsymbol{a},
$$
where $\boldsymbol{a}$ is the acceleration.
Newton’s laws of motion are laws of nature that have been found by experimental
investigations and have been shown to hold up to continued experimental investigations.
Newton’s laws are valid over a wide range of length- and time-scales. We
use Newton’s laws of motion to describe everything from the motion of atoms to the
motion of galaxies.
The second law is a vector equation with the acceleration having the same
direction as the force. The acceleration is proportional to the force via the mass $m$ of the system under study.
Newton’s second law introduces a new property of an object, the so-called
inertial mass $m$. We determine the inertial mass of an object by measuring the
acceleration for a given applied force.
### Then the First Law
What happens if the net external force on a body is zero? Applying Newton’s second
law, we find:
$$
\boldsymbol{F} = 0 = m\boldsymbol{a},
$$
which gives using the definition of the acceleration
$$
\boldsymbol{a} = \frac{d\boldsymbol{v}}{dt}=0.
$$
The acceleration is zero, which means that the velocity of the object is constant. This
is often referred to as Newton’s first law. An object in a state of uniform motion tends to remain in
that state unless an external force changes its state of motion.
Why do we need a separate law for this? Is it not simply a special case of Newton’s
second law? Yes, Newton’s first law can be deduced from the second law as we have
illustrated. However, the first law is often used for a different purpose: Newton’s
First Law tells us about the limit of applicability of Newton’s Second law. Newton’s
Second law can only be used in reference systems where the First law is obeyed. But
is not the First law always valid? No! The First law is only valid in reference systems
that are not accelerated. If you observe the motion of a ball from an accelerating
car, the ball will appear to accelerate even if there are no forces acting on it. We call
systems that are not accelerating inertial systems, and Newton’s first law is often
called the law of inertia. Newton’s first and second laws of motion are only valid in
inertial systems.
A system is an inertial system if it is not accelerated. It means that the reference system
must not be accelerating linearly or rotating. Unfortunately, this means that most
systems we know are not really inertial systems. For example, the surface of the
Earth is clearly not an inertial system, because the Earth is rotating. The Earth is also
not an inertial system, because it ismoving in a curved path around the Sun. However,
even if the surface of the Earth is not strictly an inertial system, it may be considered
to be approximately an inertial system for many laboratory-size experiments.
### And finally the Third Law
If there is a force from object A on object B, there is also a force from object B on object A.
This fundamental principle of interactions is called Newton’s third law. We do not
know of any force that do not obey this law: All forces appear in pairs. Newton’s
third law is usually formulated as: For every action there is an equal and opposite
reaction.
## Falling baseball in one dimension
We anticipate the mathematical model to come and assume that we have a
model for the motion of a falling baseball without air resistance.
Our system (the baseball) is at an initial height $y_0$ (which we will
specify in the program below) at the initial time $t_0=0$. In our program example here we will plot the position in steps of $\Delta t$ up to a final time $t_f$.
The mathematical formula for the position $y(t)$ as function of time $t$ is
$$
y(t) = y_0-\frac{1}{2}gt^2,
$$
where $g=9.80665=0.980655\times 10^1$m/s$^2$ is a constant representing the standard acceleration due to gravity.
We have here adopted the conventional standard value. This does not take into account other effects, such as buoyancy or drag.
Furthermore, we stop when the ball hits the ground, which takes place at
$$
y(t) = 0= y_0-\frac{1}{2}gt^2,
$$
which gives us a final time $t_f=\sqrt{2y_0/g}$.
As of now we simply assume that we know the formula for the falling object. Afterwards, we will derive it.
We start with preparing folders for storing our calculations, figures and if needed, specific data files we use as input or output files.
%matplotlib inline
# Common imports
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
if not os.path.exists(PROJECT_ROOT_DIR):
os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
os.makedirs(DATA_ID)
def image_path(fig_id):
return os.path.join(FIGURE_ID, fig_id)
def data_path(dat_id):
return os.path.join(DATA_ID, dat_id)
def save_fig(fig_id):
plt.savefig(image_path(fig_id) + ".png", format='png')
#in case we have an input file we wish to read in
#infile = open(data_path("MassEval2016.dat"),'r')
You could also define a function for making our plots. You
can obviously avoid this and simply set up various **matplotlib**
commands every time you need them. You may however find it convenient
to collect all such commands in one function and simply call this
function.
from pylab import plt, mpl
plt.style.use('seaborn')
mpl.rcParams['font.family'] = 'serif'
def MakePlot(x,y, styles, labels, axlabels):
plt.figure(figsize=(10,6))
for i in range(len(x)):
plt.plot(x[i], y[i], styles[i], label = labels[i])
plt.xlabel(axlabels[0])
plt.ylabel(axlabels[1])
plt.legend(loc=0)
Thereafter we start setting up the code for the falling object.
%matplotlib inline
import matplotlib.patches as mpatches
g = 9.80655 #m/s^2
y_0 = 10.0 # initial position in meters
DeltaT = 0.1 # time step
# final time when y = 0, t = sqrt(2*10/g)
tfinal = np.sqrt(2.0*y_0/g)
#set up arrays
t = np.arange(0,tfinal,DeltaT)
y =y_0 -g*.5*t**2
# Then make a nice printout in table form using Pandas
import pandas as pd
from IPython.display import display
data = {'t[s]': t,
'y[m]': y
}
RawData = pd.DataFrame(data)
display(RawData)
plt.style.use('ggplot')
plt.figure(figsize=(8,8))
plt.scatter(t, y, color = 'b')
blue_patch = mpatches.Patch(color = 'b', label = 'Height y as function of time t')
plt.legend(handles=[blue_patch])
plt.xlabel("t[s]")
plt.ylabel("y[m]")
save_fig("FallingBaseball")
plt.show()
Here we used **pandas** (see below) to systemize the output of the position as function of time.
We define now the average velocity as
$$
\overline{v}(t) = \frac{y(t+\Delta t)-y(t)}{\Delta t}.
$$
In the code we have set the time step $\Delta t$ to a given value. We could define it in terms of the number of points $n$ as
$$
\Delta t = \frac{t_{\mathrm{final}}-t_{\mathrm{initial}}}{n+1}.
$$
Since we have discretized the variables, we introduce the counter $i$ and let $y(t)\rightarrow y(t_i)=y_i$ and $t\rightarrow t_i$
with $i=0,1,\dots, n$. This gives us the following shorthand notations that we will use for the rest of this course. We define
$$
y_i = y(t_i),\hspace{0.2cm} i=0,1,2,\dots,n.
$$
This applies to other variables which depend on say time. Examples are the velocities, accelerations, momenta etc.
Furthermore we use the shorthand
$$
y_{i\pm 1} = y(t_i\pm \Delta t),\hspace{0.12cm} i=0,1,2,\dots,n.
$$
### Compact equations
We can then rewrite in a more compact form the average velocity as
$$
\overline{v}_i = \frac{y_{i+1}-y_{i}}{\Delta t}.
$$
The velocity is defined as the change in position per unit time.
In the limit $\Delta t \rightarrow 0$ this defines the instantaneous velocity, which is nothing but the slope of the position at a time $t$.
We have thus
$$
v(t) = \frac{dy}{dt}=\lim_{\Delta t \rightarrow 0}\frac{y(t+\Delta t)-y(t)}{\Delta t}.
$$
Similarly, we can define the average acceleration as the change in velocity per unit time as
$$
\overline{a}_i = \frac{v_{i+1}-v_{i}}{\Delta t},
$$
resulting in the instantaneous acceleration
$$
a(t) = \frac{dv}{dt}=\lim_{\Delta t\rightarrow 0}\frac{v(t+\Delta t)-v(t)}{\Delta t}.
$$
**A note on notations**: When writing for example the velocity as $v(t)$ we are then referring to the continuous and instantaneous value. A subscript like
$v_i$ refers always to the discretized values.
We can rewrite the instantaneous acceleration as
$$
a(t) = \frac{dv}{dt}=\frac{d}{dt}\frac{dy}{dt}=\frac{d^2y}{dt^2}.
$$
This forms the starting point for our definition of forces later. It is a famous second-order differential equation. If the acceleration is constant we can now recover the formula for the falling ball we started with.
The acceleration can depend on the position and the velocity. To be more formal we should then write the above differential equation as
$$
\frac{d^2y}{dt^2}=a(t,y(t),\frac{dy}{dt}).
$$
With given initial conditions for $y(t_0)$ and $v(t_0)$ we can then
integrate the above equation and find the velocities and positions at
a given time $t$.
If we multiply with mass, we have one of the famous expressions for Newton's second law,
$$
F(y,v,t)=m\frac{d^2y}{dt^2}=ma(t,y(t),\frac{dy}{dt}),
$$
where $F$ is the force acting on an object with mass $m$. We see that it also has the right dimension, mass times length divided by time squared.
We will come back to this soon.
### Integrating our equations
Formally we can then, starting with the acceleration (suppose we have measured it, how could we do that?)
compute say the height of a building. To see this we perform the following integrations from an initial time $t_0$ to a given time $t$
$$
\int_{t_0}^t dt a(t) = \int_{t_0}^t dt \frac{dv}{dt} = v(t)-v(t_0),
$$
or as
$$
v(t)=v(t_0)+\int_{t_0}^t dt a(t).
$$
When we know the velocity as function of time, we can find the position as function of time starting from the defintion of velocity as the derivative with respect to time, that is we have
$$
\int_{t_0}^t dt v(t) = \int_{t_0}^t dt \frac{dy}{dt} = y(t)-y(t_0),
$$
or as
$$
y(t)=y(t_0)+\int_{t_0}^t dt v(t).
$$
These equations define what is called the integration method for
finding the position and the velocity as functions of time. There is
no loss of generality if we extend these equations to more than one
spatial dimension.
Let us compute the velocity using the constant value for the acceleration given by $-g$. We have
$$
v(t)=v(t_0)+\int_{t_0}^t dt a(t)=v(t_0)+\int_{t_0}^t dt (-g).
$$
Using our initial time as $t_0=0$s and setting the initial velocity $v(t_0)=v_0=0$m/s we get when integrating
$$
v(t)=-gt.
$$
The more general case is
$$
v(t)=v_0-g(t-t_0).
$$
We can then integrate the velocity and obtain the final formula for the position as function of time through
$$
y(t)=y(t_0)+\int_{t_0}^t dt v(t)=y_0+\int_{t_0}^t dt v(t)=y_0+\int_{t_0}^t dt (-gt),
$$
With $y_0=10$m and $t_0=0$s, we obtain the equation we started with
$$
y(t)=10-\frac{1}{2}gt^2.
$$
### Computing the averages
After this mathematical background we are now ready to compute the mean velocity using our data.
# Compute the average velocity v_i = (y_{i+1}-y_i)/DeltaT from our data.
# The forward difference is defined for i = 0, ..., n-2; the original code
# started the loop at i = 1 and left Vaverage[0] = 0, which then required
# patching Aaverage[0] = -g by hand.
n = np.size(t)
Vaverage = np.zeros(n)
for i in range(n-1):
    Vaverage[i] = (y[i+1]-y[i])/DeltaT
# Compute the average acceleration a_i = (v_{i+1}-v_i)/DeltaT.
# It is defined for i = 0, ..., n-3 since Vaverage[n-1] is not available.
Aaverage = np.zeros(n)
for i in range(n-2):
    Aaverage[i] = (Vaverage[i+1]-Vaverage[i])/DeltaT
data = {'t[s]': t,
        'y[m]': y,
        'v[m/s]': Vaverage,
        'a[m/s^2]': Aaverage
        }
NewData = pd.DataFrame(data)
# Drop the last two rows, where the forward differences are undefined.
display(NewData[0:n-2])
Note that we don't print the last values!
## Including Air Resistance in our model
In our discussions till now of the falling baseball, we have ignored
air resistance and simply assumed that our system is only influenced
by the gravitational force. We will postpone the derivation of air
resistance till later, after our discussion of Newton's laws and
forces.
For our discussions here it suffices to state that the accelerations is now modified to
$$
\boldsymbol{a}(t) = -g\boldsymbol{e}_y - D\boldsymbol{v}(t)\vert \boldsymbol{v}(t)\vert,
$$
where $\vert v(t)\vert$ is the absolute value of the velocity and $D$ is a constant which pertains to the specific object we are studying.
Since we are dealing with motion in one dimension, we can simplify the above to
$$
a(t) = -g +Dv^2(t).
$$
We can rewrite this as a differential equation
$$
a(t) = \frac{dv}{dt}=\frac{d^2y}{dt^2}= -g +Dv^2(t).
$$
Using the integral equations discussed above we can integrate twice
and obtain first the velocity as function of time and thereafter the
position as function of time.
For this particular case, we can actually obtain an analytical
solution for the velocity and for the position. Here we will first
compute the solutions analytically, thereafter we will derive Euler's
method for solving these differential equations numerically.
For simplicity let us just write $v(t)$ as $v$. We have
$$
\frac{dv}{dt}= -g +Dv^2(t).
$$
We can solve this using the technique of separation of variables. We
isolate on the left all terms that involve $v$ and on the right all
terms that involve time. We get then
$$
\frac{dv}{g -Dv^2(t) }= -dt,
$$
We scale now the equation to the left by introducing a constant
$v_T=\sqrt{g/D}$. This constant has dimension length/time. Can you
show this?
Next we integrate the left-hand side (lhs) from $v_0=0$ m/s to $v$ and
the right-hand side (rhs) from $t_0=0$ to $t$ and obtain
$$
\int_{0}^v\frac{dv}{g -Dv^2(t) }= \frac{v_T}{g}\mathrm{arctanh}(\frac{v}{v_T}) =-\int_0^tdt = -t.
$$
We can reorganize these equations as
$$
v_T\mathrm{arctanh}(\frac{v}{v_T}) =-gt,
$$
which gives us $v$ as function of time
$$
v(t)=-v_T\tanh{\left(\frac{gt}{v_T}\right)}.
$$
With the velocity we can then find the height $y(t)$ by integrating yet another time, that is
$$
y(t)=y(t_0)+\int_{t_0}^t dt v(t)=y_0-\int_{0}^t dt\left[v_T\tanh{\left(\frac{gt}{v_T}\right)}\right].
$$
This integral is a little bit trickier but we can look it up in a table over
known integrals and we get
$$
y(t)=y(t_0)-\frac{v_T^2}{g}\log{[\cosh{(\frac{gt}{v_T})}]}.
$$
Alternatively we could have used the symbolic Python package **Sympy** (example will be inserted later).
In most cases however, we need to revert to numerical solutions.
## Our first attempt at solving differential equations
Here we will try the simplest possible approach to solving the second-order differential
equation
$$
a(t) =\frac{d^2y}{dt^2}= -g +Dv^2(t).
$$
We rewrite it as two coupled first-order equations (this is a standard approach)
$$
\frac{dy}{dt} = v(t),
$$
with initial condition $y(t_0)=y_0$ and
$$
a(t) =\frac{dv}{dt}= -g +Dv^2(t),
$$
with initial condition $v(t_0)=v_0$.
Many of the algorithms for solving differential equations start with simple Taylor equations.
If we now Taylor expand $y$ and $v$ around a value $t+\Delta t$ we have
$$
y(t+\Delta t) = y(t)+\Delta t \frac{dy}{dt}+\frac{\Delta t^2}{2!} \frac{d^2y}{dt^2}+O(\Delta t^3),
$$
and
$$
v(t+\Delta t) = v(t)+\Delta t \frac{dv}{dt}+\frac{\Delta t^2}{2!} \frac{d^2v}{dt^2}+O(\Delta t^3).
$$
Using the fact that $dy/dt = v$ and $dv/dt=a$ and keeping only terms up to $\Delta t$ we have
$$
y(t+\Delta t) = y(t)+\Delta t v(t)+O(\Delta t^2),
$$
and
$$
v(t+\Delta t) = v(t)+\Delta t a(t)+O(\Delta t^2).
$$
### Discretizing our equations
Using our discretized versions of the equations with for example
$y_{i}=y(t_i)$ and $y_{i\pm 1}=y(t_i+\Delta t)$, we can rewrite the
above equations as (and truncating at $\Delta t$)
$$
y_{i+1} = y_i+\Delta t v_i,
$$
and
$$
v_{i+1} = v_i+\Delta t a_i.
$$
These are the famous Euler equations (forward Euler).
To solve these equations numerically we start at a time $t_0$ and simply integrate up these equations to a final time $t_f$,
The step size $\Delta t$ is an input parameter in our code.
You can define it directly in the code below as
DeltaT = 0.1
With a given final time **tfinal** we can then find the number of integration points via the **ceil** function included in the **math** package of Python
as
#define final time, assuming that initial time is zero
from math import ceil
tfinal = 0.5
# smallest integer number of steps of size DeltaT that covers [0, tfinal]
n = ceil(tfinal/DeltaT)
print(n)
The **ceil** function returns the smallest integer not less than the input in say
# ceil rounds its argument up to the nearest integer
x = 21.15
print(ceil(x))
which in the case here is 22.
# the size of the fractional part does not matter: 21.75 also rounds up to 22
x = 21.75
print(ceil(x))
which also yields 22. The **floor** function in the **math** package
is used to return the closest integer value which is less than or equal to the specified expression or value.
Compare the previous result to the usage of **floor**
# floor, in contrast, rounds down to the nearest integer
from math import floor
x = 21.75
print(floor(x))
Alternatively, we can define ourselves the number of integration(mesh) points. In this case we could have
# Alternatively, fix the number of mesh points ourselves and
# derive the time step from the interval length.
tinitial = 0.0
tfinal = 0.5
n = 10
# step size for n intervals on [tinitial, tfinal]
DeltaT = (tfinal - tinitial)/n
print(DeltaT)
Since we will set up one-dimensional arrays that contain the values of
various variables like time, position, velocity, acceleration etc, we
need to know the value of $n$, the number of data points (or
integration or mesh points). With $n$ we can initialize a given array
by setting all elelements to zero, as done here
# define array a with n elements, all initialized to zero
a = np.zeros(n)
print(a)
In the code here we implement this simple Euler scheme, choosing the value $D=0.00245$ m$^{-1}$ (note that $D$ carries dimension one over length so that $Dv^2$ has dimension of an acceleration).
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os

# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"
# makedirs with exist_ok=True handles existing and nested folders alike
os.makedirs(PROJECT_ROOT_DIR, exist_ok=True)
os.makedirs(FIGURE_ID, exist_ok=True)
os.makedirs(DATA_ID, exist_ok=True)

def image_path(fig_id):
    """Return the full path of figure file fig_id inside FIGURE_ID."""
    return os.path.join(FIGURE_ID, fig_id)

def data_path(dat_id):
    """Return the full path of data file dat_id inside DATA_ID."""
    return os.path.join(DATA_ID, dat_id)

def save_fig(fig_id):
    """Save the current matplotlib figure as a PNG file in FIGURE_ID."""
    plt.savefig(image_path(fig_id) + ".png", format='png')

g = 9.80655 # gravitational acceleration, m/s^2
# Drag parameter in a = -g + D*v^2.  D carries dimension 1/m so that
# D*v^2 has dimension of an acceleration (the original comment said m/s).
D = 0.00245 # 1/m
DeltaT = 0.1 # time step in s
# set up arrays
tfinal = 0.5
n = ceil(tfinal/DeltaT)
# terminal velocity, used as scaling constant in the analytical solution
vT = sqrt(g/D)
# arrays for t, a, v and y, plus the analytical height for comparison
t = np.zeros(n)
a = np.zeros(n)
v = np.zeros(n)
y = np.zeros(n)
yanalytic = np.zeros(n)
# Initial conditions
v[0] = 0.0 #m/s
y[0] = 10.0 #m
yanalytic[0] = y[0]
# Start integrating using Euler's method
for i in range(n-1):
    # expression for acceleration
    a[i] = -g + D*v[i]*v[i]
    # update position and velocity (forward Euler)
    y[i+1] = y[i] + DeltaT*v[i]
    v[i+1] = v[i] + DeltaT*a[i]
    # update time to next time step and compute analytical answer
    t[i+1] = t[i] + DeltaT
    yanalytic[i+1] = y[0]-(vT*vT/g)*log(cosh(g*t[i+1]/vT))
    # stop once the object has reached the ground
    if ( y[i+1] < 0.0):
        break
# acceleration at the last grid point, not set inside the loop
a[n-1] = -g + D*v[n-1]*v[n-1]
# Tabulate the results.  The second column holds the difference between
# numerical and analytical height; it was misleadingly labelled 'y[m]'.
data = {'t[s]': t,
        'y-y_analytic[m]': y-yanalytic,
        'v[m/s]': v,
        'a[m/s^2]': a
        }
NewData = pd.DataFrame(data)
display(NewData)
#finally we plot the data
fig, axs = plt.subplots(3, 1)
axs[0].plot(t, y, t, yanalytic)
axs[0].set_xlim(0, tfinal)
axs[0].set_ylabel('y and exact')
axs[1].plot(t, v)
axs[1].set_ylabel('v[m/s]')
axs[2].plot(t, a)
axs[2].set_xlabel('time[s]')
axs[2].set_ylabel('a[m/s^2]')
fig.tight_layout()
save_fig("EulerIntegration")
plt.show()
Try different values for $\Delta t$ and study the difference between the exact solution and the numerical solution.
### Simple extension, the Euler-Cromer method
The Euler-Cromer method is a simple variant of the standard Euler
method. We use the newly updated velocity $v_{i+1}$ as an input to the
new position, that is, instead of
$$
y_{i+1} = y_i+\Delta t v_i,
$$
and
$$
v_{i+1} = v_i+\Delta t a_i,
$$
we use now the newly calculate for $v_{i+1}$ as input to $y_{i+1}$, that is
we compute first
$$
v_{i+1} = v_i+\Delta t a_i,
$$
and then
$$
y_{i+1} = y_i+\Delta t v_{i+1},
$$
Implementing the Euler-Cromer method yields a simple change to the previous code. We only need to change the following line in the loop over time
steps
# Euler-Cromer: only the order of the updates changes -- the velocity is
# advanced first and the *new* velocity is used to advance the position.
for i in range(n-1):
    # acceleration a[i] is computed here exactly as in the Euler code
    v[i+1] = v[i] + DeltaT*a[i]
    # note v[i+1], not v[i], on the right-hand side
    y[i+1] = y[i] + DeltaT*v[i+1]
    # time update etc. continue as before
## Air Resistance in One Dimension
Here we look at both a quadratic in velocity resistance
and linear in velocity. But first we give a qualitative argument
about the mathematical expression for the air resistance we used last
Friday.
Air resistance tends to scale as the square of the velocity. This is
in contrast to many problems chosen for textbooks, where it is linear
in the velocity. The choice of a linear dependence is motivated by
mathematical simplicity (it keeps the differential equation linear)
rather than by physics. One can see that the force should be quadratic
in velocity by considering the momentum imparted on the air
molecules. If an object sweeps through a volume $dV$ of air in time
$dt$, the momentum imparted on the air is
<!-- Equation labels as ordinary links -->
<div id="_auto1"></div>
$$
\begin{equation}
dP=\rho_m dV v,
\label{_auto1} \tag{1}
\end{equation}
$$
where $v$ is the velocity of the object and $\rho_m$ is the mass
density of the air. If the molecules bounce back as opposed to stop
you would double the size of the term. The opposite value of the
momentum is imparted onto the object itself. Geometrically, the
differential volume is
<!-- Equation labels as ordinary links -->
<div id="_auto2"></div>
$$
\begin{equation}
dV=Avdt,
\label{_auto2} \tag{2}
\end{equation}
$$
where $A$ is the cross-sectional area and $vdt$ is the distance the
object moved in time $dt$.
Plugging this into the expression above,
<!-- Equation labels as ordinary links -->
<div id="_auto3"></div>
$$
\begin{equation}
\frac{dP}{dt}=-\rho_m A v^2.
\label{_auto3} \tag{3}
\end{equation}
$$
This is the force felt by the particle, and is opposite to its
direction of motion. Now, because air doesn't stop when it hits an
object, but flows around the best it can, the actual force is reduced
by a dimensionless factor $c_W$, called the drag coefficient.
<!-- Equation labels as ordinary links -->
<div id="_auto4"></div>
$$
\begin{equation}
F_{\rm drag}=-c_W\rho_m Av^2,
\label{_auto4} \tag{4}
\end{equation}
$$
and the acceleration is
$$
\begin{eqnarray}
\frac{dv}{dt}=-\frac{c_W\rho_mA}{m}v^2.
\end{eqnarray}
$$
For a particle with initial velocity $v_0$, one can separate the $dt$
to one side of the equation, and move everything with $v$s to the
other side. We did this in our discussion of simple motion and will not repeat it here.
On more general terms,
for many systems, e.g. an automobile, there are multiple sources of
resistance. In addition to wind resistance, where the force is
proportional to $v^2$, there are dissipative effects of the tires on
the pavement, and in the axel and drive train. These other forces can
have components that scale proportional to $v$, and components that
are independent of $v$. Those independent of $v$, e.g. the usual
$f=\mu_K N$ frictional force you consider in your first Physics courses, only set in
once the object is actually moving. As speeds become higher, the $v^2$
components begin to dominate relative to the others. For automobiles
at freeway speeds, the $v^2$ terms are largely responsible for the
loss of efficiency. To travel a distance $L$ at fixed speed $v$, the
energy/work required to overcome the dissipative forces are $fL$,
which for a force of the form $f=\alpha v^n$ becomes
$$
\begin{eqnarray}
W=\int dx~f=\alpha v^n L.
\end{eqnarray}
$$
For $n=0$ the work is
independent of speed, but for the wind resistance, where $n=2$,
slowing down is essential if one wishes to reduce fuel consumption. It
is also important to consider that engines are designed to be most
efficient at a chosen range of power output. Thus, some cars will get
better mileage at higher speeds (They perform better at 50 mph than at
5 mph) despite the considerations mentioned above.
As an example of Newton's Laws we consider projectile motion (or a
falling raindrop or a ball we throw up in the air) with a drag force. Even though air resistance is
largely proportional to the square of the velocity, we will consider
the drag force to be linear to the velocity, $\boldsymbol{F}=-m\gamma\boldsymbol{v}$,
for the purposes of this exercise.
Such a dependence can be extracted from experimental data for objects moving at low velocities, see for example Malthe-Sørenssen chapter 5.6.
We will here focus on a two-dimensional problem.
The acceleration for a projectile moving upwards,
$\boldsymbol{a}=\boldsymbol{F}/m$, becomes
$$
\begin{eqnarray}
\frac{dv_x}{dt}=-\gamma v_x,\\
\nonumber
\frac{dv_y}{dt}=-\gamma v_y-g,
\end{eqnarray}
$$
and $\gamma$ has dimensions of inverse time.
If you on the other hand have a falling raindrop, how do these equations change? See for example Figure 2.1 in Taylor.
Let us stay with a ball which is thrown up in the air at $t=0$.
## Ways of solving these equations
We will go over two different ways to solve this equation. The first
by direct integration, and the second as a differential equation. To
do this by direct integration, one simply multiplies both sides of the
equations above by $dt$, then divide by the appropriate factors so
that the $v$s are all on one side of the equation and the $dt$ is on
the other. For the $x$ motion one finds an easily integrable equation,
$$
\begin{eqnarray}
\frac{dv_x}{v_x}&=&-\gamma dt,\\
\nonumber
\int_{v_{0x}}^{v_{x}}\frac{dv_x}{v_x}&=&-\gamma\int_0^{t}dt,\\
\nonumber
\ln\left(\frac{v_{x}}{v_{0x}}\right)&=&-\gamma t,\\
\nonumber
v_{x}(t)&=&v_{0x}e^{-\gamma t}.
\end{eqnarray}
$$
This is very much the result you would have written down
by inspection. For the $y$-component of the velocity,
$$
\begin{eqnarray}
\frac{dv_y}{v_y+g/\gamma}&=&-\gamma dt\\
\nonumber
\ln\left(\frac{v_{y}+g/\gamma}{v_{0y}+g/\gamma}\right)&=&-\gamma t,\\
\nonumber
v_{fy}&=&-\frac{g}{\gamma}+\left(v_{0y}+\frac{g}{\gamma}\right)e^{-\gamma t}.
\end{eqnarray}
$$
Whereas $v_x$ starts at some value and decays
exponentially to zero, $v_y$ decays exponentially to the terminal
velocity, $v_t=-g/\gamma$.
Although this direct integration is simpler than the method we invoke
below, the method below will come in useful for some slightly more
difficult differential equations in the future. The differential
equation for $v_x$ is straight-forward to solve. Because it is first
order there is one arbitrary constant, $A$, and by inspection the
solution is
<!-- Equation labels as ordinary links -->
<div id="_auto5"></div>
$$
\begin{equation}
v_x=Ae^{-\gamma t}.
\label{_auto5} \tag{5}
\end{equation}
$$
The arbitrary constants for equations of motion are usually determined
by the initial conditions, or more generally boundary conditions. By
inspection $A=v_{0x}$, the initial $x$ component of the velocity.
## Differential Equations, contn
The differential equation for $v_y$ is a bit more complicated due to
the presence of $g$. Differential equations where all the terms are
linearly proportional to a function, in this case $v_y$, or to
derivatives of the function, e.g., $v_y$, $dv_y/dt$,
$d^2v_y/dt^2\cdots$, are called linear differential equations. If
there are terms proportional to $v^2$, as would happen if the drag
force were proportional to the square of the velocity, the
differential equation is not longer linear. Because this expression
has only one derivative in $v$ it is a first-order linear differential
equation. If a term were added proportional to $d^2v/dt^2$ it would be
a second-order differential equation. In this case we have a term
completely independent of $v$, the gravitational acceleration $g$, and
the usual strategy is to first rewrite the equation with all the
linear terms on one side of the equal sign,
<!-- Equation labels as ordinary links -->
<div id="_auto6"></div>
$$
\begin{equation}
\frac{dv_y}{dt}+\gamma v_y=-g.
\label{_auto6} \tag{6}
\end{equation}
$$
Now, the solution to the equation can be broken into two
parts. Because this is a first-order differential equation we know
that there will be one arbitrary constant. Physically, the arbitrary
constant will be determined by setting the initial velocity, though it
could be determined by setting the velocity at any given time. Like
most differential equations, solutions are not "solved". Instead,
one guesses at a form, then shows the guess is correct. For these
types of equations, one first tries to find a single solution,
i.e. one with no arbitrary constants. This is called the
**particular** solution, $y_p(t)$, though it should really be called
"a" particular solution because there are an infinite number of such
solutions. One then finds a solution to the **homogeneous** equation,
which is the equation with zero on the right-hand side,
<!-- Equation labels as ordinary links -->
<div id="_auto7"></div>
$$
\begin{equation}
\frac{dv_{y,h}}{dt}+\gamma v_{y,h}=0.
\label{_auto7} \tag{7}
\end{equation}
$$
Homogeneous solutions will have arbitrary constants.
The particular solution will solve the same equation as the original
general equation
<!-- Equation labels as ordinary links -->
<div id="_auto8"></div>
$$
\begin{equation}
\frac{dv_{y,p}}{dt}+\gamma v_{y,p}=-g.
\label{_auto8} \tag{8}
\end{equation}
$$
However, we don't need find one with arbitrary constants. Hence, it is
called a **particular** solution.
The sum of the two,
<!-- Equation labels as ordinary links -->
<div id="_auto9"></div>
$$
\begin{equation}
v_y=v_{y,p}+v_{y,h},
\label{_auto9} \tag{9}
\end{equation}
$$
is a solution of the total equation because of the linear nature of
the differential equation. One has now found a *general* solution
encompassing all solutions, because it both satisfies the general
equation (like the particular solution), and has an arbitrary constant
that can be adjusted to fit any initial condition (like the homogeneous
solution). If the equations were not linear, that is if there were terms
such as $v_y^2$ or $v_y\dot{v}_y$, this technique would not work.
Returning to the example above, the homogenous solution is the same as
that for $v_x$, because there was no gravitational acceleration in
that case,
<!-- Equation labels as ordinary links -->
<div id="_auto10"></div>
$$
\begin{equation}
v_{y,h}=Be^{-\gamma t}.
\label{_auto10} \tag{10}
\end{equation}
$$
In this case a particular solution is one with constant velocity,
<!-- Equation labels as ordinary links -->
<div id="_auto11"></div>
$$
\begin{equation}
v_{y,p}=-g/\gamma.
\label{_auto11} \tag{11}
\end{equation}
$$
Note that this is the terminal velocity of a particle falling from a
great height. The general solution is thus,
<!-- Equation labels as ordinary links -->
<div id="_auto12"></div>
$$
\begin{equation}
v_y=Be^{-\gamma t}-g/\gamma,
\label{_auto12} \tag{12}
\end{equation}
$$
and one can find $B$ from the initial velocity,
<!-- Equation labels as ordinary links -->
<div id="_auto13"></div>
$$
\begin{equation}
v_{0y}=B-g/\gamma,~~~B=v_{0y}+g/\gamma.
\label{_auto13} \tag{13}
\end{equation}
$$
Plugging in the expression for $B$ gives the $y$ motion given the initial velocity,
<!-- Equation labels as ordinary links -->
<div id="_auto14"></div>
$$
\begin{equation}
v_y=(v_{0y}+g/\gamma)e^{-\gamma t}-g/\gamma.
\label{_auto14} \tag{14}
\end{equation}
$$
It is easy to see that this solution has $v_y=v_{0y}$ when $t=0$ and
$v_y=-g/\gamma$ when $t\rightarrow\infty$.
One can also integrate the two equations to find the coordinates $x$
and $y$ as functions of $t$,
$$
\begin{eqnarray}
x&=&\int_0^t dt'~v_{x}(t')=\frac{v_{0x}}{\gamma}\left(1-e^{-\gamma t}\right),\\
\nonumber
y&=&\int_0^t dt'~v_{y}(t')=-\frac{gt}{\gamma}+\frac{v_{0y}+g/\gamma}{\gamma}\left(1-e^{-\gamma t}\right).
\end{eqnarray}
$$
If the question was to find the position at a time $t$, we would be
finished. However, the more common goal in a projectile equation
problem is to find the range, i.e. the distance $x$ at which $y$
returns to zero. For the case without a drag force this was much
simpler. The solution for the $y$ coordinate would have been
$y=v_{0y}t-gt^2/2$. One would solve for $t$ to make $y=0$, which would
be $t=2v_{0y}/g$, then plug that value for $t$ into $x=v_{0x}t$ to
find $x=2v_{0x}v_{0y}/g=v_0\sin(2\theta_0)/g$. One follows the same
steps here, except that the expression for $y(t)$ is more
complicated. Searching for the time where $y=0$, and we get
<!-- Equation labels as ordinary links -->
<div id="_auto15"></div>
$$
\begin{equation}
0=-\frac{gt}{\gamma}+\frac{v_{0y}+g/\gamma}{\gamma}\left(1-e^{-\gamma t}\right).
\label{_auto15} \tag{15}
\end{equation}
$$
This cannot be inverted into a simple expression $t=\cdots$. Such
expressions are known as "transcendental equations", and are not the
rare instance, but are the norm. In the days before computers, one
might plot the right-hand side of the above graphically as
a function of time, then find the point where it crosses zero.
Now, the most common way to solve for an equation of the above type
would be to apply Newton's method numerically. This involves the
following algorithm for finding solutions of some equation $F(t)=0$.
1. First guess a value for the time, $t_{\rm guess}$.
2. Calculate $F$ and its derivative, $F(t_{\rm guess})$ and $F'(t_{\rm guess})$.
3. Unless you guessed perfectly, $F\ne 0$, and assuming that $\Delta F\approx F'\Delta t$, one would choose
4. $\Delta t=-F(t_{\rm guess})/F'(t_{\rm guess})$.
5. Now repeat step 1, but with $t_{\rm guess}\rightarrow t_{\rm guess}+\Delta t$.
If the $F(t)$ were perfectly linear in $t$, one would find $t$ in one
step. Instead, one typically finds a value of $t$ that is closer to
the final answer than $t_{\rm guess}$. One breaks the loop once one
finds $F$ within some acceptable tolerance of zero. A program to do
this will be added shortly.
## Motion in a Magnetic Field
Another example of a velocity-dependent force is magnetism,
$$
\begin{eqnarray}
\boldsymbol{F}&=&q\boldsymbol{v}\times\boldsymbol{B},\\
\nonumber
F_i&=&q\sum_{jk}\epsilon_{ijk}v_jB_k.
\end{eqnarray}
$$
For a uniform field in the $z$ direction $\boldsymbol{B}=B\hat{z}$, the force can only have $x$ and $y$ components,
$$
\begin{eqnarray}
F_x&=&qBv_y\\
\nonumber
F_y&=&-qBv_x.
\end{eqnarray}
$$
The differential equations are
$$
\begin{eqnarray}
\dot{v}_x&=&\omega_c v_y,\omega_c= qB/m\\
\nonumber
\dot{v}_y&=&-\omega_c v_x.
\end{eqnarray}
$$
One can solve the equations by taking time derivatives of either equation, then substituting into the other equation,
$$
\begin{eqnarray}
\ddot{v}_x&=&\omega_c\dot{v}_y=-\omega_c^2v_x,\\
\nonumber
\ddot{v}_y&=&-\omega_c\dot{v}_x=-\omega_c^2v_y.
\end{eqnarray}
$$
The solution to these equations can be seen by inspection,
$$
\begin{eqnarray}
v_x&=&A\sin(\omega_ct+\phi),\\
\nonumber
v_y&=&A\cos(\omega_ct+\phi).
\end{eqnarray}
$$
One can integrate the equations to find the positions as a function of time,
$$
\begin{eqnarray}
x-x_0&=&\int_{x_0}^x dx=\int_0^t dt v(t)\\
\nonumber
&=&\frac{-A}{\omega_c}\cos(\omega_ct+\phi),\\
\nonumber
y-y_0&=&\frac{A}{\omega_c}\sin(\omega_ct+\phi).
\end{eqnarray}
$$
The trajectory is a circle centered at $x_0,y_0$ with radius $A/\omega_c$, traversed in the clockwise direction.
The equations of motion for the $z$ motion are
<!-- Equation labels as ordinary links -->
<div id="_auto16"></div>
$$
\begin{equation}
\dot{v_z}=0,
\label{_auto16} \tag{16}
\end{equation}
$$
which leads to
<!-- Equation labels as ordinary links -->
<div id="_auto17"></div>
$$
\begin{equation}
z-z_0=V_zt.
\label{_auto17} \tag{17}
\end{equation}
$$
Added onto the circle, the motion is helical.
Note that the kinetic energy,
<!-- Equation labels as ordinary links -->
<div id="_auto18"></div>
$$
\begin{equation}
T=\frac{1}{2}m(v_x^2+v_y^2+v_z^2)=\frac{1}{2}m(A^2+V_z^2),
\label{_auto18} \tag{18}
\end{equation}
$$
is constant. This is because the force is perpendicular to the
velocity, so that in any differential time element $dt$ the work done
on the particle $\boldsymbol{F}\cdot d\boldsymbol{r}=dt\,\boldsymbol{F}\cdot\boldsymbol{v}=0$.
One should think about the implications of a velocity dependent
force. Suppose one had a constant magnetic field in deep space. If a
particle came through with velocity $v_0$, it would undergo cyclotron
motion with radius $R=v_0/\omega_c$. However, if it were still its
motion would remain fixed. Now, suppose an observer looked at the
particle in one reference frame where the particle was moving, then
changed their velocity so that the particle's velocity appeared to be
zero. The motion would change from circular to fixed. Is this
possible?
The solution to the puzzle above relies on understanding
relativity. Imagine that the first observer believes $\boldsymbol{B}\ne 0$ and
that the electric field $\boldsymbol{E}=0$. If the observer then changes
reference frames by accelerating to a velocity $\boldsymbol{v}$, in the new
frame $\boldsymbol{B}$ and $\boldsymbol{E}$ both change. If the observer moved to the
frame where the charge, originally moving with a small velocity $v$,
is now at rest, the new electric field is indeed $\boldsymbol{v}\times\boldsymbol{B}$,
which then leads to the same acceleration as one had before. If the
velocity is not small compared to the speed of light, additional
$\gamma$ factors come into play,
$\gamma=1/\sqrt{1-(v/c)^2}$. Relativistic motion will not be
considered in this course.
## Summarizing the various motion problems
The examples we have discussed above were included in order to
illustrate various methods (which depend on the specific problem) to
find the solutions of the equations of motion.
We have solved the equations of motion in the following ways:
**Solve the differential equations analytically.**
We did this for example with the following object in one or two dimensions or the sliding block.
Here we had for example an equation set like
$$
\frac{dv_x}{dt}=-\gamma v_x,
$$
and
$$
\frac{dv_y}{dt}=-\gamma v_y-g,
$$
and $\gamma$ has dimension of inverse time.
We could also in case we can separate the degrees of freedom integrate. Take for example one of the equations in the previous slide
$$
\frac{dv_x}{dt}=-\gamma v_x,
$$
which we can rewrite in terms of a left-hand side which depends only on the velocity and a right-hand side which depends only on time
$$
\frac{dv_x}{v_x}=-\gamma dt.
$$
Integrating we have (since we can separate $v_x$ and $t$)
$$
\int_{v_0}^{v_f}\frac{dv_x}{v_x}=-\int_{t_0}^{t_f}\gamma dt,
$$
where $v_f$ is the velocity at a final time and $t_f$ is the final time.
In this case we found, after having integrated the above two sides that
$$
v_f(t)=v_0\exp{-\gamma t}.
$$
Finally, using for example Euler's method, we can solve the
differential equations numerically. If we can compare our numerical
solutions with analytical solutions, we have an extra check of our
numerical approaches.
## Exercises
### Electron moving into an electric field
An electron is sent through a varying electrical
field. Initially, the electron is moving in the $x$-direction with a velocity
$v_x = 100$ m/s. The electron enters the field when it passes the origin. The field
varies with time, causing an acceleration of the electron that varies in time
$$
\boldsymbol{a}(t)=\left(-20\,\mathrm{m/s^2} - 10\,\mathrm{m/s^3}\, t\right) \boldsymbol{e}_y,
$$
or, if we replace $\boldsymbol{e}_y$ with $\boldsymbol{e}_2$ (the unit vector in the $y$-direction), we have
$$
\boldsymbol{a}(t)=\left(-20\,\mathrm{m/s^2} - 10\,\mathrm{m/s^3}\, t\right) \boldsymbol{e}_2.
$$
Note that the velocity in the $x$-direction is a constant and is not affected by the force which acts only in the $y$-direction.
This means that we can decouple the two degrees of freedom and skip the vector symbols.
We have then a constant velocity in the $x$-direction
$$
v_x(t) = 100\mathrm{m/s},
$$
and integrating up the acceleration in the $y$-direction (and using that the initial time $t_0=0$) we get
$$
v_y(t) = -20\mathrm{m/s^2}t-5\mathrm{m/s^3}t^2.
$$
Find the position as a function of time for the electron.
We integrate again in the $x$-direction
$$
x(t) = 100\mathrm{m/s}t,
$$
and in the $y$-direction (remember that these two degrees of freedom don't depend on each other)
we get
$$
y(t) = -10\mathrm{m/s^2}t^2-\frac{5}{3}\mathrm{m/s^3}t^3.
$$
The field is only acting inside a box of length $L = 2m$.
How long time is the electron inside the field?
If we use the equation for the $x$-direction (the length of the box), we can then use the equation for $x(t) = 100\mathrm{m/s}t$
and simply set $x=2$m and we find
$$
t=\frac{1}{50}\mathrm{s}.
$$
What is the displacement in the $y$-direction when the electron leaves the box. (We call this the deflection of the electron).
Here we simply use
$$
y(t) = -10\mathrm{m/s^2}t^2-\frac{5}{3}\mathrm{m/s^3}t^3,
$$
and use $t=1/50$s and find that
$$
y = -0.004013 \mathrm{m}.
$$
Find the angle the velocity vector forms with the horizontal axis as the electron leaves the box.
Again, we use $t=1/50$s and calculate the velocities in the $x$- and the $y$-directions (the velocity in the $x$-direction is just a constant) and find the angle using
$$
\tan{\alpha} = \frac{v_y(t=1/50)}{v_x(t=1/50)},
$$
which leads to
$$
\alpha = -0.23,
$$
in degrees (not radians).
### Drag force
Using equations (2.84) and (2.82) in Taylor, we have that $f_{\mathrm{quad}}/f_{\mathrm{lin}}=(\kappa\rho Av^2)/(3\pi\eta Dv)$. With $\kappa =1/4$ and $A=\pi D^2/4$ we obtain $f_{\mathrm{quad}}/f_{\mathrm{lin}}=(\rho Dv)/(48\eta)$ or $R/48$ with $R$ given by equation (2.83) of Taylor.
With these numbers $R=1.1\times 10^{-2}$ and it is safe to neglect the quadratic drag.
### Falling object
If we insert Taylor series for $\exp{-(t/\tau)}$ into equation (2.33) of Taylor, we have
$$
v_y(t) = v_{\mathrm{ter}}\left[1-\exp{-(t/\tau)}\right] = v_{\mathrm{ter}}\left[1-(1-\frac{t}{\tau}+\frac{t^2}{2\tau^2}+\dots )\right].
$$
The first two terms on the right cancel and, if $t$ is sufficiently small, we can neglect terms with higher powers than two in $t$. This gives us
$$
v_y(t) \approx v_{\mathrm{ter}}\frac{t}{\tau}=gt,
$$
where we used that $v_{\mathrm{ter}}=g\tau$ from equation (2.34) in Taylor. This means that for small velocities it is the gravitational force which dominates.
Setting $v_y(t_0)=0$ in equation (2.35) of Taylor and using the Taylor series for the exponential we find that
$$
y(t) = v_{\mathrm{ter}}t-v_{\mathrm{ter}}\tau\left[1-\exp{-(t/\tau)}\right] = v_{\mathrm{ter}}t-v_{\mathrm{ter}}\tau\left[1-(1-\frac{t}{\tau}+\frac{t^2}{2\tau^2}+\dots )\right].
$$
On the rhs the second and third terms cancel, as do the first and fourth. If we neglect all terms beyond $t^2$, this leaves us with
$$
y(t) \approx v_{\mathrm{ter}}\frac{t^2}{2\tau}=\frac{1}{2}gt^2.
$$
Again, for small times, as expected, the gravitational force plays the major role.
### Motion of a cyclist
Putting in the numbers for the characteristic time we find
$$
\tau = \frac{m}{Dv_0} = \frac{80}{0.20\times 20}=20\mathrm{s}.
$$
From an initial velocity of 20m/s we will slow down to half the initial speed, 10m/s in 20s. From Taylor equation (2.45) we have then that the time to slow down to any speed $v$ is
$$
t = \frac{M}{D}\left(\frac{1}{v}-\frac{1}{v_0}\right).
$$
This gives a time of 6.7s for a velocity of 15m/s, 20s for a velocity of 10m/s and 60s for a velocity of 5m/s. We see that this approximation leads to an infinite time before we come to rest. To ignore ordinary friction at low speeds is indeed a bad approximation.
### Falling ball and preparing for the numerical exercise
In this example we study the motion of an object subject to a constant force, a velocity dependent
force, and for the numerical part a position-dependent force.
Without the position dependent force, we can solve the problem analytically. This is what we will do in this exercise.
The position dependent force requires numerical efforts (exercise 7).
In addition to the falling ball case, we will include the effect of the ball bouncing back from the floor in exercises 7.
Here we limit ourselves to a ball that is thrown from a height $h$
above the ground with an initial velocity
$\boldsymbol{v}_0$ at time $t=t_0$.
We assume we have only a gravitational force and a force due to the air resistance.
The position of the ball as function of time is $\boldsymbol{r}(t)$ where $t$ is time.
The position is measured with respect to a coordinate system with origin at the floor.
We assume we have an initial position $\boldsymbol{r}(t_0)=h\boldsymbol{e}_y$ and an initial velocity $\boldsymbol{v}_0=v_{x,0}\boldsymbol{e}_x+v_{y,0}\boldsymbol{e}_y$.
In this exercise we assume the system is influenced by the gravitational force
$$
\boldsymbol{G}=-mg\boldsymbol{e}_y
$$
and an air resistance given by a square law
$$
-Dv\boldsymbol{v}.
$$
The analytical expressions for velocity and position as functions of
time will be used to compare with the numerical results in exercise 6.
Identify the forces acting on the ball and set up a diagram with the forces acting on the ball. Find the acceleration of the falling ball.
The forces acting on the ball are the gravitational force $\boldsymbol{G}=-mg\boldsymbol{e}_y$ and the air resistance $\boldsymbol{F}_D=-D\boldsymbol{v}v$ with $v$ the absolute value of the velocity. The accelaration in the $x$-direction is
$$
a_x = -\frac{Dv_x\vert v\vert}{m},
$$
and in the $y$-direction
$$
a_y = -g-\frac{Dv_y\vert v\vert}{m},
$$
where $\vert v\vert=\sqrt{v_x^2+v_y^2}$. Note that due to the dependence on $v_x$ and $v_y$ in each equation, it means we may not be able find an analytical solution. In this case we cannot.
In order to compare our code with analytical results, we will thus study the problem only in the $y$-direction.
In the general code below we would write this as (pseudocode style)
ax = -D*vx[i]*abs(v[i])/m
ay = -g - D*vy[i]*abs(v[i])/m
Integrate the acceleration from an initial time $t_0$ to a final time $t$ and find the velocity.
We reduce our problem to a one-dimensional in the $y$-direction only since for the two-dimensional motion we cannot find an analtical solution. For one dimension however, we have an analytical solution.
We specialize our equations for the $y$-direction only
$$
\frac{dv_y}{dt}= -g +Dv_y^2(t).
$$
We can solve this using the technique of separation of variables. We
isolate on the left all terms that involve $v$ and on the right all
terms that involve time. We get then
$$
\frac{dv_y}{g -Dv_y^2(t) }= -dt,
$$
We scale now the equation to the left by introducing a constant
$v_T=\sqrt{g/D}$. This constant has dimension length/time.
Next we integrate the left-hand side (lhs) from $v_{y0}=0$ m/s to $v$ and
the right-hand side (rhs) from $t_0=0$ to $t$ and obtain
$$
\int_{0}^{v_y}\frac{dv_y}{g -Dv_y^2(t) }= \frac{v_T}{g}\mathrm{arctanh}(\frac{v_y}{v_T}) =-\int_0^tdt = -t.
$$
We can reorganize these equations as
$$
v_T\mathrm{arctanh}(\frac{v_y}{v_T}) =-gt,
$$
which gives us $v_y$ as function of time
$$
v_y(t)=v_T\tanh{-(\frac{gt}{v_T})}.
$$
With a finite initial velocity we need simply to add $v_{y0}$.
Find thereafter the position as function of time starting with an initial time $t_0$. Find the time it takes to hit the floor. Here you will find it convenient to set the initial velocity in the $y$-direction to zero.
With the velocity we can then find the height $y(t)$ by integrating yet another time, that is
$$
y(t)=y(t_0)+\int_{t_0}^t dt v_y(t)=\int_{0}^t dt[v_T\tanh{-(\frac{gt}{v_T})}].
$$
This integral is a little bit trickier but we can look it up in a table over
known integrals and we get
$$
y(t)=y(t_0)-\frac{v_T^2}{g}\log{[\cosh{(\frac{gt}{v_T})}]}.
$$
Here we have assumed that we set the initial velocity in the $y$-direction to zero, that is $v_y(t_0)=0$m/s. Adding a non-zero velocity gives us an additional term of $v_{y0}t$.
Using a zero initial velocity and setting
$$
y(t)=0=y(t_0)-\frac{v_T^2}{g}\log{[\cosh{(-\frac{gt}{v_T})}]}=y(t_0)-\frac{v_T^2}{g}\log{[\cosh{(\frac{gt}{v_T})}]},
$$
(note that $\cosh$ yields the same values for negative and positive arguments)
allows us to find the final time by solving
$$
y(t_0)=\frac{v_T^2}{g}\log{[\cosh{(\frac{gt}{v_T})}]},
$$
which gives
$$
t = \frac{v_T}{g}\mathrm{arccosh}(\exp{(gy_0/v_T^2)}).
$$
In the code below we would code these analytical expressions (with zero initial velocity in the $y$-direction) as
yanalytic[i+1] = y[0]-(vT*vT/g)*log(cosh(g*t[i+1]/vT))+vy[0]*t[i+1]
We will use the above analytical results in our numerical calculations in the next exercise
### Numerical elements, solving the previous exercise numerically and adding the bouncing from the floor
Here we will:
1. Learn and utilize Euler's Method to find the position and the velocity
2. Compare analytical and computational solutions
3. Add additional forces to our model
# let's start by importing useful packages we are familiar with
import numpy as np
import matplotlib.pyplot as plt
# NOTE: the line below is an IPython magic command (enables inline figure
# display); it is only valid inside a Jupyter/IPython session, not in a
# plain .py script.
%matplotlib inline
We will choose the following values
1. mass $m=0.2$ kg
2. accelleration (gravity) $g=9.81$ m/s$^{2}$.
3. initial position is the height $h=2$ m
4. initial velocities $v_{x,0}=v_{y,0}=10$ m/s
Can you find a reasonable value for the drag coefficient $D$?
You need also to define an initial time and
the step size $\Delta t$. We can define the step size $\Delta t$ as the difference between any
two neighboring values in time (time steps) that we analyze within
some range. It can be determined by dividing the interval we are
analyzing, which in our case is time $t_{\mathrm{final}}-t_0$, by the number of steps we
are taking $(N)$. This gives us a step size $\Delta t = \dfrac{t_{\mathrm{final}}-t_0}{N}$.
With these preliminaries we are now ready to plot our results from exercise 5.
Set up arrays for time, velocity, acceleration and positions for the results from exercise 5. Define an initial and final time. Choose the final time to be the time when the ball hits the ground for the first time. Make a plot of the position and velocity as functions of time. Here you could set the initial velocity in the $y$-direction to zero and use the result from exercise 5. Else you need to try different initial times using the result from exercise 5 as a starting guess. It is not critical if you don't reach the ground when the initial velocity in the $y$-direction is not zero.
We move now to the numerical solution of the differential equations as discussed in the [lecture notes](https://mhjensen.github.io/Physics321/doc/pub/motion/html/motion.html) or Malthe-Sørenssen chapter 7.5.
Let us remind ourselves about Euler's Method.
Suppose we know $f(t)$ and its derivative $f'(t)$. To find $f(t+\Delta t)$ at the next step, $t+\Delta t$,
we can consider the Taylor expansion:
$f(t+\Delta t) = f(t) + \dfrac{(\Delta t)f'(t)}{1!} + \dfrac{(\Delta t)^2f''(t)}{2!} + ...$
If we ignore the $f''$ term and higher derivatives, we obtain
$f(t+\Delta t) \approx f(t) + (\Delta t)f'(t)$.
This approximation is the basis of Euler's method, and the Taylor
expansion suggests that it will have errors of $O(\Delta t^2)$. Thus, one
would expect it to work better, the smaller the step size $h$ that you
use. In our case the step size is $\Delta t$.
In setting up our code we need to
1. Define and obtain all initial values, constants, and time to be analyzed with step sizes as done above (you can use the same values)
2. Calculate the velocity using $v_{i+1} = v_{i} + (\Delta t)*a_{i}$
3. Calculate the position using $pos_{i+1} = r_{i} + (\Delta t)*v_{i}$
4. Calculate the new acceleration $a_{i+1}$.
5. Repeat steps 2-4 for all time steps within a loop.
Write a code which implements Euler's method and compute numerically and plot the position and velocity as functions of time for various values of $\Delta t$. Comment your results.
Below you will find two codes, one which uses explicit expressions for the $x$- and $y$-directions and one which rewrites the expressions as compact vectors, as done in homework 2. Running the codes shows a sensitivity to the chosen step size $\Delta t$. You will clearly notice that when comparing with the analytical results, that larger values of the step size in time result in a poorer agreement with the analytical solutions.
* Compare your numerically obtained positions and velocities with the analytical results from exercise 5. Comment again your results.
The codes follow here. Running them allows you to probe the various parameters and compare with analytical solutions as well.
The analytical results are discussed in the lecture notes, see the slides of the week of January 25-29 <https://mhjensen.github.io/Physics321/doc/pub/week4/html/week4-bs.html>.
The codes here show two different ways of solving the two-dimensional problem. The first one defines arrays for the $x$- and $y$-directions explicitly, while the second code uses a more
compact (and thus closer to the mathematics) notation with a full two-dimensional vector.
The initial conditions for the first example are set so that we only have an object falling in the $y$-direction. Then it makes sense to compare with the analytical solution. If you change the initial conditions, this comparison is no longer correct.
# Exercise 6, hw3, brute force way with declaration of vx, vy, x and y
# Euler integration of a falling ball with quadratic air resistance.
# The numerical y-motion is compared with the analytical solution
# y(t) = y(0) - (vT^2/g) ln(cosh(g t / vT)), which is valid for vy(0) = 0.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os

# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"

# Create the output directories on the first run
if not os.path.exists(PROJECT_ROOT_DIR):
    os.mkdir(PROJECT_ROOT_DIR)
if not os.path.exists(FIGURE_ID):
    os.makedirs(FIGURE_ID)
if not os.path.exists(DATA_ID):
    os.makedirs(DATA_ID)

def image_path(fig_id):
    """Return the full path of figure file fig_id inside FIGURE_ID."""
    return os.path.join(FIGURE_ID, fig_id)

def data_path(dat_id):
    """Return the full path of data file dat_id inside DATA_ID."""
    return os.path.join(DATA_ID, dat_id)

def save_fig(fig_id):
    """Save the current matplotlib figure as a png file in FIGURE_ID."""
    plt.savefig(image_path(fig_id) + ".png", format='png')

from pylab import plt, mpl
# 'seaborn' was removed as a matplotlib style name in matplotlib >= 3.8;
# fall back to the default style instead of crashing on newer versions.
try:
    plt.style.use('seaborn')
except OSError:
    pass
mpl.rcParams['font.family'] = 'serif'

g = 9.80655 #m/s^2
# The mass and the drag constant D
D = 0.00245 #mass/length  kg/m
m = 0.2 #kg, mass of falling object
DeltaT = 0.001
#set up arrays
tfinal = 1.4
# set up number of points for all variables
n = ceil(tfinal/DeltaT)
# define scaling constant vT (the terminal velocity) used in the analytical solution
vT = sqrt(m*g/D)
# set up arrays for t, a, v, and y and arrays for analytical results
#brute force setting up of arrays for x and y, vx, vy, ax and ay
t = np.zeros(n)
vy = np.zeros(n)
y = np.zeros(n)
vx = np.zeros(n)
x = np.zeros(n)
yanalytic = np.zeros(n)
# Initial conditions, note that these correspond to an object falling in the y-direction only.
vx[0] = 0.0 #m/s
vy[0] = 0.0 #m/s
y[0] = 10.0 #m
x[0] = 0.0 #m
yanalytic[0] = y[0]
# Start integrating using Euler's method
nsteps = n   # number of valid samples; shrinks if the ball reaches the ground
for i in range(n-1):
    # expression for acceleration, note the absolute value and division by mass
    ax = -D*vx[i]*sqrt(vx[i]**2+vy[i]**2)/m
    ay = -g - D*vy[i]*sqrt(vx[i]**2+vy[i]**2)/m
    # update velocity and position
    vx[i+1] = vx[i] + DeltaT*ax
    x[i+1] = x[i] + DeltaT*vx[i]
    vy[i+1] = vy[i] + DeltaT*ay
    y[i+1] = y[i] + DeltaT*vy[i]
    # update time to next time step and compute analytical answer
    t[i+1] = t[i] + DeltaT
    yanalytic[i+1] = y[0]-(vT*vT/g)*log(cosh(g*t[i+1]/vT))+vy[0]*t[i+1]
    if ( y[i+1] < 0.0):
        # stop once the ball passes the floor and remember how far we got
        nsteps = i+2
        break
# Truncate all arrays to the computed part: if the loop broke early the
# untouched trailing zeros would pollute the table and the plots and make
# the relative-error column divide by zero.
t = t[:nsteps]
vy = vy[:nsteps]
y = y[:nsteps]
vx = vx[:nsteps]
x = x[:nsteps]
yanalytic = yanalytic[:nsteps]
data = {'t[s]': t,
        'Relative error in y': abs((y-yanalytic)/yanalytic),
        'vy[m/s]': vy,
        'x[m]': x,
        'vx[m/s]': vx
}
NewData = pd.DataFrame(data)
# display() is an IPython/Jupyter builtin; fall back to print in plain Python
try:
    display(NewData)
except NameError:
    print(NewData)
# save to file; the with-statement guarantees the file handle is closed
# (the original opened it at the top and never closed it)
with open(data_path("Eulerresults.dat"), 'w') as outfile:
    NewData.to_csv(outfile, index=False)
#then plot
fig, axs = plt.subplots(4, 1)
axs[0].plot(t, y)
axs[0].set_xlim(0, tfinal)
axs[0].set_ylabel('y')
axs[1].plot(t, vy)
axs[1].set_ylabel('vy[m/s]')
axs[1].set_xlabel('time[s]')
axs[2].plot(t, x)
axs[2].set_xlim(0, tfinal)
axs[2].set_ylabel('x')
axs[3].plot(t, vx)
axs[3].set_ylabel('vx[m/s]')
axs[3].set_xlabel('time[s]')
fig.tight_layout()
save_fig("EulerIntegration")
plt.show()
We see a good agreement with the analytical solution. This agreement
improves if we decrease $\Delta t$. Furthermore, since we put the
initial velocity and position in the $x$ direction to zero,
the motion in the $x$-direction is
zero, as expected.
# Vectorized version: position and velocity are stored as (n, 2) arrays,
# so the x- and y-components are advanced in a single Euler step.
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os

# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"

# Make sure every output directory exists before anything is written
for folder in (PROJECT_ROOT_DIR, FIGURE_ID, DATA_ID):
    if not os.path.exists(folder):
        os.makedirs(folder)

def image_path(fig_id):
    """Return the full path of figure file fig_id inside FIGURE_ID."""
    return os.path.join(FIGURE_ID, fig_id)

def data_path(dat_id):
    """Return the full path of data file dat_id inside DATA_ID."""
    return os.path.join(DATA_ID, dat_id)

def save_fig(fig_id):
    """Save the current matplotlib figure as a png file in FIGURE_ID."""
    plt.savefig(image_path(fig_id) + ".png", format='png')

from pylab import plt, mpl
plt.style.use('seaborn')
mpl.rcParams['font.family'] = 'serif'

# Physical constants
g = 9.80655   # gravitational acceleration, m/s^2 (6 digits after the point)
D = 0.00245   # drag constant, kg/m
m = 0.2       # mass, kg
# Gravity as a constant two-component (x, y) force vector
G = -m*g*np.array([0.0,1])

# Time grid
DeltaT = 0.01
tfinal = 1.3
n = ceil(tfinal/DeltaT)
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))

# Initial state as compact 2-dimensional arrays:
# start 10 m above the ground, moving horizontally at 10 m/s
r[0] = np.array([0.0,10.0])
v[0] = np.array([10.0,0.0])

# Euler integration of dr/dt = v, dv/dt = (F_drag + F_gravity)/m
for step in range(n-1):
    # note that the quadratic drag needs the norm (speed) of the velocity;
    # one could also define a small helper function for this
    speed = np.linalg.norm(v[step])
    drag = -D*v[step]*speed          # F_D = -D v |v|
    accel = (drag + G)/m             # Newton's second law at time t_step
    # advance velocity, position and time by one Euler step
    v[step+1] = v[step] + DeltaT*accel
    r[step+1] = r[step] + DeltaT*v[step]
    t[step+1] = t[step] + DeltaT

# Four stacked panels: y, vy, x and vx as functions of time
fig, axs = plt.subplots(4, 1)
panels = [
    (r[:,1], 'y', None, True),
    (v[:,1], 'vy[m/s]', 'time[s]', False),
    (r[:,0], 'x', None, True),
    (v[:,0], 'vx[m/s]', 'time[s]', False),
]
for ax, (series, ylab, xlab, clip) in zip(axs, panels):
    ax.plot(t, series)
    if clip:
        ax.set_xlim(0, tfinal)
    ax.set_ylabel(ylab)
    if xlab is not None:
        ax.set_xlabel(xlab)
fig.tight_layout()
save_fig("EulerIntegration")
plt.show()
Till now we have only introduced gravity and air resistance and studied
their effects via a constant acceleration due to gravity and the force
arising from air resistance. But what happens when the ball hits the
floor? What if we would like to simulate the normal force from the floor acting on the ball?
We need then to include a force model for the normal force from
the floor on the ball. The simplest approach to such a system is to introduce a contact force
model represented by a spring model. We model the interaction between the floor
and the ball as a single spring. But the normal force is zero when
there is no contact. Here we define a simple model that allows us to include
such effects in our models.
The normal force from the floor on the ball is represented by a spring force. This
is a strong simplification of the actual deformation process occurring at the contact
between the ball and the floor due to the deformation of both the ball and the floor.
The deformed region corresponds roughly to the region of **overlap** between the
ball and the floor. The depth of this region is $\Delta y = R − y(t)$, where $R$
is the radius of the ball. This is supposed to represent the compression of the spring.
Our model for the normal force acting on the ball is then
$$
\boldsymbol{N} = k (R - y(t)) \boldsymbol{e}_y.
$$
The normal force must act upward when $y < R$; since the compression
$R - y(t)$ is then positive, the overall sign must be positive.
However, we must also ensure that the normal force only acts when the ball is in
contact with the floor, otherwise the normal force is zero. The full formulation of the
normal force is therefore
$$
\boldsymbol{N} = k (R - y(t)) \boldsymbol{e}_y,
$$
when $y(t) < R$ and zero when $y(t) \ge R$.
In the numerical calculations you can choose $R=0.1$ m and the spring constant $k=1000$ N/m.
* Identify the forces acting on the ball and set up a diagram with the forces acting on the ball. Find the acceleration of the falling ball now with the normal force as well.
* Choose a large enough final time so you can study the ball bouncing up and down several times. Add the normal force and compute the height of the ball as function of time with and without air resistance. Comment your results.
The following code shows how
to set up the problem with gravitation, a drag force and a normal
force from the ground. The normal force makes the ball bounce up
again.
The code here includes all forces. Commenting out the air resistance will result in a ball which bounces up and down to the same height.
Furthermore, you will note that for larger values of $\Delta t$ the results will not be physically meaningful. Can you figure out why? Try also different values for the step size in order to see whether the final results agrees with what you expect.
# Smarter way with declaration of vx, vy, x and y
# Here we have added a normal force from the ground
# Common imports
import numpy as np
import pandas as pd
from math import *
import matplotlib.pyplot as plt
import os

# Where to save the figures and data files
PROJECT_ROOT_DIR = "Results"
FIGURE_ID = "Results/FigureFiles"
DATA_ID = "DataFiles/"

if not os.path.exists(PROJECT_ROOT_DIR):
    os.mkdir(PROJECT_ROOT_DIR)

if not os.path.exists(FIGURE_ID):
    os.makedirs(FIGURE_ID)

if not os.path.exists(DATA_ID):
    os.makedirs(DATA_ID)

def image_path(fig_id):
    """Return the full path of figure file fig_id inside FIGURE_ID."""
    return os.path.join(FIGURE_ID, fig_id)

def data_path(dat_id):
    """Return the full path of data file dat_id inside DATA_ID."""
    return os.path.join(DATA_ID, dat_id)

def save_fig(fig_id):
    """Save the current matplotlib figure as a png file in FIGURE_ID."""
    plt.savefig(image_path(fig_id) + ".png", format='png')

from pylab import plt, mpl
plt.style.use('seaborn')
mpl.rcParams['font.family'] = 'serif'

# Define constants
g = 9.80655 #in m/s^2
# NOTE(review): this drag constant is ten times the value (0.00245) used in
# the previous examples -- presumably intentional to make the damping of the
# bounces visible; confirm against the exercise text.
D = 0.0245 # in mass/length, kg/m
m = 0.2 # in kg
R = 0.1 # in meters, radius of the ball (sets where the contact force starts)
k = 1000.0 # in mass/time^2, spring constant of the contact-force model
# Define Gravitational force as a vector in x and y, zero x component
G = -m*g*np.array([0.0,1])
DeltaT = 0.001
#set up arrays
tfinal = 15.0
n = ceil(tfinal/DeltaT)
# set up arrays for t, v, and r, the latter contain the x and y comps
t = np.zeros(n)
v = np.zeros((n,2))
r = np.zeros((n,2))
# Initial conditions: launched from 2 m height at 10 m/s in both x and y
r0 = np.array([0.0,2.0])
v0 = np.array([10.0,10.0])
r[0] = r0
v[0] = v0
# Start integrating using Euler's method
for i in range(n-1):
    # Set up forces, air resistance FD
    # Normal force modeled as a spring: it acts only while the ball
    # overlaps the floor (y < R) and is proportional to the compression
    # R - y, pointing upward.
    if ( r[i,1] < R):
        N = k*(R-r[i,1])*np.array([0,1])
    else:
        N = np.array([0,0])
    vabs = sqrt(sum(v[i]*v[i]))   # speed |v|, needed by the quadratic drag
    FD = -D*v[i]*vabs
    Fnet = FD+G+N                 # net force: drag + gravity + normal force
    a = Fnet/m
    # update velocity, time and position (one explicit Euler step)
    v[i+1] = v[i] + DeltaT*a
    r[i+1] = r[i] + DeltaT*v[i]
    t[i+1] = t[i] + DeltaT
# Plot the trajectory y(x) of the bouncing ball
fig, ax = plt.subplots()
# NOTE(review): the x-limit uses tfinal although this axis shows the x
# position in meters -- looks like a copy-paste from the time plots; confirm.
ax.set_xlim(0, tfinal)
ax.set_ylabel('y[m]')
ax.set_xlabel('x[m]')
ax.plot(r[:,0], r[:,1])
fig.tight_layout()
save_fig("BouncingBallEuler")
plt.show()
"[email protected]"
] | |
aabaddb27174173d297db88b42fd6b3a544340a2 | b0b514f98fff9882ead77ebc768070c2f2e9c275 | /tests/urls.py | 032daa6ec752252f329131270a7c4e624d5c6b42 | [
"BSD-3-Clause"
] | permissive | bfirsh/pytest_django | 901aa54927210f483a4460c4537017e744827895 | 504fb57bccbdb3c06cbf126c7f34af50e686c83e | refs/heads/master | 2021-01-23T07:16:33.634613 | 2012-11-15T00:09:05 | 2012-11-15T00:09:05 | 286,373 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'admin-required/', 'tests.views.admin_required_view'),
)
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.