Dataset schema (one record per source file):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 3 to 616)
- content_id: string (length 40)
- detected_licenses: list (0 to 112 entries)
- license_type: string (2 classes)
- repo_name: string (length 5 to 115)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (777 classes)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 classes)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
- gha_language: string (149 classes)
- src_encoding: string (26 classes)
- language: string (1 value)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (3 to 10.2M)
- extension: string (188 classes)
- content: string (length 3 to 10.2M)
- authors: list (1 entry)
- author_id: string (length 1 to 132)

blob_id: ce45e39c5d8bac1037969802999ef4d0ac487163
directory_id: e3d969e2c9e4b57f4f7d58af5e44a00aa8fb15d3
path: /0886 Possible Bipartition.py
content_id: efd3c30cb3226e421b6cc27ccfa5dbea2357067a
detected_licenses: ["MIT"]
license_type: permissive
repo_name: kevin-fang/leetcode
snapshot_id: 2744ff01e791db6f60edf946ef71451fae92ef6f
revision_id: 3958f888b30bb3e29916880ecec49b3870a0bea3
branch_name: refs/heads/master
visit_date: 2022-12-15T07:50:01.056016
revision_date: 2020-09-10T03:47:53
committer_date: 2020-09-10T03:47:53
github_id: 294296037
star_events_count: 3
fork_events_count: 0
gha_license_id: MIT
gha_event_created_at: 2020-09-10T03:47:39
gha_created_at: 2020-09-10T03:47:38
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 951
extension: py
content:
from collections import defaultdict, deque
from typing import List

class Solution:
def possibleBipartition(self, N: int, dislikes: List[List[int]]) -> bool:
neighbors = defaultdict(set)
for a,b in dislikes:
neighbors[a].add(b)
neighbors[b].add(a)
colors = defaultdict(int)
for i in range(1,N+1):
if colors[i] == 0:
colors[i] = 1
bfs = deque([i])
while bfs:
for j in range(len(bfs)):
cur = bfs.popleft()
for neighbor in neighbors[cur]:
if colors[neighbor] == colors[cur]:
return False
if colors[neighbor] == 0:
colors[neighbor] = 3-colors[cur]
bfs.append(neighbor)
return True
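
# Minimal self-check, added for illustration (not part of the original snippet):
if __name__ == "__main__":
    sol = Solution()
    assert sol.possibleBipartition(4, [[1, 2], [1, 3], [2, 4]])      # split: {1, 4} vs {2, 3}
    assert not sol.possibleBipartition(3, [[1, 2], [1, 3], [2, 3]])  # dislike triangle: impossible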
authors: ["[email protected]"]
author_id:

blob_id: d5563aac9230b36f5b80103075450f10c7274578
directory_id: 2de2437bbf480f6518554bcb204106dd37262023
path: /office365/runtime/serviceOperationQuery.py
content_id: 54dddc29343498b339aa8a62c19dcbc2079b3d14
detected_licenses: ["MIT"]
license_type: permissive
repo_name: stardust85/Office365-REST-Python-Client
snapshot_id: 386e5bba16cdee1472b7e23d405a4bf9b6f5e73a
revision_id: cd369c607c7d137a000734e9c5e8f03ae3e3c603
branch_name: refs/heads/master
visit_date: 2022-09-29T19:44:02.166438
revision_date: 2020-06-03T23:12:40
committer_date: 2020-06-03T23:12:40
github_id: 269356313
star_events_count: 0
fork_events_count: 0
gha_license_id: MIT
gha_event_created_at: 2020-06-04T12:41:03
gha_created_at: 2020-06-04T12:41:02
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 974
extension: py
content:
from office365.runtime.client_query import ClientQuery
from office365.runtime.resource_path_service_operation import ResourcePathServiceOperation
class ServiceOperationQuery(ClientQuery):
def __init__(self, binding_type, method_name=None, method_params=None, parameter_type=None,
parameter_name=None, return_type=None):
"""
:type method_params: list or dict or None
:type method_name: str or None
"""
super(ServiceOperationQuery, self).__init__(binding_type, parameter_type, parameter_name, return_type)
self._method_name = method_name
self._method_params = method_params
self.static = False
@property
def method_url(self):
return ResourcePathServiceOperation(self.method_name, self.method_parameters).to_url()
@property
def method_name(self):
return self._method_name
@property
def method_parameters(self):
return self._method_params
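
# Illustrative note (hypothetical values, added for this listing): a query built
# as ServiceOperationQuery(web, "GetByTitle", ["Documents"]) exposes method_url
# by delegating to ResourcePathServiceOperation, which renders an OData-style
# segment along the lines of "GetByTitle('Documents')".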
authors: ["Ajilon80!"]
author_id: Ajilon80!

blob_id: 6ea6c324e5ab3ebb985b2079358f585f8734be93
directory_id: 76e7feaea74beb9d337885dcaa3ee59e26d9db70
path: /basics/nn2.py
content_id: 6f9cbd8aea264996b27941a924761f4a52324c18
detected_licenses: []
license_type: no_license
repo_name: sayantansatpati/dlf
snapshot_id: 8f9bec134212a6608f2b6854c120253677c71959
revision_id: ce8b083f31cd1b4f67ea3718cbbad5cac1eff1f4
branch_name: refs/heads/master
visit_date: 2021-01-11T15:47:02.118653
revision_date: 2017-11-14T21:04:19
committer_date: 2017-11-14T21:04:19
github_id: 79931519
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 619
extension: py
content:
import numpy as np

def disp(a, msg):
    print('\n### {0}'.format(msg))
    print('nDim: {0}, shape: {1}'.format(np.ndim(a), a.shape))
    print(a[:5])

X = np.array([[0,0,1],[0,1,1],[1,0,1],[1,1,1]])
disp(X, "X")
y = np.array([[0,1,1,0]]).T
disp(y, "y")
# Two weight matrices for a 3-4-1 network, initialized uniformly in [-1, 1)
syn0 = 2*np.random.random((3,4)) - 1
disp(syn0, "syn0")
syn1 = 2*np.random.random((4,1)) - 1
disp(syn1, "syn1")
for j in range(1000):  # Python 3: range replaces xrange
    # Forward pass through two sigmoid layers
    l1 = 1/(1+np.exp(-(np.dot(X,syn0))))
    l2 = 1/(1+np.exp(-(np.dot(l1,syn1))))
    # Backpropagation: error scaled by the sigmoid derivative at each layer
    l2_delta = (y - l2)*(l2*(1-l2))
    l1_delta = l2_delta.dot(syn1.T) * (l1 * (1-l1))
    syn1 += l1.T.dot(l2_delta)
    syn0 += X.T.dot(l1_delta)
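
# Added for illustration: after training, l2 should approximate the target y
# (values near 0, 1, 1, 0) on this XOR-like mapping.
disp(l2, "l2 (final predictions)")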
authors: ["[email protected]"]
author_id:

blob_id: f6cb4b6acd7359ae58644de8ce42a06ca40d370e
directory_id: ce722f35f63d7e7af3e9890cbea50b05d32c34c7
path: /crawler/dspider/spiders/hkexSituationSpider.py
content_id: f5c24ebe06cbdf7bfa80ffba929a9fa2839a4b2a
detected_licenses: []
license_type: no_license
repo_name: tfangz888/smart_deal_tool
snapshot_id: bc6645047e2c3ff36af0baed62e31d1c6cec4a15
revision_id: 0f0e4edfec582e93146b30273621a28c36a5d6ca
branch_name: refs/heads/master
visit_date: 2020-05-17T03:12:16.720526
revision_date: 2019-04-23T14:11:10
committer_date: 2019-04-23T14:11:10
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 5409
extension: py
content:
#coding=utf-8
import json
import scrapy
from datetime import datetime, timedelta
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError
from dspider.myspider import BasicSpider
from dspider.items import HkexTradeOverviewItem, HkexTradeTopTenItem
class HkexSpider(BasicSpider):
name = 'hkexSpider'
custom_settings = {
'ITEM_PIPELINES': {
'dspider.pipelines.DspiderPipeline': 2
}
}
def start_requests(self):
matching_url = "https://sc.hkex.com.hk/TuniS/www.hkex.com.hk/chi/csm/DailyStat/data_tab_daily_{}c.js"
end_date = datetime.now().strftime('%Y.%m.%d')
start_date = self.get_nday_ago(end_date, 10, dformat = '%Y.%m.%d')
        while start_date <= end_date:  # adjust the date range here as needed
start_date = self.get_tomorrow_date(sdate = start_date)
url = matching_url.format(start_date.replace('.', ''))
yield scrapy.Request(url=url, callback=self.parse, errback=self.errback_httpbin, dont_filter=True)
    def parse(self, response):
        try:
            jsonstr = response.text.split("=")[1]
            data = json.loads(jsonstr)
            # The feed lists four boards in a fixed order:
            # SSE northbound, SSE southbound, SZSE northbound, SZSE southbound.
            boards = (("sse", "north"), ("sse", "south"), ("szse", "north"), ("szse", "south"))
            for bond_data, (market, direction) in zip(data, boards):
                yield self.parseTradeOverviewItem(bond_data, market, direction)
                for top_ten_item in self.parseTradeTopTenItem(bond_data, market, direction):
                    yield top_ten_item
        except Exception as e:
            print(e)
def parseTradeOverviewItem(self, need_parse_data, market, direction):
trade_overview_tr = need_parse_data["content"][0]["table"]["tr"]
item = HkexTradeOverviewItem()
item['market'] = market
item['direction'] = direction
item['date'] = need_parse_data["date"]
item['total_turnover'] = trade_overview_tr[0]["td"][0][0]
item['buy_turnover'] = trade_overview_tr[1]["td"][0][0]
item['sell_turnover'] = trade_overview_tr[2]["td"][0][0]
item['total_trade_count'] = trade_overview_tr[3]["td"][0][0]
item['buy_trade_count'] = trade_overview_tr[4]["td"][0][0]
item['sell_trade_count'] = trade_overview_tr[5]["td"][0][0]
if need_parse_data["market"] == "SSE Northbound" or need_parse_data["market"] == "SZSE Northbound":
            # total quota used and the quota-usage ratio
item['dqb'] = trade_overview_tr[6]["td"][0][0]
item['dqb_ratio'] = trade_overview_tr[7]["td"][0][0]
else:
item['dqb'] = None
item['dqb_ratio'] = None
return item
def parseTradeTopTenItem(self, need_parse_data, market, direction):
items = []
trade_top_ten_tr = need_parse_data["content"][1]["table"]["tr"]
for i in range(10):
item = HkexTradeTopTenItem()
item['market'] = market
item['direction'] = direction
item['date'] = need_parse_data["date"]
item['rank'] = trade_top_ten_tr[i]["td"][0][0]
item['code'] = trade_top_ten_tr[i]["td"][0][1]
item['name'] = trade_top_ten_tr[i]["td"][0][2].strip()
item['buy_turnover'] = trade_top_ten_tr[i]["td"][0][3]
item['sell_turnover'] = trade_top_ten_tr[i]["td"][0][4]
item['total_turnover'] = trade_top_ten_tr[i]["td"][0][5]
items.append(item)
return items
def errback_httpbin(self, failure):
        # Log all errback failures here; to handle specific errors differently,
        # inspect the failure's type as below.
#print(repr(failure))
if failure.check(HttpError):
response = failure.value.response
#print('HttpError on %s', response.url)
elif failure.check(DNSLookupError):
request = failure.request
#print('DNSLookupError on %s', request.url)
elif failure.check(TimeoutError):
request = failure.request
#print('TimeoutError on %s', request.url)
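
# Typical usage (added for illustration): run from the Scrapy project root with
#   scrapy crawl hkexSpider
# Each yielded item then flows through dspider.pipelines.DspiderPipeline, as
# configured in custom_settings above.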
authors: ["[email protected]"]
author_id:

blob_id: 24050d06ae4154d502447a8ccfe7d229fa83e08b
directory_id: 2286b880df34e1bfabe79b3605de287040404560
path: /04-04/todolist/mahasiswa/views.py
content_id: 7259eaec7ed0098020a0f8469da5bc93517f17b6
detected_licenses: []
license_type: no_license
repo_name: iklimah27/praxis-academy-2
snapshot_id: e5d8b08807980d6fd8ff6ab73caa6ea18083c7f8
revision_id: 925853b520c9a8d7a87d8980d7fedfa604d3b4c8
branch_name: refs/heads/master
visit_date: 2022-12-25T01:54:45.572190
revision_date: 2020-10-15T07:22:06
committer_date: 2020-10-15T07:22:06
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1173
extension: py
content:
from django.shortcuts import render, redirect
from . import models, forms
def index(req):
tasks = models.Mhs.objects.all()
return render(req, 'mahasiswa/index.html',{
'data': tasks,
})
def new(req):
form_input = forms.MhsForm()
if req.POST:
form_input = forms.MhsForm(req.POST)
if form_input.is_valid():
form_input.save()
return redirect('/mahasiswa/')
return render(req, 'mahasiswa/new.html',{
'form' : form_input,
})
def detail(req, id):
mhs = models.Mhs.objects.filter(pk=id).first()
return render(req, 'mahasiswa/detail.html', {
'data': mhs,
})
def delete(req, id):
models.Mhs.objects.filter(pk=id).delete()
return redirect('/mahasiswa/')
def update(req, id):
if req.POST:
mhs = models.Mhs.objects.filter(pk=id).update(nama=req.POST['nama'], nim=req.POST['nim'], status=req.POST['status'], telp=req.POST['telp'], alamat=req.POST['alamat'])
return redirect('/mahasiswa/')
mhs = models.Mhs.objects.filter(pk=id).first()
return render(req, 'mahasiswa/update.html', {
'data': mhs,
})
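
# Hypothetical URLconf for these views (illustration only; the project's real
# urls.py is not part of this record):
# from django.urls import path
# from . import views
# urlpatterns = [
#     path('', views.index),
#     path('new/', views.new),
#     path('<int:id>/', views.detail),
#     path('<int:id>/delete/', views.delete),
#     path('<int:id>/update/', views.update),
# ]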
authors: ["[email protected]"]
author_id:

blob_id: 0b25adda0ddd3b445d9fee6ae58a58302663df91
directory_id: f8457b044305b5ef4944ab649c44268e9a32a0bc
path: /data/ship/color_gen.py
content_id: 7a85a179f64da38b63c1d1d1d76d5e6716e7f723
detected_licenses: ["MIT"]
license_type: permissive
repo_name: PcloD/PixelShipGenerator
snapshot_id: 163d2b2d7d1f9eea2d316c2d7d9c29d1c84d581a
revision_id: 33e4fa004890f388cd679bbbb6837bcc05465bbe
branch_name: refs/heads/master
visit_date: 2020-05-05T08:19:39.968725
revision_date: 2016-12-19T00:45:10
committer_date: 2016-12-19T00:45:10
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1774
extension: py
content:
from random import randint, choice  # explicit imports instead of a wildcard
class ColorPalette(object):
def __init__(self, num, variance=30):
self.num = num
self.variance = variance
self.base = self.set_random_color()
self.palette = self.set_palette()
def set_palette(self):
palette = {
1: self.base,
2: self.vary_color(self.base),
3: self.lighten_color(self.base),
4: self.darken_color(self.base),
}
return palette
@staticmethod
def verify_color(col):
verified = []
for v in col:
if v > 255:
v = 255
if v < 0:
v = 0
verified.append(v)
return tuple(verified)
    def vary_color(self, color):
        # Python 3 removed tuple parameters in function signatures (PEP 3113),
        # so unpack the (r, g, b) tuple inside the method instead.
        r, g, b = color
r_var = randint(-self.variance, self.variance)
g_var = randint(-self.variance, self.variance)
b_var = randint(-self.variance, self.variance)
new = r + r_var, g + g_var, b + b_var
return self.verify_color(new)
    def lighten_color(self, color):
        r, g, b = color
r_var = randint(0, self.variance)
g_var = randint(0, self.variance)
b_var = randint(0, self.variance)
new = r + r_var, g + g_var, b + b_var
return self.verify_color(new)
    def darken_color(self, color):
        r, g, b = color
r_var = randint(-self.variance, 0)
g_var = randint(-self.variance, 0)
b_var = randint(-self.variance, 0)
new = r + r_var, g + g_var, b + b_var
return self.verify_color(new)
def set_random_color(self):
r = randint(0, 255)
g = randint(0, 255)
b = randint(0, 255)
return r, g, b
    def get_color(self):
        # Return a random palette key; in Python 3, dict.keys() must be
        # materialized into a list before random.choice() can index it.
        return choice(list(self.palette.keys()))
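
# Minimal usage sketch (added for illustration, not in the original file):
if __name__ == '__main__':
    pal = ColorPalette(num=4)
    print(pal.base)                      # the random (r, g, b) base color
    print(pal.palette[pal.get_color()])  # one of the four stored variants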
authors: ["[email protected]"]
author_id:

blob_id: 8d07dbb7d243048f69e7a6b17948b087345934c2
directory_id: 7e0cdabf1e7514fb0e3d53548eaadd7be85ae5e6
path: /configs/helmet/merge/faster_rcnn_mobilenetv2_64_fpn_2x.py
content_id: a668d159c1d3ba2cfd6199b16e82ba9a061d865f
detected_licenses: ["Apache-2.0"]
license_type: permissive
repo_name: fanqie03/mmdetection.bak
snapshot_id: ce2697f3a0ca5603d923856fbdc8b7bb32066939
revision_id: 0bc0ea591b5725468f83f9f48630a1e3ad599303
branch_name: refs/heads/master
visit_date: 2023-05-12T02:44:11.209749
revision_date: 2020-03-09T01:58:39
committer_date: 2020-03-09T01:58:39
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 5419
extension: py
content:
# model settings
model = dict(
type='FasterRCNN',
pretrained='torchvision://mobilenet_v2',
backbone=dict(
type='MobileNetV2',
out_indices=(3, 6, 13, 18)),
neck=dict(
type='FPN',
in_channels=[24, 32, 96, 1280],
out_channels=64,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=64,
feat_channels=64,
anchor_scales=[8],
anchor_ratios=[1.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),  # sample_num: sampling points per bin
out_channels=64,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=64,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=3,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=64,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False))
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
        score_thr=0.05, nms=dict(type='nms', iou_thr=0.3), max_per_img=100)  # a smaller iou_thr suppresses more overlapping boxes
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_thr=0.5, min_score=0.05)
)
# classes = ['']
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
# dict(type='PhotoMetricDistortion',),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
# dataset settings
dataset_type = 'HelmetMergeDataset'
data_root = '/datasets/HelmetMerge/'
data = dict(
imgs_per_gpu=4,
workers_per_gpu=4,
train=dict(type=dataset_type,
data_root=data_root,
ann_file=data_root + 'ImageSets/Main/trainval.txt',
img_prefix=data_root,
pipeline=train_pipeline,
use_ignore=True),
val=dict(
type=dataset_type,
data_root=data_root,
ann_file=data_root + 'ImageSets/Main/test.txt',
img_prefix=data_root,
pipeline=test_pipeline),
test=dict(
type=dataset_type,
data_root=data_root,
ann_file=data_root + 'ImageSets/Main/test.txt',
img_prefix=data_root,
pipeline=test_pipeline),)
# optimizer
optimizer = dict(type='SGD', lr=0.005, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[16, 22])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 24
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = __file__.replace('configs', 'work_dirs')[:-3]  # drop '.py'; rstrip('.py') strips characters, not a suffix
load_from = None
resume_from = None
workflow = [('train', 1)]
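
# Typical mmdetection (v1.x-era) launch commands, assuming the standard repo
# layout (added for illustration, not part of the original config):
#   python tools/train.py configs/helmet/merge/faster_rcnn_mobilenetv2_64_fpn_2x.py
#   ./tools/dist_train.sh configs/helmet/merge/faster_rcnn_mobilenetv2_64_fpn_2x.py <num_gpus>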
authors: ["[email protected]"]
author_id:

blob_id: c5e37668b4e5631218e5d75f9260e597329993b2
directory_id: 7296c1214741a8cd3e2b70c90de6784d9fa53dba
path: /Assignments/Assignment_2/score.py
content_id: 7fd043b850098a87a8580534f7d2b08d979708c8
detected_licenses: []
license_type: no_license
repo_name: msaad1311/MLOps
snapshot_id: 949912c5417db5c08ce69df46867c3e84b90f810
revision_id: 484f2124cd84472c7971d428982507b9215a400f
branch_name: refs/heads/main
visit_date: 2023-03-29T12:13:38.286713
revision_date: 2021-03-30T14:01:57
committer_date: 2021-03-30T14:01:57
github_id: 349442396
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1788
extension: py
content:
import preprocessing_functions as pf
import config
# =========== scoring pipeline =========
# impute categorical variables
def predict(data):
# extract first letter from cabin
data['cabin']=pf.extract_cabin_letter(data,'cabin')
# impute NA categorical
data[config.CATEGORICAL_VARS]=pf.impute_na(data[config.CATEGORICAL_VARS],'Missing')
# impute NA numerical
data[config.NUMERICAL_TO_IMPUTE]=pf.impute_na(data[config.NUMERICAL_TO_IMPUTE],'Numerical')
# Group rare labels
for var in config.CATEGORICAL_VARS:
data[var] = pf.remove_rare_labels(data, var,config.FREQUENT_LABELS[var])
# encode variables
data = pf.encode_categorical(data,config.CATEGORICAL_VARS)
print(data.shape)
# check all dummies were added
data = pf.check_dummy_variables(data,config.DUMMY_VARIABLES)
print(data.shape)
# scale variables
data = pf.scale_features(data,config.OUTPUT_SCALER_PATH)
# make predictions
predictions = pf.predict(data,config.OUTPUT_MODEL_PATH)
return predictions
# ======================================
# small test that scripts are working ok
if __name__ == '__main__':
from sklearn.metrics import accuracy_score
import warnings
warnings.simplefilter(action='ignore')
# Load data
data = pf.load_data(config.PATH_TO_DATASET)
X_train, X_test, y_train, y_test = pf.divide_train_test(data,
config.TARGET)
pred = predict(X_test)
# evaluate
    # if your code reproduces the notebook, your output should be:
# test accuracy: 0.6832
print('test accuracy: {}'.format(accuracy_score(y_test, pred)))
print()
authors: ["[email protected]"]
author_id:

blob_id: 5a8c28170c5db7fe6028857aa65802779eba4dee
directory_id: c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
path: /cases/synthetic/sieve-big-6747.py
content_id: 4ecb7b1858f7119a72a2b841dd3562b74bafd51c
detected_licenses: []
license_type: no_license
repo_name: Virtlink/ccbench-chocopy
snapshot_id: c3f7f6af6349aff6503196f727ef89f210a1eac8
revision_id: c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
branch_name: refs/heads/main
visit_date: 2023-04-07T15:07:12.464038
revision_date: 2022-02-03T15:42:39
committer_date: 2022-02-03T15:42:39
github_id: 451969776
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 31752
extension: py
content:
# A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
            self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
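
# Expected output (added for illustration): the primes below 50, one per line
# (2, 3, 5, ..., 47); each sieve pass removes later multiples of the retained
# head element, so only primes survive in v.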
authors: ["[email protected]"]
author_id:

blob_id: 3269ab51f45d81768072c57f786466adaf54e3a9
directory_id: 4331b28f22a2efb12d462ae2a8270a9f666b0df1
path: /.history/dvdstore/webapp/views_20190914162701.py
content_id: 35796077408bdddb2144ad85082afb3e14261908
detected_licenses: []
license_type: no_license
repo_name: ZiyaadLakay/csc312.group.project
snapshot_id: ba772a905e0841b17478eae7e14e43d8b078a95d
revision_id: 9cdd9068b5e24980c59a53595a5d513c2e738a5e
branch_name: refs/heads/master
visit_date: 2020-07-26T23:30:22.542450
revision_date: 2019-09-16T11:46:41
committer_date: 2019-09-16T11:46:41
github_id: 200703160
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2019-08-05T17:52:37
gha_created_at: 2019-08-05T17:52:37
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 10190
extension: py
content:
from .models import DVD, Transaction, Customer
from django.core.paginator import EmptyPage,PageNotAnInteger, Paginator
from django.db.models import Q
from django.contrib.auth.models import User, auth
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.decorators import login_required, permission_required
from .form import DocumentForm
import datetime
#This is the homepage for the User
def home(request):
dvds = DVD.objects.all() #imports dvds from database
query = request.GET.get("query")
gen = request.GET.get("gen")
if query:
dvds = DVD.objects.filter(Q(Title__icontains=query))#Search Function according to name
if not DVD.objects.filter(Q(Title__icontains=query)).exists():
messages.info(request,'No search results for : '+query)
elif gen:
dvds = DVD.objects.filter(Q(genre__icontains=gen))#Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
genre = {'Action', 'Comedy', 'Drama', 'Family', 'Romance'}
    return render(request, 'home.html', {'dvds': dvds, 'genre': genre})  # one context dict; render()'s fourth positional argument is content_type, not a second context
#This is the page for clerks
@login_required
def clerk(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
dvds = DVD.objects.filter(Q(Title__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'clerk.html',context_dict)
@login_required
def userstbl(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
users = User.objects.filter(Q(username__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'userstbl.html',context_dict)
@login_required
def transactions(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
trans = Transaction.objects.filter(Q(TransactionNumber__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'transactions.html',context_dict)
def register2(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
username= request.POST['username']
email= request.POST['email']
password1= first_name[0]+last_name
if User.objects.filter(username=username).exists():
messages.info(request, 'Username Taken')
return redirect('clerk')
        elif User.objects.filter(email=email).exists():
            messages.info(request, 'Email Taken')
            return redirect('clerk')  # bail out instead of creating a user with a duplicate email
user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
user.save()
messages.info(request, 'User Created')
return redirect('/clerk')
def model_form_upload(request):
if request.method == 'POST':
form = DocumentForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('/clerk')
def booking(request):
username= request.POST['username']
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).update(BookingPickup=username)
return redirect('home')
def checkout(request):
dvdID= request.POST['dvdID']
numOfDays=request.POST['numDaysBooked']
dvdPrice=request.POST['dvdPrice']
users_ID=request.POST['user_ID']
MovieTitle=request.POST['MovieTitle']
payment=request.POST['payment']
bill=int(numOfDays)*int(dvdPrice)
DVD.objects.filter(id=dvdID).update(NumDaysBooked=numOfDays,InStock=False)
RentDate= datetime.date.today()
DueDate=RentDate+datetime.timedelta(days=int(numOfDays))
t = datetime.datetime.now().strftime("%H%M%S")
TransactionNumber=payment+str(RentDate)[0:4]+str(RentDate)[8:10]+t
#Amount
trans = Transaction(users_ID=users_ID, TransactionNumber=TransactionNumber, RentDate=RentDate, DueDate=DueDate, MovieTitle=MovieTitle, Payment_Method=payment,Amount="R"+str(bill),dvdID=dvdID)
trans.save()
return redirect('/clerk')
def checkin(request):
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).update(BookingPickup='None',InStock=True,NumDaysBooked=0)
return redirect('/clerk')
def deleteMovie(request):
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).delete()
return redirect('/clerk')
def deleteTransaction(request):
transID= request.POST['transID']
Transaction.objects.filter(id=transID).delete()
return redirect('/transactions')
def deleteUser(request):
userID= request.POST['userID']
User.objects.filter(id=userID).delete()
return redirect('/userstbl')
def user_detail(request):
id = None
if request.user.is_authenticated:
id = request.user.id
print(id)
detail2 = Customer.objects.all()
detail1 = User.objects.filter( id = id )
print(str(detail1[0]))
detail2 = Customer.objects.filter(Q(username__icontains=str(detail1[0]))).values()
answers_list = list(detail2)
myString=str(answers_list[0])
import re
myarray=re.split(':|,',myString)#myString.split(":,")
if len(myarray)>39:
for i in range(len(myarray)):
print(str(i)+" "+str(myarray[i]))
phone_number=str(myarray[39])
address=str(myarray[41])
identification=str(myarray[43])
return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2,})
return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2})
def registerCustomer(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
phone_number= request.POST['phone_number']
address= request.POST['address']
identification= request.POST['identification']
email= request.POST['email']
password1= request.POST['password1']
password2= request.POST['password2']
username= request.POST['username']
if password1 == password2 :
if Customer.objects.filter(username=username).exists():
messages.info(request, 'Username Taken')
return redirect('register.html')
elif Customer.objects.filter(email=email).exists():
messages.info(request, 'Email Taken')
return redirect('register.html')
user = Customer.objects.create_user(phone_number=phone_number, address=address,identification=identification,username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
user.save()
# customer.save()
messages.info(request, 'User Created')
# messages.info(request, 'Customer Created')
return redirect('login.html')
else:
print('password does not match')
messages.info(request, 'Password does not match')
return redirect('register.html')
return redirect('login.html')
else:
return render(request, 'register.html')
def updateCustomer(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
phone_number= request.POST['phone_number']
address= request.POST['address']
identification= request.POST['identification']
email= request.POST['email']
username= request.POST['username']
userID=request.POST['userID']
user = Customer.objects.filter(id=userID).update(phone_number=phone_number, address=address,identification=identification,username=username, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
return redirect('home')
def updateUser(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
email= request.POST['email']
username= request.POST['username']
userID=request.POST['userID']
user = User.objects.filter(id=userID).update(username=username, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
return redirect('home')
authors: ["[email protected]"]
author_id:

blob_id: 7495126aafd1f8be0bb43992f7a74b53439b8e24
directory_id: 0b027f27ff6951c6447fcc6c8ae984e2d0d24534
path: /first_project_20443/settings.py
content_id: a022a17f2b4c202d95257fd99e99039cfec6c886
detected_licenses: []
license_type: no_license
repo_name: crowdbotics-apps/first-project-20443
snapshot_id: 4fb6f29497797fb72f477d3bb862e69f525e54d4
revision_id: 48d036a3b30583e21705b9000231a0ad465272c0
branch_name: refs/heads/master
visit_date: 2022-12-24T20:11:27.110948
revision_date: 2020-09-19T17:11:06
committer_date: 2020-09-19T17:11:06
github_id: 296915301
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 5639
extension: py
content:
"""
Django settings for first_project_20443 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "first_project_20443.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "first_project_20443.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
[
"[email protected]"
] | |
aa0aea44ef815acaed2f9f28a547f09da18bcd98
|
378745aa3bbfabbe341fa0e6a3b0b572cc83259a
|
/test.py
|
bfa99442cda06de79df24867c6edec8e0f9394da
|
[] |
no_license
|
stemcloud01/test
|
c6c8dd643874a08f34db2a3272c7896587422817
|
72a1fbf77b4f2094b6127261bdf53bf43d4fc02f
|
refs/heads/master
| 2020-08-26T19:23:16.214851 | 2019-10-23T19:58:52 | 2019-10-23T19:58:52 | 217,119,266 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 79 |
py
|
#!/usr/bin/python
import subprocess
subprocess.call(["echo","helloworld1"])
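# A sketch (not in the original script) of capturing the command's output
# instead of just its exit status; subprocess.check_output is standard library:
#
#   output = subprocess.check_output(["echo", "helloworld1"])
#   print(output)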
|
[
"[email protected]"
] | |
7dc978fbff839ff4c2fc09ba79dd84913b12d20a
|
fbcac9ff8664e857e1f7d5409b170a23f432763f
|
/tests/test_util.py
|
4466050d91c1fb68eb4969ac37cebed560b4c6ea
|
[] |
no_license
|
claraj/iss_slack_bot
|
289d4af3cf108e888db3170048d7aa491984f759
|
8e36e81d8d571dfceb7e40ede44ca720fc699e50
|
refs/heads/master
| 2021-09-06T08:59:07.966377 | 2018-02-04T17:01:53 | 2018-02-04T17:01:53 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,471 |
py
|
from unittest import TestCase
from utils import in_future
from datetime import datetime, timedelta
import time
# Testing functions unrelated to GAE features
class TestInFuture(TestCase):
def setUp(self):
self.now = datetime.today()
        self.add_ten_seconds = timedelta(seconds=10)
        self.add_forty_seconds = timedelta(seconds=40)
def test_in_future(self):
# Example: pass_time is 10 seconds in the future. Return True
now_plus_ten = self.now + self.add_ten_seconds
now_plus_ten_ts = time.mktime(now_plus_ten.timetuple()) # Python 3 has a datetime.timestamp() function, but...
self.assertTrue(in_future(now_plus_ten_ts))
def test_in_future_now(self):
# Example: pass_time is now. Return False
now_ts = time.mktime(self.now.timetuple())
self.assertFalse(in_future(now_ts))
def test_in_future_beyond_min(self):
# Example: pass_time is 10 seconds in the future. min_time_in_future is 5. Return True
now_plus_ten = self.now + self.add_ten_seconds
now_plus_ten_ts = time.mktime(now_plus_ten.timetuple())
self.assertTrue(in_future(now_plus_ten_ts, 5))
def test_in_future_at_min(self):
# Example: pass_time is 10 seconds in the future. min_time_in_future is 10. Return False
now_plus_ten = self.now + self.add_ten_seconds
now_plus_ten_ts = time.mktime(now_plus_ten.timetuple())
self.assertFalse(in_future(now_plus_ten_ts, 10))
def test_in_future_in_past(self):
# Example: pass_time is in the past. return False
now_minus_ten = self.now - self.add_ten_seconds
now_minus_ten_ts = time.mktime(now_minus_ten.timetuple())
self.assertFalse(in_future(now_minus_ten_ts))
def test_in_future_in_past_negative_min(self):
        # Example: pass_time is 40 seconds in the past, min_time_delta is -60. return True
        now_minus_forty = self.now - self.add_forty_seconds
        now_minus_forty_ts = time.mktime(now_minus_forty.timetuple())
        self.assertTrue(in_future(now_minus_forty_ts, -60))
def test_in_future_in_past_beyond_negative_min(self):
        # Example: pass_time is 40 seconds in the past, min_time_delta is -10. return False
        now_minus_forty = self.now - self.add_forty_seconds
        now_minus_forty_ts = time.mktime(now_minus_forty.timetuple())
        self.assertFalse(in_future(now_minus_forty_ts, -10))
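# The utils module is not shown here; the following is a minimal sketch of
# what in_future() could look like, inferred purely from the tests above
# (pass_time is a Unix timestamp, min_time_in_future defaults to 0, and the
# comparison is strict). Kept as a comment so it does not shadow the import:
#
# def in_future(pass_time, min_time_in_future=0):
#     threshold = time.mktime(datetime.today().timetuple()) + min_time_in_future
#     return pass_time > threshold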
|
[
"[email protected]"
] | |
753f1bc6189e0ee7037082baa53044333b792640
|
275a96a33ae1f89e7b2ee0ecdbac7d78abe6d6cc
|
/test/test_pos_api.py
|
8c2788266c7397b2d42f5f30041879ecad09d054
|
[] |
no_license
|
cascadiarc/cyclos-python-client
|
8029ce07174f2fe92350a92dda9a60976b2bb6c2
|
a2e22a30e22944587293d51be2b8268bce808d70
|
refs/heads/main
| 2023-04-03T16:52:01.618444 | 2021-04-04T00:00:52 | 2021-04-04T00:00:52 | 354,419,532 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,602 |
py
|
# coding: utf-8
"""
Cyclos 4.11.5 API
The REST API for Cyclos 4.11.5 # noqa: E501
OpenAPI spec version: 4.11.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.pos_api import POSApi # noqa: E501
from swagger_client.rest import ApiException
class TestPOSApi(unittest.TestCase):
"""POSApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.pos_api.POSApi() # noqa: E501
def tearDown(self):
pass
def test_calculate_receive_payment_installments(self):
"""Test case for calculate_receive_payment_installments
Calculates the default installments for a scheduled payment # noqa: E501
"""
pass
def test_data_for_receive_payment(self):
"""Test case for data_for_receive_payment
Returns configuration data for receiving a payment (POS) # noqa: E501
"""
pass
def test_preview_receive_payment(self):
"""Test case for preview_receive_payment
Previews a POS payment before receiving it # noqa: E501
"""
pass
def test_receive_payment(self):
"""Test case for receive_payment
Receives a payment (POS) # noqa: E501
"""
pass
def test_receive_payment_otp(self):
"""Test case for receive_payment_otp
Generates a new One-Time-Password (OTP) for a pos payment # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
b49cd004b6a2b08e9956b2115e38c5393bc16c91
|
9919439783a3d9ec7a4435e50e0225ea1d6f2b69
|
/django_rest_json_api_example/models.py
|
f5f2ab812b4f5594b6975c8ef4dc23d0ff0dd16c
|
[] |
no_license
|
newcontext-oss/django-rest-json-api
|
19c2e5210c59d02eee88afb3061761f02f4037d6
|
107ef896397d93715d9f3eed34fcb6f14d5893b9
|
refs/heads/master
| 2021-01-15T20:27:51.771682 | 2017-10-02T18:41:28 | 2017-10-02T18:41:28 | 99,850,109 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 970 |
py
|
"""
Django ORM models for the JSON API examples.
"""
import uuid
from django.db import models
class Person(models.Model):
"""
JSON API example person model.
"""
    uuid = models.UUIDField(default=uuid.uuid4)  # pass the callable, not uuid4(), so each row gets a fresh UUID
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
class Article(models.Model):
"""
JSON API example article model.
"""
    uuid = models.UUIDField(default=uuid.uuid4)  # callable default: fresh UUID per row
title = models.CharField(max_length=255)
description = models.CharField(max_length=255, blank=True)
author = models.ForeignKey(Person, null=True, blank=True)
class Comment(models.Model):
"""
JSON API example comment model.
"""
class Meta:
ordering = ["uuid"]
    uuid = models.UUIDField(default=uuid.uuid4)  # callable default: fresh UUID per row
body = models.TextField()
article = models.ForeignKey(Article, blank=False, related_name='comments')
author = models.ForeignKey(Person, blank=False)
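# A short usage sketch (assumption: run inside a configured Django shell,
# e.g. `python manage.py shell`), showing how the example models relate:
#
#   author = Person.objects.create(first_name="Ada", last_name="Lovelace")
#   article = Article.objects.create(title="JSON API", author=author)
#   Comment.objects.create(article=article, author=author, body="First!")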
|
[
"[email protected]"
] | |
e53a2778967e857301d30abbdf2195fae233efaa
|
691793de7d07b17918d076b319281c706f7275c0
|
/signing_today_client/models/inline_response2013.py
|
2e6154a84d21e42c1c2a9d692ad4fc248a6ebdca
|
[
"MIT"
] |
permissive
|
signingtoday/signingtoday-sdk-python
|
1ddfae5340690c80760c500436631d4a8ff9c87f
|
ed267279622fb59f2ad8fa289157fc9cdf9d8a5b
|
refs/heads/master
| 2020-12-03T15:32:35.755222 | 2020-03-24T08:27:11 | 2020-03-24T08:27:11 | 231,372,803 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,273 |
py
|
# coding: utf-8
"""
Signing Today API
*Signing Today* enables seamless integration of digital signatures into any
website by the use of easy requests to our API. This is the smart way of
adding digital signature support with a great user experience.


*Signing Today APIs* use HTTP methods and are RESTful based, moreover they
are protected by a *server to server authentication* standard by the use of
tokens.


*Signing Today APIs* can be used in these environments:


| Environment | Description | Endpoint |
| ----------- | ----------- | -------- |
| Sandbox     | Test environment | `https://sandbox.signingtoday.com` |
| Live        | Production environment | `https://api.signingtoday.com` |


For every single request to Signing Today has to be defined the following
*HTTP* header:
- `Authorization`, which contains the authentication token.

If the request has a body than another *HTTP* header is requested:
- `Content-Type`, with `application/json` value.


Follows an example of usage to enumerate all the user of *my-org*
organization.

**Example**

```bash
$ curl https://sandbox.signingtoday.com/api/v1/my-org/users \
    -H 'Authorization: Token <access-token>'
```

## HTTP methods used

APIs use the right HTTP verb in every situation.

| Method   | Description                    |
| -------- | ------------------------------ |
| `GET`    | Request data from a resource   |
| `POST`   | Send data to create a resource |
| `PUT`    | Update a resource              |
| `PATCH`  | Partially update a resource    |
| `DELETE` | Delete a resourse              |


## Response definition

All the response are in JSON format.
As response to a request of all users of an organization you will have a
result like this:

```json
{
    "pagination": {
      "count": 75,
      "previous": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=1",
      "next": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=3",
      "pages": 8,
      "page": 2
    },
    "meta": {
      "code": 200
    },
    "data": [
      {
        "id": "jdo",
        "status": "enabled",
        "type": "Basic user account",
        "email": johndoe@dummyemail.com,
        "first_name": "John",
        "last_name": "Doe",
        "wallet": [],
        "created_by": "system",
        "owner": false,
        "automatic": false,
        "rao": false
      },
      ...
    ]
  }
```

The JSON of the response is made of three parts:
- Pagination
- Meta
- Data

### Pagination

*Pagination* object allows to split the response into parts and then to
rebuild it sequentially by the use of `next` and `previous` parameters, by
which you get previous and following blocks. The *Pagination* is present
only if the response is a list of objects.

The general structure of *Pagination* object is the following:

```json
{
    "pagination": {
      "count": 75,
      "previous": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=1",
      "next": "https://sandbox.signingtoday.com/api/v1/my-org/users?page=3",
      "pages": 8,
      "page": 2
    },
    ...
  }
```

### Meta

*Meta* object is used to enrich the information about the response. In the
previous example, a successful case of response, *Meta* will have value
`status: 2XX`. In case of unsuccessful response, *Meta* will have further
information, as follows:

```json
{
    "meta": {
      "code": <HTTP STATUS CODE>,
      "error_type": <STATUS CODE DESCRIPTION>,
      "error_message": <ERROR DESCRIPTION>
    }
  }
```

### Data

*Data* object outputs as object or list of them. Contains the expected data
as requested to the API.

## Search filters

Search filters of the API have the following structure:

`where_ATTRIBUTENAME`=`VALUE`

In this way you make a case-sensitive search of *VALUE*. You can extend it
through the Django lookup, obtaining more specific filters. For example:

`where_ATTRIBUTENAME__LOOKUP`=`VALUE`

where *LOOKUP* can be replaced with `icontains` to have a partial insensitive
research, where

`where_first_name__icontains`=`CHa`

matches with every user that have the *cha* string in their name, with
no differences between capital and lower cases.

[Here](https://docs.djangoproject.com/en/1.11/ref/models/querysets/#field-lookups)
the list of the lookups.

## Webhooks

Signing Today supports webhooks for the update of DSTs and identities status.
You can choose if to use or not webhooks and if you want to receive updates
about DSTs and/or identities. You can configurate it on application token
level, in the *webhook* field, as follows:

```json
"webhooks": {
  "dst": "URL",
  "identity": "URL"
  }
```

### DSTs status update

DSTs send the following status updates:
- **DST_STATUS_CHANGED**: whenever the DST changes its status
- **SIGNATURE_STATUS_CHANGED**: whenever one of the signatures changes its
status

#### DST_STATUS_CHANGED

Sends the following information:

```json
{
    "message": "DST_STATUS_CHANGED",
    "data": {
      "status": "<DST_STATUS>",
      "dst": "<DST_ID>",
      "reason": "<DST_REASON>"
    }
  }
```

#### SIGNATURE_STATUS_CHANGED

Sends the following information:

```json
{
    "message": "SIGNATURE_STATUS_CHANGED",
    "data": {
      "status": "<SIGNATURE_STATUS>",
      "group": <MEMBERSHIP_GROUP_INDEX>,
      "dst": {
        "id": "<DST_ID>",
        "title": "<DST_TITLE>"
      },
      "signature": "<SIGNATURE_ID>",
      "signer": "<SIGNER_USERNAME>",
      "position": "<SIGNATURE_POSITION>",
      "document": {
        "display_name": "<DOCUMENT_TITLE>",
        "id": "<DOCUMENT_ID>",
        "order": <DOCUMENT_INDEX>
      },
      "automatic": <DECLARES_IF_THE_SIGNER_IS_AUTOMATIC>,
      "page": "<SIGNATURE_PAGE>"
    }
  }
```

### Identities status update

Identities send the following status updates:
- **IDENTITY_REQUEST_ENROLLED**: whenever an identity request is activated

#### IDENTITY_REQUEST_ENROLLED

Sends the following information:

```json
{
    "message": "IDENTITY_REQUEST_ENROLLED",
    "data": {
      "status": "<REQUEST_STATUS>",
      "request": "<REQUEST_ID>",
      "user": "<APPLICANT_USERNAME>"
    }
  }
```

### Urlback

Sometimes may be necessary to make a redirect after an user, from the
signature tray, has completed his operations or activated a certificate.

If set, redirects could happen in 3 cases:
- after a signature or decline
- after a DST has been signed by all the signers or canceled
- after the activation of a certificate

In the first two cases the urlback returns the following information through
a data form:
- **dst-id**: id of the DST
- **dst-url**: signature_ticket of the signature
- **dst-status**: current status of the DST
- **dst-signature-id**: id of the signature
- **dst-signature-status**: current status of the signature
- **user**: username of the signer
- **decline-reason**: in case of a refused DST contains the reason of the
decline

In the last case the urlback returns the following information through a
data form:
- **user**: username of the user activated the certificate
- **identity-provider**: the provider has been used to issue the certificate
- **identity-request-id**: id of the enrollment request
- **identity-id**: id of the new identity
- **identity-label**: the label assigned to the identity
- **identity-certificate**: public key of the certificate


 # noqa: E501
The version of the OpenAPI document: 1.5.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class InlineResponse2013(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'data': 'SignatureTransaction',
'meta': 'MetaDataSuccess'
}
attribute_map = {
'data': 'data',
'meta': 'meta'
}
def __init__(self, data=None, meta=None): # noqa: E501
"""InlineResponse2013 - a model defined in OpenAPI""" # noqa: E501
self._data = None
self._meta = None
self.discriminator = None
if data is not None:
self.data = data
if meta is not None:
self.meta = meta
@property
def data(self):
"""Gets the data of this InlineResponse2013. # noqa: E501
:return: The data of this InlineResponse2013. # noqa: E501
:rtype: SignatureTransaction
"""
return self._data
@data.setter
def data(self, data):
"""Sets the data of this InlineResponse2013.
:param data: The data of this InlineResponse2013. # noqa: E501
:type: SignatureTransaction
"""
self._data = data
@property
def meta(self):
"""Gets the meta of this InlineResponse2013. # noqa: E501
:return: The meta of this InlineResponse2013. # noqa: E501
:rtype: MetaDataSuccess
"""
return self._meta
@meta.setter
def meta(self, meta):
"""Sets the meta of this InlineResponse2013.
:param meta: The meta of this InlineResponse2013. # noqa: E501
:type: MetaDataSuccess
"""
self._meta = meta
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InlineResponse2013):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
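# The long module docstring above documents pagination and the
# `where_ATTRIBUTENAME__LOOKUP` search filters. A minimal client-side sketch
# (an assumption for illustration -- `requests`, the organization name
# "my-org" and the token are placeholders, not part of this generated model):
#
# import requests
#
# def iter_users(token, base="https://sandbox.signingtoday.com/api/v1"):
#     """Walk the paginated user list, following pagination['next']."""
#     url = base + "/my-org/users?where_first_name__icontains=cha"
#     while url:
#         payload = requests.get(
#             url, headers={"Authorization": "Token %s" % token}).json()
#         for user in payload["data"]:
#             yield user
#         url = payload["pagination"].get("next")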
|
[
"[email protected]"
] | |
5ceec9be34f8bee06d3162cec9aacb9cb4578b59
|
eb9af63f5874345c03b567a944e2cb67ec8995d5
|
/leetcode/binarySearch/findminimumValueinSortedRotatedWIthDuplicate.py
|
2731d45cce9d2ad7025bb28d97bc1d079678bb36
|
[] |
no_license
|
ziqingW/pythonPlayground
|
262fc143c7997fb9f9a9b148359c4d2c7de84fc7
|
3aab1747a1e6a77de808073e8735f89704940496
|
refs/heads/master
| 2021-01-25T13:41:56.494266 | 2019-03-10T02:48:04 | 2019-03-10T02:48:04 | 123,607,323 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 984 |
py
|
# Suppose an array sorted in ascending order is rotated at some pivot unknown to you beforehand.
# (i.e., [0,1,2,4,5,6,7] might become [4,5,6,7,0,1,2]).
# Find the minimum element.
# The array may contain duplicates.
class Solution:
def findMin(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
left,right = 0, len(nums)-1
while left < right:
mid = (left+right) // 2
if nums[mid] > nums[right]:
left = mid + 1
elif nums[mid] == nums[right]:
if nums[left] == nums[mid]:
if len(set(nums[left:mid+1])) == 1:
left = mid + 1
elif len(set(nums[mid+1: right+1])) == 1:
right = mid
else:
right = mid
else:
right = mid
return nums[left]
# return min(nums)
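# Example usage / quick checks (not part of the original solution):
if __name__ == "__main__":
    s = Solution()
    assert s.findMin([4, 5, 6, 7, 0, 1, 2]) == 0
    assert s.findMin([2, 2, 2, 0, 1]) == 0
    assert s.findMin([3, 3, 1, 3]) == 1
    print("all findMin checks passed")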
|
[
"[email protected]"
] | |
2c1513fb42947cf5ca374844cd41d2a4d4ae5bfd
|
ad13583673551857615498b9605d9dcab63bb2c3
|
/output/models/ms_data/attribute_group/attg_d018_xsd/__init__.py
|
624bfe9285086da05657bad0b2b20d96f60e002a
|
[
"MIT"
] |
permissive
|
tefra/xsdata-w3c-tests
|
397180205a735b06170aa188f1f39451d2089815
|
081d0908382a0e0b29c8ee9caca6f1c0e36dd6db
|
refs/heads/main
| 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 |
MIT
| 2023-07-25T14:19:04 | 2020-02-10T21:59:47 |
Python
|
UTF-8
|
Python
| false | false | 141 |
py
|
from output.models.ms_data.attribute_group.attg_d018_xsd.attg_d018 import (
AttgRef,
Doc,
)
__all__ = [
"AttgRef",
"Doc",
]
|
[
"[email protected]"
] | |
2aba281c8d4b6dee7d96c653a117739563d1e6bd
|
89ba6569e82bfe1abbb85f58c3a264240ef5b68f
|
/Scripts/calc_SNA_Data_Eurasia_CDRSCE.py
|
375ff378a7d20bed12ffd1dcee10ec43332aa0ea
|
[
"MIT"
] |
permissive
|
muskanmahajan37/AMIP_Simu
|
b3792c24f2f82749ac4d9df48a11bb46d2b82236
|
6370626fe81baf5c2280dab95fdab08a873f3a84
|
refs/heads/master
| 2022-02-08T08:09:08.575967 | 2019-07-25T22:29:34 | 2019-07-25T22:29:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,914 |
py
|
"""
Script calculates Eurasian snow area index for October-November using data
from the Rutgers Global Snow Lab data
Notes
-----
Author : Zachary Labe
Date : 25 July 2019
"""
### Import modules
import datetime
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as sts
import scipy.signal as SS
### Define directories
directoryfigure = '/home/zlabe/Desktop/'
directoryoutput = '/home/zlabe/Documents/Research/AMIP/Data/'
### Define time
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
titletime = currentmn + '/' + currentdy + '/' + currentyr
print('\n' '----Calculating Snow Cover Area Index - %s----' % titletime)
#### Allot time series
year1 = 1979
year2 = 2015
years = np.arange(year1,year2+1,1)
yearsdata = np.arange(year1,2018+1,1)
m = 12 # number of months
### Read in all months of data
yearsdata,months,data = np.genfromtxt(directoryoutput + \
'CDR_SCE_Eurasia_Monthly.txt',unpack=True,
usecols=[0,1,2])
### Reshape data into []
yearssort = np.reshape(yearsdata,(yearsdata.shape[0]//m,m))
monthsort = np.reshape(months,(months.shape[0]//m,m))
datasortq = np.reshape(data,(data.shape[0]//m,m))
### Change units from km^2 to 10^6 km^2
datasort = datasortq/1e6
### Calculate October-November index (1979-2015)
octnov = np.nanmean(datasort[:years.shape[0],9:11],axis=1)
octnovdt = SS.detrend(octnov,type='linear')
### Calculate October index (1979-2015)
octonly = datasort[:years.shape[0],9:10].squeeze()
octonlydt = SS.detrend(octonly,type='linear')
### Save both indices (Oct-Nov)
np.savetxt(directoryoutput + 'SNA_Eurasia_ON_CDRSCE.txt',
np.vstack([years,octnov]).transpose(),delimiter=',',fmt='%3.1f',
footer='\n Snow cover index calculated from' \
'CDR SCE record in Global Snow Lab by \n' \
'Rutgers',newline='\n\n')
np.savetxt(directoryoutput + 'SNA_Eurasia_ON_CDRSCE_DETRENDED.txt',
np.vstack([years,octnovdt]).transpose(),delimiter=',',fmt='%3.1f',
footer='\n Snow cover index calculated from' \
'CDR SCE record in Global Snow Lab by \n' \
'Rutgers',newline='\n\n')
### Save both indices (Oct)
np.savetxt(directoryoutput + 'SNA_Eurasia_O_CDRSCE.txt',
np.vstack([years,octonly]).transpose(),delimiter=',',fmt='%3.1f',
footer='\n Snow cover index calculated from' \
'CDR SCE record in Global Snow Lab by \n' \
'Rutgers',newline='\n\n')
np.savetxt(directoryoutput + 'SNA_Eurasia_O_CDRSCE_DETRENDED.txt',
np.vstack([years,octonlydt]).transpose(),delimiter=',',fmt='%3.1f',
footer='\n Snow cover index calculated from' \
'CDR SCE record in Global Snow Lab by \n' \
'Rutgers',newline='\n\n')
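# Reading one of the saved indices back (a sketch; np.savetxt prefixes the
# footer with '#', which np.genfromtxt skips by default):
#
#   years, octnov = np.genfromtxt(directoryoutput + 'SNA_Eurasia_ON_CDRSCE.txt',
#                                 delimiter=',', unpack=True)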
|
[
"[email protected]"
] | |
ec6aab7ffc23ab1a4c0880cb262f17cb5af1fd33
|
5d3fd9328cf3fab1056d79cd8464df3f1719b30e
|
/MG5_aMC_v2_6_7/tests/parallel_tests/madevent_comparator.py
|
cfe449cb4c00d2f3799c94df2b4fa7c1e617ba95
|
[] |
no_license
|
BKailasapathy/madgraph
|
c8d34147146edda1f147e8259539c0e86e6209c2
|
949fcf00f111eadf8948827e2933952b7823778d
|
refs/heads/master
| 2023-07-15T08:38:08.382422 | 2021-08-21T09:12:23 | 2021-08-21T09:12:23 | 398,511,168 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 25,934 |
py
|
################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
"""A set of objects to allow for easy comparisons of results from various ME
generators (e.g., MG v5 against v4, ...) and output nice reports in different
formats (txt, tex, ...).
"""
import datetime
import glob
import itertools
import logging
import os
import re
import shutil
import subprocess
import sys
import time
pjoin = os.path.join
# Get the grand parent directory (mg5 root) of the module real path
# (tests/acceptance_tests) and add it to the current PYTHONPATH to allow
# for easy import of MG5 tools
_file_path = os.path.dirname(os.path.realpath(__file__))
import madgraph.iolibs.template_files as template_files
import madgraph.iolibs.save_load_object as save_load_object
import madgraph.interface.master_interface as cmd_interface
import madgraph.various.misc as misc
from madgraph import MadGraph5Error, MG5DIR
import me_comparator
class MadEventComparator(me_comparator.MEComparator):
"""Base object to run comparison tests. Take standard Runner objects and
a list of proc as an input and return detailed comparison tables in various
formats."""
def run_comparison(self, proc_list, model='sm', orders={}):
"""Run the codes and store results."""
if isinstance(model, basestring):
model= [model] * len(self.me_runners)
self.results = []
self.proc_list = proc_list
logging.info(\
"Running on %i processes with order: %s, in model %s" % \
(len(proc_list),
me_comparator.MERunner.get_coupling_definitions(orders),
'/'.join([onemodel for onemodel in model])))
pass_proc = False
for i,runner in enumerate(self.me_runners):
cpu_time1 = time.time()
logging.info("Now running %s" % runner.name)
if pass_proc:
runner.pass_proc = pass_proc
self.results.append(runner.run(proc_list, model[i], orders))
cpu_time2 = time.time()
logging.info(" Done in %0.3f s" % (cpu_time2 - cpu_time1))
# logging.info(" (%i/%i with zero ME)" % \
# (len([res for res in self.results[-1] if res[0][0] == 0.0]),
# len(proc_list)))
def cleanup(self):
"""Call cleanup for each MERunner."""
for runner in self.me_runners:
logging.info("Cleaning code %s runner" % runner.name)
runner.cleanup()
def output_result(self, filename=None, tolerance=3e-02):
"""Output result as a nicely formated table. If filename is provided,
write it to the file, else to the screen. Tolerance can be adjusted."""
def detect_type(data):
"""check if the type is an integer/float/string"""
if data.isdigit():
return 'int'
elif len(data) and data[0] == '-' and data[1:].isdigit():
return 'int'
try:
float(data)
return 'float'
except:
return 'str'
proc_col_size = 17
for proc in self.results[0]:
if len(proc) + 1 > proc_col_size:
proc_col_size = len(proc) + 1
col_size = 17
pass_test = 0
fail_test = 0
failed_prop_list = []
res_str = "\n" + self._fixed_string_length("Checked", proc_col_size) + \
''.join([self._fixed_string_length(runner.name, col_size) for \
runner in self.me_runners]) + \
self._fixed_string_length("Relative diff.", col_size) + \
"Result"
for prop in self.results[0]:
loc_results = []
succeed = True
for i in range(len(self.results)):
if not self.results[i].has_key(prop):
loc_results.append('not present')
succeed = False
else:
loc_results.append(self.results[i][prop])
res_str += '\n' + self._fixed_string_length(proc, proc_col_size)+ \
''.join([self._fixed_string_length(str(res),
col_size) for res in loc_results])
if not succeed:
res_str += self._fixed_string_length("NAN", col_size)
res_str += 'failed'
fail_test += 1
failed_prop_list.append(prop)
else:
# check the type (integer/float/string)
type = detect_type(loc_results[0])
if type == 'int':
if any(detect_type(loc)=='float' for loc in loc_results):
type = 'float'
if type == 'float':
if max(loc_results) == 0.0 and min(loc_results) == 0.0:
res_str += self._fixed_string_length("0", col_size)
res_str += 'passed'
pass_test +=1
else:
loc_results = [float(d) for d in loc_results]
diff = (max(loc_results) - min(loc_results)) / \
(max(loc_results) + min(loc_results))
res_str += self._fixed_string_length("%1.10e" % diff, col_size)
if diff >= tolerance:
res_str += 'failed'
failed_prop_list.append(prop)
fail_test += 1
else:
res_str += 'passed'
pass_test +=1
else:
for value in loc_results:
if value != loc_results[0]:
res_str += self._fixed_string_length("differ", col_size)
res_str += 'failed'
failed_prop_list.append(prop)
fail_test += 1
break
res_str += self._fixed_string_length("identical", col_size)
res_str += 'passed'
pass_test +=1
res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
(pass_test, pass_test + fail_test,
fail_test, pass_test + fail_test)
if fail_test != 0:
res_str += "\nFailed processes: %s" % ', '.join(failed_prop_list)
logging.info(res_str)
if filename:
file = open(filename, 'w')
file.write(res_str)
file.close()
return fail_test, failed_prop_list
def assert_processes(self, test_object, tolerance = 1e-06):
"""Run assert to check that all processes passed comparison"""
fail_test, fail_prop = self.output_result('', tolerance)
test_object.assertEqual(fail_test, 0, "Failed for processes: %s" % ', '.join(fail_prop))
class MadEventComparatorGauge(me_comparator.MEComparatorGauge):
"""Base object to run comparison tests. Take standard Runner objects and
a list of proc as an input and return detailed comparison tables in various
formats."""
def run_comparison(self, proc_list, model='sm', orders={}):
"""Run the codes and store results."""
#if isinstance(model, basestring):
# model= [model] * len(self.me_runners)
self.results = []
self.proc_list = proc_list
logging.info(\
"Running on %i processes with order: %s, in model %s" % \
(len(proc_list),
' '.join(["%s=%i" % (k, v) for k, v in orders.items()]),
model))
pass_proc = False
for i,runner in enumerate(self.me_runners):
cpu_time1 = time.time()
logging.info("Now running %s" % runner.name)
if pass_proc:
runner.pass_proc = pass_proc
self.results.append(runner.run(proc_list, model, orders))
cpu_time2 = time.time()
logging.info(" Done in %0.3f s" % (cpu_time2 - cpu_time1))
# logging.info(" (%i/%i with zero ME)" % \
# (len([res for res in self.results[-1] if res[0][0] == 0.0]),
# len(proc_list)))
def cleanup(self):
"""Call cleanup for each MERunner."""
for runner in self.me_runners:
logging.info("Cleaning code %s runner" % runner.name)
runner.cleanup()
def output_result(self, filename=None, tolerance=3e-03):
"""Output result as a nicely formated table. If filename is provided,
write it to the file, else to the screen. Tolerance can be adjusted."""
def detect_type(data):
"""check if the type is an integer/float/string"""
if data.isdigit():
return 'int'
elif len(data) and data[0] == '-' and data[1:].isdigit():
return 'int'
try:
float(data)
return 'float'
except:
return 'str'
proc_col_size = 17
for proc in self.results[0]:
if len(proc) + 1 > proc_col_size:
proc_col_size = len(proc) + 1
col_size = 17
pass_test = 0
fail_test = 0
failed_proc_list = []
res_str = "\n" + self._fixed_string_length("Process", proc_col_size) + \
''.join([self._fixed_string_length(runner.name, col_size) for \
runner in self.me_runners]) + \
self._fixed_string_length("Diff both unit", col_size) + \
self._fixed_string_length("Diff both cms", col_size) + \
self._fixed_string_length("Diff both fixw", col_size) + \
self._fixed_string_length("Diff both feyn", col_size) + \
"Result"
for proc in self.results[0]:
loc_results = []
succeed = True
for i in range(len(self.results)):
if not self.results[i].has_key(proc):
loc_results.append('not present')
succeed = False
else:
loc_results.append(self.results[i][proc])
res_str += '\n' + self._fixed_string_length(proc, proc_col_size)+ \
''.join([self._fixed_string_length(str(res),
col_size) for res in loc_results])
if not succeed:
res_str += self._fixed_string_length("NAN", col_size)
res_str += 'failed'
fail_test += 1
failed_proc_list.append(proc)
else:
# check the type (integer/float/string)
type = detect_type(loc_results[0])
if type == 'float':
if max(loc_results) == 0.0 and min(loc_results) == 0.0:
res_str += self._fixed_string_length("0", col_size)
res_str += 'passed'
pass_test +=1
else:
loc_results = [float(d) for d in loc_results]
diff_feyn = abs(loc_results[1] - loc_results[2]) / \
(loc_results[1] + loc_results[2] + 1e-99)
diff_unit = abs(loc_results[0] - loc_results[3]) / \
(loc_results[0] + loc_results[3] + 1e-99)
diff_cms = abs(loc_results[0] - loc_results[1]) / \
(loc_results[0] + loc_results[1] + 1e-99)
diff_fixw = abs(loc_results[2] - loc_results[3]) / \
(loc_results[2] + loc_results[3] + 1e-99)
res_str += self._fixed_string_length("%1.10e" % diff_unit, col_size)
res_str += self._fixed_string_length("%1.10e" % diff_cms, col_size)
res_str += self._fixed_string_length("%1.10e" % diff_fixw, col_size)
res_str += self._fixed_string_length("%1.10e" % diff_feyn, col_size)
if diff_feyn < 4e-2 and diff_cms < 1e-2 and diff_fixw < 1e-2 and \
diff_unit < 4e-2:
pass_test += 1
res_str += "Pass"
else:
fail_test += 1
failed_proc_list.append(proc)
res_str += "Fail"
else:
for value in loc_results:
if value != loc_results[0]:
res_str += self._fixed_string_length("differ", col_size)
res_str += 'failed'
failed_proc_list.append(proc)
fail_test += 1
break
res_str += self._fixed_string_length("identical", col_size)
res_str += 'passed'
pass_test +=1
res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
(pass_test, pass_test + fail_test,
fail_test, pass_test + fail_test)
if fail_test != 0:
res_str += "\nFailed processes: %s" % ', '.join(failed_proc_list)
logging.info(res_str)
if filename:
file = open(filename, 'w')
file.write(res_str)
file.close()
return fail_test, failed_proc_list
def assert_processes(self, test_object, tolerance = 1e-06):
"""Run assert to check that all processes passed comparison"""
fail_test, fail_prop = self.output_result('', tolerance)
test_object.assertEqual(fail_test, 0, "Failed for processes: %s" % ', '.join(fail_prop))
class FakeRunner(object):
temp_dir_name = ""
proc_list = []
res_list = []
setup_flag = False
name = 'Store'
type = 'Store'
model_dir = os.path.join(MG5DIR,'models')
def cleanup(self):
pass
class MadEventRunner(object):
"""Base class to containing default function to setup, run and access results
produced with a specific ME generator.
"""
temp_dir_name = ""
proc_list = []
res_list = []
setup_flag = False
name = 'None'
model_dir = os.path.join(MG5DIR,'models')
class MERunnerException(Exception):
"""Default Exception class for MERunner objects"""
def setup(self):
"""Empty method to define all warming up operations to be executed before
actually running the generator.
"""
pass
def run(self, proc_list, model, orders, energy):
"""Run the generator for a specific list of processes (see below for
conventions) and store the result.
"""
pass
def get_result(self, proc_id):
"""Return the result (i.e., ME value for a particular PS point) for a
specific process identified with its id."""
return self.proc_list[proc_id]
def cleanup(self):
"""Perform some clean up procedure to leave the ME code directory in
the same state as it was initially (e.g., remove temp dirs, ...)
"""
pass
class MG5Runner(MadEventRunner):
"""Runner object for the MG5 Matrix Element generator."""
mg5_path = ""
name = 'MadGraph v5'
type = 'v5'
def setup(self, mg5_path, temp_dir=None):
"""Wrapper for the mg4 setup, also initializing the mg5 path variable"""
self.mg5_path = os.path.abspath(mg5_path)
if not temp_dir:
i=0
while os.path.exists(os.path.join(mg5_path,
"p_ME_test_%s_%s" % (self.type, i))):
i += 1
temp_dir = "p_ME_test_%s_%s" % (self.type, i)
self.temp_dir_name = temp_dir
def run(self, proc_list, model, orders={}):
"""Execute MG5 on the list of processes mentioned in proc_list, using
the specified model, the specified maximal coupling orders and a certain
energy for incoming particles (for decay, incoming particle is at rest).
"""
self.res_list = [] # ensure that to be void, and avoid pointer problem
self.proc_list = proc_list
self.model = model
self.orders = orders
self.non_zero = 0
dir_name = os.path.join(self.mg5_path, self.temp_dir_name)
# Create a proc_card.dat in the v5 format
proc_card_location = os.path.join(self.mg5_path, 'proc_card_%s.dat' % \
self.temp_dir_name)
proc_card_file = open(proc_card_location, 'w')
proc_card_file.write(self.format_mg5_proc_card(proc_list, model, orders))
proc_card_file.close()
logging.info("proc_card.dat file for %i processes successfully created in %s" % \
(len(proc_list), os.path.join(dir_name, 'Cards')))
# Run mg5
logging.info("Running MG5")
#proc_card = open(proc_card_location, 'r').read()
new_proc_list = []
cmd = cmd_interface.MasterCmd()
cmd.no_notification()
cmd.exec_cmd('import command %s' %proc_card_location)
#for line in proc_card.split('\n'):
# cmd.exec_cmd(line, errorhandling=False)
os.remove(proc_card_location)
values = self.get_values()
self.res_list.append(values)
return values
def format_mg5_proc_card(self, proc_list, model, orders):
"""Create a proc_card.dat string following v5 conventions."""
if model != 'mssm':
v5_string = "import model %s\n" % os.path.join(self.model_dir, model)
else:
v5_string = "import model %s\n" % model
v5_string += "set automatic_html_opening False\n"
if orders == {}:
couplings = ' '
else:
couplings = me_comparator.MERunner.get_coupling_definitions(orders)
for i, proc in enumerate(proc_list):
v5_string += 'add process ' + proc + ' ' + couplings + \
'@%i' % i + '\n'
v5_string += "output %s -f\n" % \
os.path.join(self.mg5_path, self.temp_dir_name)
v5_string += "launch -i --multicore\n"
v5_string += " set automatic_html_opening False\n"
v5_string += "edit_cards\n"
# v5_string += "set ickkw 0\n"
v5_string += "set LHC 13\n"
# v5_string += "set xqcut 0\n"
v5_string += "set auto_ptj_mjj True\n"
v5_string += "set cut_decays True\n"
v5_string += "set ickkw 0\n"
v5_string += "set xqcut 0\n"
v5_string += "survey run_01; refine 0.01; refine 0.01\n"
#v5_string += "print_results\n"
return v5_string
def get_values(self):
dir_name = os.path.join(self.mg5_path, self.temp_dir_name)
SubProc=[name for name in os.listdir(dir_name + '/SubProcesses')
if name[0]=='P' and
os.path.isdir(dir_name + '/SubProcesses/'+name) and \
name[1].isdigit()]
output = {}
#Part1: number of SubProcesses
numsubProc={}
for name in SubProc :
tag=name.split('_')[0][1:]
if numsubProc.has_key(tag):
numsubProc[tag]+=1
else: numsubProc[tag]=1
for key,value in numsubProc.items():
output['number_of_P'+key]=str(value)
#Part 2: cross section
for name in SubProc:
if os.path.exists(dir_name+'/SubProcesses/'+name+'/run_01_results.dat'):
filepath = dir_name+'/SubProcesses/'+name+'/run_01_results.dat'
else:
filepath = dir_name+'/SubProcesses/'+name+'/results.dat'
if not os.path.exists(filepath):
cross = 0
for G in os.listdir(dir_name+'/SubProcesses/'+name):
if os.path.isdir(pjoin(dir_name+'/SubProcesses/'+name,G)):
filepath = pjoin(dir_name+'/SubProcesses/'+name,G,'results.dat')
channel = G[1:]
for line in file(filepath):
splitline=line.split()
cross += float(splitline[9])
break
output['cross_'+name] = str(cross)
else:
for line in file(filepath):
splitline=line.split()
#if len(splitline)==8:
output['cross_'+name]=splitline[0]
print "found %s %s" % (splitline[0], splitline[1])
else:
return output
filepath = dir_name+'/HTML/run_01/results.html'
text = open(filepath).read()
#id="#P1_qq_ll" href=#P1_qq_ll onClick="check_link('#P1_qq_ll','#P1_qq_ll','#P1_qq_ll')"> 842.9
info = re.findall('id="\#(?P<a1>\w*)" href=\#(?P=a1) onClick="check_link\(\'\#(?P=a1)\',\'\#(?P=a1)\',\'\#(?P=a1)\'\)">\s* ([\d.e+-]*)', text)
for name,value in info:
output['cross_'+name] = value
return output
class MG5OldRunner(MG5Runner):
"""Runner object for the MG5 Matrix Element generator."""
mg5_path = ""
name = 'v5 Ref'
type = 'v5_ref'
def format_mg5_proc_card(self, proc_list, model, orders):
"""Create a proc_card.dat string following v5 conventions."""
v5_string = "import model %s\n" % os.path.join(self.model_dir, model)
v5_string += "set automatic_html_opening False\n"
couplings = me_comparator.MERunner.get_coupling_definitions(orders)
for i, proc in enumerate(proc_list):
v5_string += 'add process ' + proc + ' ' + couplings + \
'@%i' % i + '\n'
v5_string += "output %s -f\n" % \
os.path.join(self.mg5_path, self.temp_dir_name)
v5_string += "launch -f \n"
return v5_string
def run(self, proc_list, model, orders={}):
"""Execute MG5 on the list of processes mentioned in proc_list, using
the specified model, the specified maximal coupling orders and a certain
energy for incoming particles (for decay, incoming particle is at rest).
"""
self.res_list = [] # ensure that to be void, and avoid pointer problem
self.proc_list = proc_list
self.model = model
self.orders = orders
self.non_zero = 0
dir_name = os.path.join(self.mg5_path, self.temp_dir_name)
# Create a proc_card.dat in the v5 format
proc_card_location = os.path.join(self.mg5_path, 'proc_card_%s.dat' % \
self.temp_dir_name)
proc_card_file = open(proc_card_location, 'w')
proc_card_file.write(self.format_mg5_proc_card(proc_list, model, orders))
proc_card_file.close()
logging.info("proc_card.dat file for %i processes successfully created in %s" % \
(len(proc_list), os.path.join(dir_name, 'Cards')))
# Run mg5
logging.info("Running MG5")
devnull = open(os.devnull,'w')
if logging.root.level >=20:
subprocess.call([pjoin(self.mg5_path,'bin','mg5'), proc_card_location],
stdout=devnull, stderr=devnull)
else:
subprocess.call([pjoin(self.mg5_path,'bin','mg5'), proc_card_location])
os.remove(proc_card_location)
values = self.get_values()
self.res_list.append(values)
return values
class MG5gaugeRunner(MG5Runner):
"""Runner object for the MG5 Matrix Element generator."""
def __init__(self, cms, gauge):
self.cms = cms
self.gauge = gauge
self.mg5_path = ""
self.name = 'MG_%s_%s' %(self.cms, self.gauge)
self.type = '%s_%s' %(self.cms, self.gauge)
def format_mg5_proc_card(self, proc_list, model, orders):
"""Create a proc_card.dat string following v5 conventions."""
v5_string = 'import model sm \n'
v5_string += 'set automatic_html_opening False\n'
v5_string += 'set complex_mass_scheme %s \n' % self.cms
v5_string += 'set gauge %s \n' % self.gauge
v5_string += "import model %s \n" % os.path.join(self.model_dir, model)
couplings = me_comparator.MERunner.get_coupling_definitions(orders)
for i, proc in enumerate(proc_list):
v5_string += 'add process ' + proc + ' ' + couplings + \
'@%i' % i + '\n'
v5_string += "output %s -f\n" % \
os.path.join(self.mg5_path, self.temp_dir_name)
v5_string += "launch -f \n"
v5_string += 'set complex_mass_scheme False \n'
v5_string += 'set gauge unitary'
return v5_string
|
[
"[email protected]"
] | |
569020ba811d88fe1fc6766d117bea58a7e411d7
|
b53077fb048007f526f447b4bcdd97af8d98ba7d
|
/python/uri1011.py
|
babf079d95a45c284704296eeb9e5e689f876113
|
[] |
no_license
|
ClaudioSiqueira/URI
|
862e58e13943a4fcdccee21c134448912d77a318
|
de75c6bbbddea62e9aafa29954e892054acf829c
|
refs/heads/master
| 2021-02-11T04:05:01.393249 | 2020-05-21T00:05:28 | 2020-05-21T00:05:28 | 244,451,958 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 100 |
py
|
raio = float(input())
volume = (4 * 3.14159 * raio ** 3)/3
print('VOLUME = {:.3f}'.format(volume))
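# Note (assumption about the judge): URI 1011 expects pi = 3.14159 exactly,
# which is why math.pi is deliberately not used here.
# Example: input 3 -> VOLUME = 113.097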
|
[
"[email protected]"
] | |
9d57190c339f4d7c406c41b3ab2a49e9f86c568c
|
26e3d85a3b61219e13f794289ff2b70baa248f14
|
/material/frontend/views/detail.py
|
0bad092ff2ef47b543351b523af9a399ee2695e5
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] |
permissive
|
viewflow/django-material
|
ac8dd10daa8352440845c767b07cafc7f7d09216
|
31b1ce5f6fecc10ba4c9babe4219fb7be97dbf93
|
refs/heads/master
| 2023-08-15T23:32:58.330321 | 2023-04-12T06:12:07 | 2023-04-12T06:12:40 | 29,337,344 | 2,818 | 570 |
BSD-3-Clause
| 2023-03-04T02:28:50 | 2015-01-16T07:17:33 |
CSS
|
UTF-8
|
Python
| false | false | 5,692 |
py
|
from __future__ import unicode_literals
from django.contrib.auth import get_permission_codename
from django.core.exceptions import PermissionDenied, ValidationError
from django.db import models
from django.http import Http404
from django.urls import reverse
from django.views import generic
class DetailModelView(generic.DetailView):
"""Thin wrapper for `generic.DetailView`."""
viewset = None
def get_queryset(self):
"""Return the list of items for this view.
        If the view has no explicit `self.queryset`, falls back to
        `viewset.get_queryset`.
"""
if self.queryset is None and self.viewset is not None:
if hasattr(self.viewset, 'get_queryset'):
return self.viewset.get_queryset(self.request)
return super(DetailModelView, self).get_queryset()
def get_object_data(self):
"""List of object fields to display.
Choice fields values are expanded to readable choice label.
"""
for field in self.object._meta.fields:
if isinstance(field, models.AutoField):
continue
elif field.auto_created:
continue
else:
choice_display_attr = "get_{}_display".format(field.name)
if hasattr(self.object, choice_display_attr):
value = getattr(self.object, choice_display_attr)()
else:
value = getattr(self.object, field.name)
if value is not None:
yield (field.verbose_name.title(), value)
def has_view_permission(self, request, obj):
"""Object view permission check.
        If the view has a `viewset`, `viewset.has_view_permission` is used.
"""
if self.viewset is not None:
return self.viewset.has_view_permission(request, obj)
# default lookup for the django permission
opts = self.model._meta
codename = get_permission_codename('view', opts)
view_perm = '{}.{}'.format(opts.app_label, codename)
if request.user.has_perm(view_perm):
return True
elif request.user.has_perm(view_perm, obj=obj):
return True
return self.has_change_permission(request, obj=obj)
def has_change_permission(self, request, obj):
"""Object chane permission check.
If view had a `viewset`, the `viewset.has_change_permission` used.
If true, view will show `Change` link to the Change view.
"""
if self.viewset is not None:
return self.viewset.has_change_permission(request, obj)
# default lookup for the django permission
opts = self.model._meta
codename = get_permission_codename('change', opts)
change_perm = '{}.{}'.format(opts.app_label, codename)
if request.user.has_perm(change_perm):
return True
return request.user.has_perm(change_perm, obj=obj)
def has_delete_permission(self, request, obj):
"""Object delete permission check.
        If true, the view will show a `Delete` link to the Delete view.
"""
if self.viewset is not None:
return self.viewset.has_delete_permission(request, obj)
# default lookup for the django permission
opts = self.model._meta
codename = get_permission_codename('delete', opts)
delete_perm = '{}.{}'.format(opts.app_label, codename)
if request.user.has_perm(delete_perm):
return True
return request.user.has_perm(delete_perm, obj=obj)
def get_object(self):
"""Retrieve the object.
Check object view permission at the same time.
"""
queryset = self.get_queryset()
model = queryset.model
pk = self.kwargs.get(self.pk_url_kwarg)
if pk is not None:
try:
self.kwargs[self.pk_url_kwarg] = model._meta.pk.to_python(pk)
except (ValidationError, ValueError):
raise Http404
obj = super(DetailModelView, self).get_object()
if not self.has_view_permission(self.request, obj):
raise PermissionDenied
return obj
def get_context_data(self, **kwargs):
"""Additional context data for detail view.
:keyword object_data: List of fields and values of the object
:keyword change_url: Link to the change view
:keyword delete_url: Link to the delete view
"""
opts = self.model._meta
kwargs['object_data'] = self.get_object_data()
if self.has_change_permission(self.request, self.object):
kwargs['change_url'] = reverse(
'{}:{}_change'.format(opts.app_label, opts.model_name),
args=[self.object.pk])
if self.has_delete_permission(self.request, self.object):
kwargs['delete_url'] = reverse(
'{}:{}_delete'.format(opts.app_label, opts.model_name),
args=[self.object.pk])
return super(DetailModelView, self).get_context_data(**kwargs)
def get_template_names(self):
"""
List of templates for the view.
If no `self.template_name` defined, returns::
[<app_label>/<model_label>_detail.html
'material/frontend/views/detail.html']
"""
if self.template_name is None:
opts = self.model._meta
return [
'{}/{}{}.html'.format(
opts.app_label,
opts.model_name,
self.template_name_suffix),
'material/frontend/views/detail.html',
]
return [self.template_name]
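# A minimal sketch of how a viewset could plug into DetailModelView (the
# `Book` model and `BookViewSet` names are illustrative assumptions, not part
# of this module):
#
# class BookViewSet(object):
#     def get_queryset(self, request):
#         return Book.objects.all()
#
#     def has_view_permission(self, request, obj=None):
#         return request.user.is_authenticated
#
#     def has_change_permission(self, request, obj=None):
#         return request.user.is_staff
#
#     def has_delete_permission(self, request, obj=None):
#         return request.user.is_superuser
#
# class BookDetailView(DetailModelView):
#     model = Book
#     viewset = BookViewSet()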
|
[
"[email protected]"
] | |
f0177181db3771cd7db55d41e5558003f6dee2c1
|
478de38a95c2729ee2ef8c77b1c5a81f23aedb59
|
/Programming-Algorithm/Factorial Trailing Zeroes.py
|
b0bbec923fa02afbe6ed300d334715b8a0dfae98
|
[] |
no_license
|
shuzhancnjx/leetcode-
|
0c711f720ef653ddff2af3af697a453122c28403
|
12093c92ef33707ad8ccdd59ad040c04cad1ee3b
|
refs/heads/master
| 2020-12-24T16:32:25.976747 | 2016-03-03T15:36:44 | 2016-03-03T15:36:44 | 37,101,621 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 419 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 24 17:00:01 2015
@author: ZSHU
"""
"""
a simple algorithm based on the wiki.
"""
class Solution(object):
def trailingZeroes(self, n):
"""
:type n: int
:rtype: int
"""
res=0
for i in xrange(1, n):
if 5**i<=n:
res+= (n/5**i)
else:
return res
return res
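# Quick checks (Python 2, matching the xrange/integer division above). The
# result is the number of factors of 5 in n!: sum of floor(n / 5**i).
# trailingZeroes(5)  -> 1   (5!  = 120)
# trailingZeroes(10) -> 2   (10! ends in two zeros)
# trailingZeroes(25) -> 6   (floor(25/5) + floor(25/25) = 5 + 1)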
|
[
"[email protected]"
] | |
bc7cd024c8017f602b40f68ae8f58135a8d780cf
|
df1306cdc82ccbe730d77d78740004abc10bb492
|
/src/main/python/config.py
|
d0b40c696caac30b756c79c5e59af50081cb0bc7
|
[] |
no_license
|
richburdon/flask-demo
|
cc656c3c750977c8ee2a312554fda146d83919d3
|
16f346f77af7824807e8e30ed9c92ceab55cfa82
|
refs/heads/master
| 2020-03-27T23:17:30.220307 | 2015-08-02T18:07:10 | 2015-08-02T18:07:10 | 38,577,472 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 438 |
py
|
#
# Copyright 2015 Alien Laboratories, Inc.
#
import flask
import flask.views
from injector import Key, Module, inject, singleton
CONFIG = Key('configuration')
@singleton
@inject(app=flask.Flask)
class ConfigModule(Module):
def configure(self, binder):
binder.bind(CONFIG, {
'app': {
'name': 'Demo'
},
'client': {
'debug': True
}
})
|
[
"EMAIL"
] |
EMAIL
|
1890b3225e9fecb70d999a6c4e8c5668902b71fc
|
ce196aba0adde47ea2767eae1d7983a1ef548bb8
|
/txtFile_覆盖式生成文件3_姓名生成.py
|
81646a132236fad56879debd3ae1c06fee1131f6
|
[] |
no_license
|
xiang-daode/Python3_codes
|
5d2639ffd5d65065b98d029e79b8f3608a37cf0b
|
06c64f85ce2c299aef7f9311e9473e0203a05b09
|
refs/heads/main
| 2023-08-30T14:59:55.123128 | 2021-11-03T05:12:24 | 2021-11-03T05:12:24 | 333,632,892 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 326 |
py
|
# txtFile: regenerate the file in overwrite mode (random name generation)
import random
f = "myTest.txt"
a = range(200)
with open(f, "w") as file:  # "w" means the file contents are overwritten on every run
    for m in a:
        # 0x4E00..0x9FA5 are the 20902 CJK Unified Ideographs
        rnd1 = int(20902*(random.random()))
        rnd2 = int(20902*(random.random()))
        file.write(str(m)+":\t 项"+chr(0x4e00+rnd1)+chr(0x4e00+rnd2)+"\n")
|
[
"[email protected]"
] | |
9d2c2c7b6917bdaaba2e101d20bd9c5cd9fd32c0
|
f58e6240965d2d3148e124dcbdcd617df879bb84
|
/tensorflow_datasets/core/community/load.py
|
2718ecb5588a9c29696143b81ddbd0feec48c6cd
|
[
"Apache-2.0"
] |
permissive
|
suvarnak/datasets
|
b3f5913cece5c3fe41ec0dde6401a6f37bfd9303
|
3a46548d0c8c83b2256e5abeb483137bd549a4c1
|
refs/heads/master
| 2022-09-27T03:38:20.430405 | 2022-07-22T15:21:33 | 2022-07-22T15:27:07 | 176,061,377 | 0 | 0 |
Apache-2.0
| 2019-03-17T05:45:33 | 2019-03-17T05:45:32 | null |
UTF-8
|
Python
| false | false | 2,465 |
py
|
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils to load community datasets."""
import importlib
import sys
from typing import Type
from tensorflow_datasets.core import dataset_builder
from tensorflow_datasets.core import registered
from tensorflow_datasets.core.community import huggingface_wrapper
def builder_cls_from_module(
module_name: str,) -> Type[dataset_builder.DatasetBuilder]:
"""Imports the module and extract the `tfds.core.DatasetBuilder`.
Args:
module_name: Dataset module to import containing the dataset definition
(e.g. `tensorflow_datasets.image.mnist.mnist`)
Returns:
The extracted tfds.core.DatasetBuilder builder class.
"""
  if module_name not in sys.modules:  # Module not yet imported
# Module can be created during execution, so call invalidate_caches() to
# make sure the new module is noticed by the import system.
importlib.invalidate_caches()
# Executing the module will register the datasets in _MODULE_TO_DATASETS.
with registered.skip_registration(),\
huggingface_wrapper.mock_huggingface_import():
importlib.import_module(module_name)
# TODO(tfds): For community-installed modules, we should raise cleaner
# error if there is additional missing dependency. E.g. Parsing all
# import statements. Or wrap this `importlib.import_module` within a
# `with lazy_imports():` context manager ?
builder_classes = registered._MODULE_TO_DATASETS.get(module_name, []) # pylint: disable=protected-access
if len(builder_classes) != 1:
raise ValueError(
f'Could not load DatasetBuilder from: {module_name}. '
'Make sure the module only contains a single `DatasetBuilder`.\n'
'If no dataset is detected, make sure that all abstractmethods are '
'implemented.\n'
f'Detected builders: {builder_classes}')
return builder_classes[0]
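# Hypothetical usage sketch: resolve the builder class from a dataset module
# path (the path mirrors the docstring example) and build the dataset via the
# standard tfds.core.DatasetBuilder API.
def _demo_builder_cls_from_module():
  builder_cls = builder_cls_from_module('tensorflow_datasets.image.mnist.mnist')
  builder = builder_cls()  # instantiate the resolved DatasetBuilder
  builder.download_and_prepare()  # download and write the dataset to disk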
|
[
"[email protected]"
] | |
415268d2f1f79abf144496915ce9c4b774eb9e79
|
147e022b38e05fb2f6967aa4d5e50816221f8bf5
|
/matches/middleware.py
|
e4dbf0afe8f40b927f4991ce1b29b5f4ad73d44c
|
[] |
no_license
|
Shirhussain/Meet-your-match
|
a542f0fdcab573ba70740bfbd8d2bb6c2603bdc9
|
fd9a8beabe7288aca6fae07f1a7bc0b68c0223a8
|
refs/heads/main
| 2023-02-25T20:30:22.647930 | 2021-02-04T08:02:05 | 2021-02-04T08:02:05 | 332,492,486 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,316 |
py
|
from django.contrib import messages
from django.urls import reverse
from django.conf import settings
from django.shortcuts import HttpResponseRedirect
URLS = [reverse(url) for url in settings.SUBSCRIPTION_REQUIRED_URLS]
class CheckMembership:
def __init__(self, get_response):
self.get_response = get_response
# One-time configuration and initialization.
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
response = self.get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
    def process_view(self, request, view_func, view_args, view_kwargs):
if request.user.is_authenticated:
# messages.success(request, "user is logged in ")
if request.path in URLS:
role = request.user.userrole
if str(role) == "Regular":
messages.success(request, f"you need to upgrade your membership plan to see that, your rol is: {role}")
return HttpResponseRedirect(reverse("home"))
else:
# messages.error(request, "user is not logged in")
print("not logged in horraaaaaaaaaaaaaaaaaaaaaa")
|
[
"[email protected]"
] | |
e67e63767b785e8631555fc27647b748c11857c0
|
f86f62734c167e127f5f6a87b77dbf18d4b9ffb5
|
/pandas_ta/core.py
|
9c953b7bc6357dcf855a17784a32d32a0aaec08b
|
[
"MIT"
] |
permissive
|
jingmouren/pandas-ta
|
dcb0291d9c665af5361e3e84cb4ae865436cb455
|
20c6c5172a64540264d9d5192df05a37f214cf87
|
refs/heads/master
| 2020-06-17T19:31:51.318930 | 2019-06-13T15:02:05 | 2019-06-13T15:02:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 41,794 |
py
|
# -*- coding: utf-8 -*-
import time
import pandas as pd
from pandas.core.base import PandasObject
from .utils import *
class BasePandasObject(PandasObject):
"""Simple PandasObject Extension
Ensures the DataFrame is not empty and has columns.
Args:
df (pd.DataFrame): Extends Pandas DataFrame
"""
def __init__(self, df, **kwargs):
if df.empty: return
if len(df.columns) > 0:
self._df = df
else:
            raise AttributeError(" [X] No columns!")
def __call__(self, kind, *args, **kwargs):
raise NotImplementedError()
@pd.api.extensions.register_dataframe_accessor('ta')
class AnalysisIndicators(BasePandasObject):
"""AnalysisIndicators is class that extends the Pandas DataFrame via
Pandas @pd.api.extensions.register_dataframe_accessor('name') decorator.
This Pandas Extension is named 'ta' for Technical Analysis that allows us
to apply technical indicators with an one extension. Even though 'ta' is
now a Pandas DataFrame Extension, you can still call the Indicators
individually. However many of the Indicators have been updated and new ones
added, so make sure to check help.
By default the 'ta' extensions uses lower case column names: open, high,
low, close, and volume. You can override the defaults but providing the
it's replacement name when calling the indicator. For example, to call the
indicator hl2().
With 'default' columns: open, high, low, close, and volume.
>>> df.ta.hl2()
>>> df.ta(kind='hl2')
With DataFrame columns: Open, High, Low, Close, and Volume.
>>> df.ta.hl2(high='High', low='Low')
>>> df.ta(kind='hl2', high='High', low='Low')
Args:
kind (str, optional): Default: None. Name of the indicator. Converts
kind to lowercase before calling.
timed (bool, optional): Default: False. Curious about the execution
speed? Well it's not ground breaking, but you can enable with True.
kwargs: Extension specific modifiers.
            append (bool, optional): Default: False. When True, it appends the
                result column(s) of the indicator onto the DataFrame.
Returns:
Most Indicators will return a Pandas Series. Others like MACD, BBANDS,
KC, et al will return a Pandas DataFrame. Ichimoku on the other hand
will return two DataFrames, the Ichimoku DataFrame for the known period
and a Span DataFrame for the future of the Span values.
Let's get started!
1. Loading the 'ta' module:
>>> import pandas as pd
>>> import ta as ta
2. Load some data:
>>> df = pd.read_csv('AAPL.csv', index_col='date', parse_dates=True)
3. Help!
3a. General Help:
>>> help(df.ta)
>>> df.ta()
    3b. Indicator Help:
    >>> help(ta.apo)
    3c. Indicator Extension Help:
    >>> help(df.ta.apo)
4. Ways of calling an indicator.
    4a. Calling just the APO indicator without the 'ta' DataFrame extension.
    >>> ta.apo(df['close'])
    4b. Calling just the APO indicator with the 'ta' DataFrame extension.
    >>> df.ta.apo()
4c. Calling using kind.
>>> df.ta(kind='apo')
5. Working with kwargs
5a. Append the result to the working df.
>>> df.ta.apo(append=True)
5b. Timing an indicator.
>>> apo = df.ta(kind='apo', timed=True)
>>> print(apo.timed)
"""
def __call__(self, kind=None, alias=None, timed=False, **kwargs):
try:
if isinstance(kind, str):
kind = kind.lower()
fn = getattr(self, kind)
if timed:
stime = time.time()
# Run the indicator
indicator = fn(**kwargs)
if timed:
time_diff = time.time() - stime
ms = time_diff * 1000
indicator.timed = f"{ms:2.3f} ms ({time_diff:2.3f} s)"
# print(f"execution time: {indicator.timed}")
# Add an alias if passed
if alias:
indicator.alias = f"{alias}"
return indicator
            else:
                self.indicators()
        except:
            # On any failure (unknown kind, bad kwargs), show the indicator list.
            self.indicators()
def _append(self, result=None, **kwargs):
"""Appends a Pandas Series or DataFrame columns to self._df."""
if 'append' in kwargs and kwargs['append']:
df = self._df
if df is None or result is None: return
else:
if isinstance(result, pd.DataFrame):
for i, column in enumerate(result.columns):
df[column] = result.iloc[:,i]
else:
df[result.name] = result
def _get_column(self, series, default):
"""Attempts to get the correct series or 'column' and return it."""
df = self._df
if df is None: return
# Explicit passing a pd.Series to override default.
if isinstance(series, pd.Series):
return series
        # Fall back to the default column when no explicit series is given.
        elif series is None:
            return df[default]
# Ok. So it's a str.
elif isinstance(series, str):
# Return the df column since it's in there.
if series in df.columns:
return df[series]
else:
# Attempt to match the 'series' because it was likely misspelled.
matches = df.columns.str.match(series, case=False)
match = [i for i, x in enumerate(matches) if x]
# If found, awesome. Return it or return the 'series'.
cols = ', '.join(list(df.columns))
NOT_FOUND = f" [X] Ooops!!!: It's {series not in df.columns}, the series '{series}' not in {cols}"
return df.iloc[:,match[0]] if len(match) else print(NOT_FOUND)
def constants(self, apply, lower_bound=-100, upper_bound=100, every=1):
"""Constants
Useful for indicator levels or if you need some constant value.
Add constant '1' to the DataFrame
>>> df.ta.constants(True, 1, 1, 1)
Remove constant '1' to the DataFrame
>>> df.ta.constants(False, 1, 1, 1)
        Adding a range of constants from -4 to 4 inclusive
        >>> df.ta.constants(True, -4, 4, 1)
        Removing a range of constants from -4 to 4 inclusive
        >>> df.ta.constants(False, -4, 4, 1)
        Args:
            apply (bool): If True, appends the range of constants to the
                working DataFrame. If False, it removes the constant range from
                the working DataFrame.
            lower_bound (int): Default: -100. Lowest integer for the constant range.
            upper_bound (int): Default: 100. Largest integer for the constant range.
            every (int): Default: 1. Include every multiple of this value in the range.
Returns:
Returns nothing to the user. Either adds or removes constant ranges from the
working DataFrame.
"""
levels = [x for x in range(lower_bound, upper_bound + 1) if x % every == 0]
if apply:
for x in levels:
self._df[f'{x}'] = x
else:
for x in levels:
del self._df[f'{x}']
def indicators(self, **kwargs):
"""Indicator list"""
header = f"pandas.ta - Technical Analysis Indicators"
helper_methods = ['indicators', 'constants'] # Public non-indicator methods
exclude_methods = kwargs.pop('exclude', None)
as_list = kwargs.pop('as_list', False)
ta_indicators = list((x for x in dir(pd.DataFrame().ta) if not x.startswith('_') and not x.endswith('_')))
for x in helper_methods:
ta_indicators.remove(x)
        if isinstance(exclude_methods, list) and len(exclude_methods) > 0:
            for x in exclude_methods:
                if x in ta_indicators:
                    ta_indicators.remove(x)
if as_list:
return ta_indicators
total_indicators = len(ta_indicators)
s = f"{header}\nTotal Indicators: {total_indicators}\n"
if total_indicators > 0:
abbr_list = ', '.join(ta_indicators)
print(f"{s}Abbreviations:\n {abbr_list}")
else:
print(s)
# Momentum Indicators
def ao(self, high=None, low=None, fast=None, slow=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
from .momentum.ao import ao
result = ao(high=high, low=low, fast=fast, slow=slow, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def apo(self, close=None, fast=None, slow=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.apo import apo
result = apo(close=close, fast=fast, slow=slow, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def bop(self, open_=None, high=None, low=None, close=None, percentage=False, offset=None, **kwargs):
open_ = self._get_column(open_, 'open')
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .momentum.bop import bop
result = bop(open_=open_, high=high, low=low, close=close, percentage=percentage, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def cci(self, high=None, low=None, close=None, length=None, c=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .momentum.cci import cci
result = cci(high=high, low=low, close=close, length=length, c=c, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def cg(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.cg import cg
result = cg(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def cmo(self, close=None, length=None, drift=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.cmo import cmo
result = cmo(close=close, length=length, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def coppock(self, close=None, length=None, fast=None, slow=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.coppock import coppock
result = coppock(close=close, length=length, fast=fast, slow=slow, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def fisher(self, high=None, low=None, length=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
from .momentum.fisher import fisher
result = fisher(high=high, low=low, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def kst(self, close=None, roc1=None, roc2=None, roc3=None, roc4=None, sma1=None, sma2=None, sma3=None, sma4=None, signal=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.kst import kst
result = kst(close=close, roc1=roc1, roc2=roc2, roc3=roc3, roc4=roc4, sma1=sma1, sma2=sma2, sma3=sma3, sma4=sma4, signal=signal, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def macd(self, close=None, fast=None, slow=None, signal=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.macd import macd
result = macd(close=close, fast=fast, slow=slow, signal=signal, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def mom(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.mom import mom
result = mom(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def ppo(self, close=None, fast=None, slow=None, percentage=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.ppo import ppo
result = ppo(close=close, fast=fast, slow=slow, percentage=percentage, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def roc(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.roc import roc
result = roc(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def rsi(self, close=None, length=None, drift=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.rsi import rsi
result = rsi(close=close, length=length, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def slope(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.slope import slope
result = slope(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def stoch(self, high=None, low=None, close=None, fast_k=None, slow_k=None, slow_d=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .momentum.stoch import stoch
result = stoch(high=high, low=low, close=close, fast_k=fast_k, slow_k=slow_k, slow_d=slow_d, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def trix(self, close=None, length=None, drift=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.trix import trix
result = trix(close=close, length=length, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def tsi(self, close=None, fast=None, slow=None, drift=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .momentum.tsi import tsi
result = tsi(close=close, fast=fast, slow=slow, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def uo(self, high=None, low=None, close=None, fast=None, medium=None, slow=None, fast_w=None, medium_w=None, slow_w=None, drift=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .momentum.uo import uo
result = uo(high=high, low=low, close=close, fast=fast, medium=medium, slow=slow, fast_w=fast_w, medium_w=medium_w, slow_w=slow_w, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def willr(self, high=None, low=None, close=None, length=None, percentage=True, offset=None,**kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .momentum.willr import willr
result = willr(high=high, low=low, close=close, length=length, percentage=percentage, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
# Overlap Indicators
def dema(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.dema import dema
result = dema(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def ema(self, close=None, length=None, offset=None, adjust=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.ema import ema
result = ema(close=close, length=length, offset=offset, adjust=adjust, **kwargs)
self._append(result, **kwargs)
return result
def fwma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.fwma import fwma
result = fwma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def hl2(self, high=None, low=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
from .overlap.hl2 import hl2
result = hl2(high=high, low=low, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def hlc3(self, high=None, low=None, close=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .overlap.hlc3 import hlc3
result = hlc3(high=high, low=low, close=close, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def hma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.hma import hma
result = hma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def kama(self, close=None, length=None, fast=None, slow=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.kama import kama
result = kama(close=close, length=length, fast=fast, slow=slow, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def ichimoku(self, high=None, low=None, close=None, tenkan=None, kijun=None, senkou=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .overlap.ichimoku import ichimoku
result, span = ichimoku(high=high, low=low, close=close, tenkan=tenkan, kijun=kijun, senkou=senkou, offset=offset, **kwargs)
self._append(result, **kwargs)
return result, span
def linreg(self, close=None, length=None, offset=None, adjust=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.linreg import linreg
result = linreg(close=close, length=length, offset=offset, adjust=adjust, **kwargs)
self._append(result, **kwargs)
return result
def midpoint(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.midpoint import midpoint
result = midpoint(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def midprice(self, high=None, low=None, length=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
from .overlap.midprice import midprice
result = midprice(high=high, low=low, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def ohlc4(self, open_=None, high=None, low=None, close=None, offset=None, **kwargs):
open_ = self._get_column(open_, 'open')
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .overlap.ohlc4 import ohlc4
result = ohlc4(open_=open_, high=high, low=low, close=close, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def pwma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.pwma import pwma
result = pwma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def rma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.rma import rma
result = rma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def sma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.sma import sma
result = sma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def swma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.swma import swma
result = swma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def t3(self, close=None, length=None, a=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.t3 import t3
result = t3(close=close, length=length, a=a, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def tema(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.tema import tema
result = tema(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def trima(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.trima import trima
result = trima(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def vwap(self, high=None, low=None, close=None, volume=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from .overlap.vwap import vwap
result = vwap(high=high, low=low, close=close, volume=volume, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def vwma(self, close=None, volume=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from .overlap.vwma import vwma
        result = vwma(close=close, volume=volume, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def wma(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.wma import wma
result = wma(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def zlma(self, close=None, length=None, offset=None, mamode=None, **kwargs):
close = self._get_column(close, 'close')
from .overlap.zlma import zlma
result = zlma(close=close, length=length, offset=offset, mamode=mamode, **kwargs)
self._append(result, **kwargs)
return result
# Performance Indicators
def log_return(self, close=None, length=None, cumulative=False, percent=False, offset=None, **kwargs):
close = self._get_column(close, 'close')
from pandas_ta.performance.log_return import log_return
result = log_return(close=close, length=length, cumulative=cumulative, percent=percent, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def percent_return(self, close=None, length=None, cumulative=False, percent=False, offset=None, **kwargs):
close = self._get_column(close, 'close')
from pandas_ta.performance.percent_return import percent_return
result = percent_return(close=close, length=length, cumulative=cumulative, percent=percent, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def trend_return(self, close=None, trend=None, log=None, cumulative=None, offset=None, trend_reset=None, **kwargs):
close = self._get_column(close, 'close')
trend = self._get_column(trend, f"{trend}")
from pandas_ta.performance.trend_return import trend_return
result = trend_return(close=close, trend=trend, log=log, cumulative=cumulative, offset=offset, trend_reset=trend_reset, **kwargs)
self._append(result, **kwargs)
return result
# Statistics Indicators
def kurtosis(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.kurtosis import kurtosis
result = kurtosis(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def mad(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.mad import mad
result = mad(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def median(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.median import median
result = median(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def quantile(self, close=None, length=None, q=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.quantile import quantile
result = quantile(close=close, length=length, q=q, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def skew(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.skew import skew
result = skew(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def stdev(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.stdev import stdev
result = stdev(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def variance(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.variance import variance
result = variance(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def zscore(self, close=None, length=None, std=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .statistics.zscore import zscore
result = zscore(close=close, length=length, std=std, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
# Trend Indicators
def adx(self, high=None, low=None, close=None, drift=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .trend.adx import adx
result = adx(high=high, low=low, close=close, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def amat(self, close=None, fast=None, slow=None, mamode=None, lookback=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.amat import amat
result = amat(close=close, fast=fast, slow=slow, mamode=mamode, lookback=lookback, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def aroon(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.aroon import aroon
result = aroon(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def decreasing(self, close=None, length=None, asint=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.decreasing import decreasing
result = decreasing(close=close, length=length, asint=asint, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def dpo(self, close=None, length=None, centered=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.dpo import dpo
result = dpo(close=close, length=length, centered=centered, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def increasing(self, close=None, length=None, asint=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.increasing import increasing
result = increasing(close=close, length=length, asint=asint, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def linear_decay(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from .trend.linear_decay import linear_decay
result = linear_decay(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def long_run(self, fast=None, slow=None, length=None, offset=None, **kwargs):
if fast is None and slow is None: return self._df
else:
fast = self._get_column(fast, f"{fast}")
slow = self._get_column(slow, f"{slow}")
from .trend.long_run import long_run
result = long_run(fast=fast, slow=slow, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def qstick(self, open_=None, close=None, length=None, offset=None, **kwargs):
open_ = self._get_column(open_, 'open')
close = self._get_column(close, 'close')
from .trend.qstick import qstick
result = qstick(open_=open_, close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def short_run(self, fast=None, slow=None, length=None, offset=None, **kwargs):
if fast is None and slow is None: return self._df
else:
fast = self._get_column(fast, f"{fast}")
slow = self._get_column(slow, f"{slow}")
from .trend.short_run import short_run
result = short_run(fast=fast, slow=slow, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def vortex(self, high=None, low=None, close=None, drift=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from .trend.vortex import vortex
result = vortex(high=high, low=low, close=close, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
# Utility Indicators
def cross(self, a=None, b=None, above=True, asint=True, offset=None, **kwargs):
if a is None and b is None: return self._df
else:
a = self._get_column(a, f"{a}")
b = self._get_column(b, f"{b}")
result = cross(series_a=a, series_b=b, above=above, asint=asint, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
# Volatility Indicators
def accbands(self, high=None, low=None, close=None, length=None, c=None, mamode=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from pandas_ta.volatility.accbands import accbands
result = accbands(high=high, low=low, close=close, length=length, c=c, mamode=mamode, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def atr(self, high=None, low=None, close=None, length=None, mamode=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from pandas_ta.volatility.atr import atr
result = atr(high=high, low=low, close=close, length=length, mamode=mamode, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def bbands(self, close=None, length=None, stdev=None, mamode=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from pandas_ta.volatility.bbands import bbands
result = bbands(close=close, length=length, stdev=stdev, mamode=mamode, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def donchian(self, close=None, length=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
from pandas_ta.volatility.donchian import donchian
result = donchian(close=close, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def kc(self, high=None, low=None, close=None, length=None, scalar=None, mamode=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from pandas_ta.volatility.kc import kc
result = kc(high=high, low=low, close=close, length=length, scalar=scalar, mamode=mamode, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def massi(self, high=None, low=None, fast=None, slow=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
from pandas_ta.volatility.massi import massi
result = massi(high=high, low=low, fast=fast, slow=slow, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def natr(self, high=None, low=None, close=None, length=None, mamode=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from pandas_ta.volatility.natr import natr
result = natr(high=high, low=low, close=close, length=length, mamode=mamode, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def true_range(self, high=None, low=None, close=None, drift=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
from pandas_ta.volatility.true_range import true_range
result = true_range(high=high, low=low, close=close, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
# Volume Indicators
def ad(self, high=None, low=None, close=None, volume=None, open_=None, signed=True, offset=None, **kwargs):
if open_ is not None:
open_ = self._get_column(open_, 'open')
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.ad import ad
result = ad(high=high, low=low, close=close, volume=volume, open_=open_, signed=signed, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def adosc(self, high=None, low=None, close=None, volume=None, open_=None, fast=None, slow=None, signed=True, offset=None, **kwargs):
if open_ is not None:
open_ = self._get_column(open_, 'open')
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.adosc import adosc
result = adosc(high=high, low=low, close=close, volume=volume, open_=open_, fast=fast, slow=slow, signed=signed, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def aobv(self, close=None, volume=None, fast=None, slow=None, mamode=None, max_lookback=None, min_lookback=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.aobv import aobv
result = aobv(close=close, volume=volume, fast=fast, slow=slow, mamode=mamode, max_lookback=max_lookback, min_lookback=min_lookback, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def cmf(self, high=None, low=None, close=None, volume=None, open_=None, length=None, offset=None, **kwargs):
if open_ is not None:
open_ = self._get_column(open_, 'open')
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.cmf import cmf
result = cmf(high=high, low=low, close=close, volume=volume, open_=open_, length=length, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def efi(self, close=None, volume=None, length=None, mamode=None, offset=None, drift=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.efi import efi
result = efi(close=close, volume=volume, length=length, offset=offset, mamode=mamode, drift=drift, **kwargs)
self._append(result, **kwargs)
return result
def eom(self, high=None, low=None, close=None, volume=None, length=None, divisor=None, offset=None, drift=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.eom import eom
result = eom(high=high, low=low, close=close, volume=volume, length=length, divisor=divisor, offset=offset, drift=drift, **kwargs)
self._append(result, **kwargs)
return result
def mfi(self, high=None, low=None, close=None, volume=None, length=None, drift=None, offset=None, **kwargs):
high = self._get_column(high, 'high')
low = self._get_column(low, 'low')
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.mfi import mfi
result = mfi(high=high, low=low, close=close, volume=volume, length=length, drift=drift, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def nvi(self, close=None, volume=None, length=None, initial=None, signed=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.nvi import nvi
result = nvi(close=close, volume=volume, length=length, initial=initial, signed=signed, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def obv(self, close=None, volume=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.obv import obv
result = obv(close=close, volume=volume, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def pvi(self, close=None, volume=None, length=None, initial=None, signed=True, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.pvi import pvi
result = pvi(close=close, volume=volume, length=length, initial=initial, signed=signed, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def pvol(self, close=None, volume=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.pvol import pvol
result = pvol(close=close, volume=volume, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def pvt(self, close=None, volume=None, offset=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.pvt import pvt
result = pvt(close=close, volume=volume, offset=offset, **kwargs)
self._append(result, **kwargs)
return result
def vp(self, close=None, volume=None, width=None, percent=None, **kwargs):
close = self._get_column(close, 'close')
volume = self._get_column(volume, 'volume')
from pandas_ta.volume.vp import vp
return vp(close=close, volume=volume, width=width, percent=percent, **kwargs)
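# Hypothetical usage sketch of the accessor defined above, assuming the
# package has been imported (which registers the .ta accessor) and the
# DataFrame has the default lowercase open/high/low/close/volume columns.
def _demo_ta_accessor(df):
    df.ta.sma(length=10, append=True)      # compute an SMA and append it to df
    macd = df.ta(kind='macd', timed=True)  # dispatch by name, with timing
    print(macd.timed)                      # execution-time string set in __call__
    return df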
|
[
"[email protected]"
] | |
c8aff798a0c522cfd15ef26a74275128a8d4fc8a
|
e298bf40ae88c2bd8e0a07f3e92f3e08a92edcc6
|
/keystoneauth1/fixture/keystoneauth_betamax.py
|
724d217d4b7bddbb6cda5bc1c31935af4f359787
|
[] |
no_license
|
KevinKaiQian/polar-bear
|
46a814c746246394f76505846166673a049f12f2
|
61d4e0ccd7328a6aa543af3b75e5f7fedf98bf8e
|
refs/heads/master
| 2022-04-29T02:15:35.536039 | 2021-05-19T12:33:07 | 2021-05-19T12:33:07 | 172,068,536 | 2 | 0 | null | 2022-03-29T21:56:51 | 2019-02-22T13:11:58 |
Python
|
UTF-8
|
Python
| false | false | 2,976 |
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A fixture to wrap the session constructor for use with Betamax."""
from functools import partial
import betamax
import fixtures
import mock
import requests
from keystoneauth1.fixture import hooks
from keystoneauth1.fixture import serializer as yaml_serializer
from keystoneauth1 import session
class BetamaxFixture(fixtures.Fixture):
def __init__(self, cassette_name, cassette_library_dir=None,
serializer=None, record=False,
pre_record_hook=hooks.pre_record_hook):
self.cassette_library_dir = cassette_library_dir
self.record = record
self.cassette_name = cassette_name
if not serializer:
serializer = yaml_serializer.YamlJsonSerializer
self.serializer = serializer
betamax.Betamax.register_serializer(serializer)
self.pre_record_hook = pre_record_hook
def setUp(self):
super(BetamaxFixture, self).setUp()
self.mockpatch = mock.patch.object(
session, '_construct_session',
partial(_construct_session_with_betamax, self))
self.mockpatch.start()
# Unpatch during cleanup
self.addCleanup(self.mockpatch.stop)
def _construct_session_with_betamax(fixture, session_obj=None):
# NOTE(morganfainberg): This function should contain the logic of
# keystoneauth1.session._construct_session as it replaces the
# _construct_session function to apply betamax magic to the requests
# session object.
if not session_obj:
session_obj = requests.Session()
# Use TCPKeepAliveAdapter to fix bug 1323862
for scheme in list(session_obj.adapters.keys()):
session_obj.mount(scheme, session.TCPKeepAliveAdapter())
with betamax.Betamax.configure() as config:
config.before_record(callback=fixture.pre_record_hook)
fixture.recorder = betamax.Betamax(
session_obj, cassette_library_dir=fixture.cassette_library_dir)
record = 'none'
serializer = None
if fixture.record in ['once', 'all', 'new_episodes']:
record = fixture.record
if fixture.serializer:
serializer = fixture.serializer.name
fixture.recorder.use_cassette(fixture.cassette_name,
serialize_with=serializer,
record=record)
fixture.recorder.start()
fixture.addCleanup(fixture.recorder.stop)
return session_obj
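# Hypothetical usage sketch: activate the fixture from a fixtures-aware
# TestCase (e.g. a testtools.TestCase) so keystoneauth sessions record and
# replay through Betamax. Cassette name and library directory are illustrative.
def _demo_betamax_fixture(test_case):
    test_case.useFixture(BetamaxFixture(
        cassette_name='example-cassette',
        cassette_library_dir='keystoneauth1/tests/unit/data',
        record=False))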
|
[
"[email protected]"
] | |
84751391d9a0d6fb8e19ee5ccd599e6882cd26df
|
40e7156576ad93db1f0dcab62ec3bb6042576166
|
/Termux/python/Mergeall/test/ziptools/zip-extract.py
|
dc8a5fd4d5ae165ebb55f3e6dd9fdcb43e83d452
|
[] |
no_license
|
WeilerWebServices/Gists
|
c5d12093d620abc8152e8e8214a2000832969421
|
506fae2f3f9568ecd73ba373f35ac5fda054520e
|
refs/heads/master
| 2023-02-17T18:00:07.721504 | 2021-01-11T02:19:44 | 2021-01-11T02:19:44 | 272,584,650 | 2 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,823 |
py
|
#!/usr/bin/python
"""
=============================================================================
Command-line ziptools wrapper and client (for Python 3.X or 2.X).
Extract a zip file, with:
<python> zip-extract.py [zipfile [unzipto] [-nofixlinks]]
Where:
"zipfile" is the pathname of an existing zipfile (a ".zip" is appended to
the end of this if missing)
"unzipto" is the pathname of a possibly-existing folder where all unzipped
items will be stored (the default is ".", the current working directory)
"-nofixlinks", if used, prevents symbolic-link path separators from being
adjusted for the local platform (else they are, to make links portable)
Arguments are input at console prompts if not listed on the command line.
The script's output lists for each item both zipfile (from) and extracted
(to) name, the latter after a "=>" on a new line.
<python> is your platform's optional Python identifier string. It may be
"python", "python3", or an alias on Unix; and "python", "py -3", or "py"
on Windows. It can also be omitted on Windows (to use a default), and on
Unix given executable permission for this script (e.g., post "chmod +x").
Some frozen app/executable packages may also omit <python>; see your docs.
The "unzipto" folder is created automatically if needed, but is cleaned
of its contents before the extract only if using interactive-prompts
mode here and cleaning is confirmed. Neither the base extract function
nor non-interactive mode here do any such cleaning. Remove the unzipto
folder's contents manually if needed before running this script.
Caution: cleaning may not make sense for ".", the current working dir.
This case is verified with prompts in interactive mode only, but that
is the only context in which auto-cleaning occurs.
Examples:
python zip-extract.py # input args
python zip-extract.py tests.zip # unzip to '.'
python zip-extract.py download.zip dirpath # unzip to other dir
python zip-extract.py dev.zip . -nofixlinks # don't adjust links
ABOUT LINKS AND OTHER FILE TYPES:
For symbolic links to both files and dirs, the ziptools package either
zips links themselves (by default), or the items they refer to (upon
request); this extract simply recreates whatever was added to the zip.
FIFOs and other exotica are never zipped or unzipped.
To make links more portable, path separators in link paths are automatically
adjusted for the hosting platform by default (e.g., '/' becomes '\' on
Windows); use "-nofixlinks" (which can appear anywhere on the command line)
to suppress this if you are unzipping on one platform for use on another.
See ziptools.py's main docstring for more details.
ABOUT TARGET PATHS:
For extracts, the Python zipfile module underlying this script discards
any special syntax in the archive's item names, including leading slashes,
Windows drive and UNC network names, and ".." up-references. The local
symlink adder parrots the same behavior.
Hence, paths that were either absolute, rooted in a drive or network, or
parent-relative at zip time become relative to (and are created in) the
"unzipto" path here. Items zipped as "dir0", "/dir1", "C:\dir2", and
"..\dir3" are extracted to "dir0", "dir1", "dir2", and "dir3" in "unzipto".
Technically, zipfile's write() removes leading slashes and drive and
network names (they won't be in the zipfile), and its extract() used
here removes everything special, including "..". Other zip tools may
store anything in a zipfile, and may or may not be as forgiving about
"..", but the -create and -extract scripts here work as a team.
Note that all top-level items in the zipfile are extracted as top-level
items in the "unzipto" folder. A zipfile that contains just files will
not create nested folders in "unzipto"; a zipfile with folders will.
ABOUT LARGE FILES:
Python's zipfile - and hence ziptools - handles files > ZIP64's 2G
size cutoff, both for zipping and unzipping. UNIX "unzip" may not.
See zip-create.py for more details.
CAVEAT: extracts here may not preserve UNIX permissions due to a Python
zipfile bug; see extractzipfile() in ziptools/ziptools.py for more details.
See zip-create.py for usage details on the zip-creation companion script.
See ziptools/ziptools.py's docstring for more on this script's utility.
=============================================================================
"""
import ziptools, sys, os
if sys.version[0] == '2':
input = raw_input # py 2.X compatibility
if len(sys.argv) >= 2: # 2 = script zipfile...
interactive = False
nofixlinks = False
if '-nofixlinks' in sys.argv: # anywhere in argv
nofixlinks = True
sys.argv.remove('-nofixlinks')
assert len(sys.argv) >= 2, 'Too few arguments'
zipfrom = sys.argv[1]
zipfrom += '' if zipfrom[-4:].lower() == '.zip' else '.zip'
unzipto = '.' if len(sys.argv) == 2 else sys.argv[2]
else:
interactive = True
zipfrom = input('Zip file to extract? ')
zipfrom += '' if zipfrom[-4:].lower() == '.zip' else '.zip'
unzipto = input('Folder to extract in (use . for here) ? ') or '.'
nofixlinks = input('Do not localize symlinks (y=yes)? ').lower() == 'y'
verify = input("About to UNZIP\n"
"\t%s,\n"
"\tto %s,\n"
"\t%socalizing any links\n"
"Confirm with 'y'? "
% (zipfrom, unzipto, 'not l' if nofixlinks else 'l'))
if verify.lower() != 'y':
input('Run cancelled.')
sys.exit(0)
if not os.path.exists(unzipto):
# no need to create here: zipfile.extract() does os.makedirs(unzipto)
pass
else:
# in interactive mode, offer to clean target folder (ziptools doesn't);
# removing only items to be written requires scanning the zipfile: pass;
if (interactive and
input('Clean target folder first (yes=y)? ').lower() == 'y'):
# okay, but really?
if (unzipto in ['.', os.getcwd()] and
input('Target = "." cwd - really clean (yes=y)? ').lower() != 'y'):
# a very bad thing to do silently!
pass
else:
# proceed with cleaning
for item in os.listdir(unzipto):
itempath = os.path.join(unzipto, item)
if os.path.isfile(itempath) or os.path.islink(itempath):
os.remove(ziptools.FWP(itempath))
elif os.path.isdir(itempath):
ziptools.tryrmtree(itempath)
# the zip bit
ziptools.extractzipfile(zipfrom, unzipto, nofixlinks)
if interactive and sys.platform.startswith('win'):
input('Press Enter to exit.') # stay up if clicked
|
[
"[email protected]"
] | |
73397cdefaa3f889a395f6445034301d0a731cbc
|
bde6ed092b7b29703737e11c5a5ff90934af3d74
|
/AtCoder/ABC/128/a.py
|
a9866e7e31e1d163626daae9c34501fc2c277bec
|
[] |
no_license
|
takecian/ProgrammingStudyLog
|
2ab7ea601e0996b3fa502b81ec141bc3772442b6
|
94485d131c0cc9842f1f4799da2d861dbf09b12a
|
refs/heads/master
| 2023-04-28T16:56:18.943574 | 2023-04-18T06:34:58 | 2023-04-18T06:34:58 | 128,525,713 | 4 | 0 | null | 2022-12-09T06:15:19 | 2018-04-07T12:21:29 |
Python
|
UTF-8
|
Python
| false | false | 219 |
py
|
#
import itertools
from collections import Counter
from collections import defaultdict
import bisect
def main():
A, P = map(int, input().split())
print((A * 3 + P) // 2)
if __name__ == '__main__':
main()
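# Worked check (assuming the ABC128-A statement: A apples cut into 3 pieces
# each, plus P loose pieces, and 2 pieces per pie):
# A=1, P=3 -> (1*3 + 3) // 2 = 3 pies.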
|
[
"[email protected]"
] | |
a6c9ef054d9f5125756b43703b676622c0ba1e9a
|
5a628b54e511e6186dcbc8636b530eab48cc6523
|
/django/db/models/fields/related.py
|
100844795f5fc19a0dbcfb8442ac9a10af15492f
|
[
"BSD-3-Clause",
"Python-2.0"
] |
permissive
|
hdknr/annotated-django
|
9c6241853ce09d0d130a57e0f6611a062cc4f17b
|
5843908dd6586a54b92d974f45049fa87e64db8b
|
refs/heads/2.1.x
| 2021-05-22T08:08:11.469887 | 2020-01-25T22:41:11 | 2020-01-25T22:41:11 | 41,231,818 | 0 | 0 |
NOASSERTION
| 2019-11-02T22:33:33 | 2015-08-23T02:04:03 |
Python
|
UTF-8
|
Python
| false | false | 68,142 |
py
|
import functools
import inspect
from functools import partial
from django import forms
from django.apps import apps
from django.core import checks, exceptions
from django.db import connection, router
from django.db.backends import utils
from django.db.models import Q
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, SET_DEFAULT, SET_NULL
from django.db.models.query_utils import PathInfo
from django.db.models.utils import make_model_tuple
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from . import Field
from .mixins import FieldCacheMixin
from .related_descriptors import (
ForwardManyToOneDescriptor, ForwardOneToOneDescriptor,
ManyToManyDescriptor, ReverseManyToOneDescriptor,
ReverseOneToOneDescriptor,
)
from .related_lookups import (
RelatedExact, RelatedGreaterThan, RelatedGreaterThanOrEqual, RelatedIn,
RelatedIsNull, RelatedLessThan, RelatedLessThanOrEqual,
)
from .reverse_related import (
ForeignObjectRel, ManyToManyRel, ManyToOneRel, OneToOneRel,
)
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
def resolve_relation(scope_model, relation):
"""
Transform relation into a model or fully-qualified model string of the form
"app_label.ModelName", relative to scope_model.
The relation argument can be:
* RECURSIVE_RELATIONSHIP_CONSTANT, i.e. the string "self", in which case
the model argument will be returned.
* A bare model name without an app_label, in which case scope_model's
app_label will be prepended.
* An "app_label.ModelName" string.
* A model class, which will be returned unchanged.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
relation = scope_model
# Look for an "app.Model" relation
if isinstance(relation, str):
if "." not in relation:
relation = "%s.%s" % (scope_model._meta.app_label, relation)
return relation
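# Illustrative results for the cases above (model and app names hypothetical):
#   resolve_relation(Book, 'self')           -> Book
#   resolve_relation(Book, 'Author')         -> 'library.Author'  (Book in app 'library')
#   resolve_relation(Book, 'shop.Publisher') -> 'shop.Publisher'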
def lazy_related_operation(function, model, *related_models, **kwargs):
"""
Schedule `function` to be called once `model` and all `related_models`
have been imported and registered with the app registry. `function` will
be called with the newly-loaded model classes as its positional arguments,
plus any optional keyword arguments.
The `model` argument must be a model class. Each subsequent positional
argument is another model, or a reference to another model - see
`resolve_relation()` for the various forms these may take. Any relative
references will be resolved relative to `model`.
This is a convenience wrapper for `Apps.lazy_model_operation` - the app
registry model used is the one found in `model._meta.apps`.
"""
models = [model] + [resolve_relation(model, rel) for rel in related_models]
model_keys = (make_model_tuple(m) for m in models)
apps = model._meta.apps
return apps.lazy_model_operation(partial(function, **kwargs), *model_keys)
class RelatedField(FieldCacheMixin, Field):
"""Base class that all relational fields inherit from."""
# Field flags
one_to_many = False
one_to_one = False
many_to_many = False
many_to_one = False
@cached_property
def related_model(self):
# Can't cache this property until all the models are loaded.
apps.check_models_ready()
return self.remote_field.model
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_related_name_is_valid(),
*self._check_related_query_name_is_valid(),
*self._check_relation_model_exists(),
*self._check_referencing_to_swapped_model(),
*self._check_clashes(),
]
def _check_related_name_is_valid(self):
import keyword
related_name = self.remote_field.related_name
if related_name is None:
return []
is_valid_id = not keyword.iskeyword(related_name) and related_name.isidentifier()
if not (is_valid_id or related_name.endswith('+')):
return [
checks.Error(
"The name '%s' is invalid related_name for field %s.%s" %
(self.remote_field.related_name, self.model._meta.object_name,
self.name),
hint="Related name must be a valid Python identifier or end with a '+'",
obj=self,
id='fields.E306',
)
]
return []
def _check_related_query_name_is_valid(self):
if self.remote_field.is_hidden():
return []
rel_query_name = self.related_query_name()
errors = []
if rel_query_name.endswith('_'):
errors.append(
checks.Error(
"Reverse query name '%s' must not end with an underscore."
% (rel_query_name,),
hint=("Add or change a related_name or related_query_name "
"argument for this field."),
obj=self,
id='fields.E308',
)
)
if LOOKUP_SEP in rel_query_name:
errors.append(
checks.Error(
"Reverse query name '%s' must not contain '%s'."
% (rel_query_name, LOOKUP_SEP),
hint=("Add or change a related_name or related_query_name "
"argument for this field."),
obj=self,
id='fields.E309',
)
)
return errors
def _check_relation_model_exists(self):
rel_is_missing = self.remote_field.model not in self.opts.apps.get_models()
rel_is_string = isinstance(self.remote_field.model, str)
model_name = self.remote_field.model if rel_is_string else self.remote_field.model._meta.object_name
if rel_is_missing and (rel_is_string or not self.remote_field.model._meta.swapped):
return [
checks.Error(
"Field defines a relation with model '%s', which is either "
"not installed, or is abstract." % model_name,
obj=self,
id='fields.E300',
)
]
return []
def _check_referencing_to_swapped_model(self):
if (self.remote_field.model not in self.opts.apps.get_models() and
not isinstance(self.remote_field.model, str) and
self.remote_field.model._meta.swapped):
model = "%s.%s" % (
self.remote_field.model._meta.app_label,
self.remote_field.model._meta.object_name
)
return [
checks.Error(
"Field defines a relation with the model '%s', which has "
"been swapped out." % model,
hint="Update the relation to point at 'settings.%s'." % self.remote_field.model._meta.swappable,
obj=self,
id='fields.E301',
)
]
return []
def _check_clashes(self):
"""Check accessor and reverse query name clashes."""
from django.db.models.base import ModelBase
errors = []
opts = self.model._meta
# `f.remote_field.model` may be a string instead of a model. Skip if model name is
# not resolved.
if not isinstance(self.remote_field.model, ModelBase):
return []
# Consider that we are checking field `Model.foreign` and the models
# are:
#
# class Target(models.Model):
# model = models.IntegerField()
# model_set = models.IntegerField()
#
# class Model(models.Model):
# foreign = models.ForeignKey(Target)
# m2m = models.ManyToManyField(Target)
# rel_opts.object_name == "Target"
rel_opts = self.remote_field.model._meta
# If the field doesn't install a backward relation on the target model
# (so `is_hidden` returns True), then there are no clashes to check
# and we can skip these fields.
rel_is_hidden = self.remote_field.is_hidden()
rel_name = self.remote_field.get_accessor_name() # i. e. "model_set"
rel_query_name = self.related_query_name() # i. e. "model"
field_name = "%s.%s" % (opts.object_name, self.name) # i. e. "Model.field"
# Check clashes between accessor or reverse query name of `field`
# and any other field name -- i.e. accessor for Model.foreign is
# model_set and it clashes with Target.model_set.
potential_clashes = rel_opts.fields + rel_opts.many_to_many
for clash_field in potential_clashes:
clash_name = "%s.%s" % (rel_opts.object_name, clash_field.name) # i.e. "Target.model_set"
if not rel_is_hidden and clash_field.name == rel_name:
errors.append(
checks.Error(
"Reverse accessor for '%s' clashes with field name '%s'." % (field_name, clash_name),
hint=("Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'.") % (clash_name, field_name),
obj=self,
id='fields.E302',
)
)
if clash_field.name == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with field name '%s'." % (field_name, clash_name),
hint=("Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'.") % (clash_name, field_name),
obj=self,
id='fields.E303',
)
)
# Check clashes between accessors/reverse query names of `field` and
# any other field accessor -- i. e. Model.foreign accessor clashes with
# Model.m2m accessor.
potential_clashes = (r for r in rel_opts.related_objects if r.field is not self)
for clash_field in potential_clashes:
clash_name = "%s.%s" % ( # i. e. "Model.m2m"
clash_field.related_model._meta.object_name,
clash_field.field.name)
if not rel_is_hidden and clash_field.get_accessor_name() == rel_name:
errors.append(
checks.Error(
"Reverse accessor for '%s' clashes with reverse accessor for '%s'." % (field_name, clash_name),
hint=("Add or change a related_name argument "
"to the definition for '%s' or '%s'.") % (field_name, clash_name),
obj=self,
id='fields.E304',
)
)
if clash_field.get_accessor_name() == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with reverse query name for '%s'."
% (field_name, clash_name),
hint=("Add or change a related_name argument "
"to the definition for '%s' or '%s'.") % (field_name, clash_name),
obj=self,
id='fields.E305',
)
)
return errors
def db_type(self, connection):
# By default related field will not have a column as it relates to
# columns from another table.
return None
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
self.opts = cls._meta
if not cls._meta.abstract:
if self.remote_field.related_name:
related_name = self.remote_field.related_name
else:
related_name = self.opts.default_related_name
if related_name:
related_name = related_name % {
'class': cls.__name__.lower(),
'model_name': cls._meta.model_name.lower(),
'app_label': cls._meta.app_label.lower()
}
self.remote_field.related_name = related_name
if self.remote_field.related_query_name:
related_query_name = self.remote_field.related_query_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
self.remote_field.related_query_name = related_query_name
def resolve_related_class(model, related, field):
field.remote_field.model = related
field.do_related_class(related, model)
lazy_related_operation(resolve_related_class, cls, self.remote_field.model, field=self)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.remote_field.limit_choices_to:
kwargs['limit_choices_to'] = self.remote_field.limit_choices_to
if self.remote_field.related_name is not None:
kwargs['related_name'] = self.remote_field.related_name
if self.remote_field.related_query_name is not None:
kwargs['related_query_name'] = self.remote_field.related_query_name
return name, path, args, kwargs
def get_forward_related_filter(self, obj):
"""
Return the keyword arguments that when supplied to
        self.model.objects.filter(), would select all instances related through
this field to the remote obj. This is used to build the querysets
returned by related descriptors. obj is an instance of
self.related_field.model.
"""
return {
'%s__%s' % (self.name, rh_field.name): getattr(obj, rh_field.attname)
for _, rh_field in self.related_fields
}
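    # Illustrative sketch, not part of the original source: for a
    # hypothetical ``author = ForeignKey(Author)`` whose target is Author's
    # default ``id`` primary key, get_forward_related_filter(author_obj)
    # returns
    #     {'author__id': author_obj.id}
    # i.e. one '<field>__<remote_field>' key per column pair in the relation.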
def get_reverse_related_filter(self, obj):
"""
Complement to get_forward_related_filter(). Return the keyword
        arguments that when passed to self.related_field.model.objects.filter()
select all instances of self.related_field.model related through
this field to obj. obj is an instance of self.model.
"""
base_filter = {
rh_field.attname: getattr(obj, lh_field.attname)
for lh_field, rh_field in self.related_fields
}
descriptor_filter = self.get_extra_descriptor_filter(obj)
base_q = Q(**base_filter)
if isinstance(descriptor_filter, dict):
return base_q & Q(**descriptor_filter)
elif descriptor_filter:
return base_q & descriptor_filter
return base_q
@property
def swappable_setting(self):
"""
Get the setting that this is powered from for swapping, or None
if it's not swapped in / marked with swappable=False.
"""
if self.swappable:
# Work out string form of "to"
if isinstance(self.remote_field.model, str):
to_string = self.remote_field.model
else:
to_string = self.remote_field.model._meta.label
return apps.get_swappable_settings_name(to_string)
return None
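    # Illustrative example, not part of the original source: for a field
    # declared against a swappable model, e.g.
    #     owner = models.ForeignKey(settings.AUTH_USER_MODEL, ...)
    # swappable_setting resolves to the string 'AUTH_USER_MODEL', which
    # deconstruct() later wraps in a SettingsReference.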
def set_attributes_from_rel(self):
self.name = (
self.name or
(self.remote_field.model._meta.model_name + '_' + self.remote_field.model._meta.pk.name)
)
if self.verbose_name is None:
self.verbose_name = self.remote_field.model._meta.verbose_name
self.remote_field.set_field_name()
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.contribute_to_related_class(other, self.remote_field)
def get_limit_choices_to(self):
""" limit_choices_to を返す
Return ``limit_choices_to`` for this model field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.remote_field.limit_choices_to):
            # If it is a callable, call it to resolve the value.
return self.remote_field.limit_choices_to()
return self.remote_field.limit_choices_to
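    # Illustrative example, not part of the original source: a callable is
    # resolved lazily, e.g. for a hypothetical field
    #     staff = models.ForeignKey(User, models.CASCADE,
    #                               limit_choices_to=lambda: {'is_staff': True})
    # get_limit_choices_to() returns {'is_staff': True} when forms are built.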
def formfield(self, **kwargs):
"""
Pass ``limit_choices_to`` to the field being constructed.
Only passes it if there is a type that supports related fields.
This is a similar strategy used to pass the ``queryset`` to the field
being constructed.
"""
defaults = {}
if hasattr(self.remote_field, 'get_related_field'):
# If this is a callable, do not invoke it here. Just pass
# it in the defaults for when the form class will later be
# instantiated.
limit_choices_to = self.remote_field.limit_choices_to
defaults.update({
'limit_choices_to': limit_choices_to,
})
defaults.update(kwargs)
return super().formfield(**defaults)
def related_query_name(self):
"""
Define the name that can be used to identify this related object in a
table-spanning query.
"""
return self.remote_field.related_query_name or self.remote_field.related_name or self.opts.model_name
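    # Illustrative example, not part of the original source: for a
    # hypothetical Entry model with
    #     blog = models.ForeignKey(Blog, models.CASCADE)
    # the default reverse query name is the model name, so the relation can
    # be spanned with Blog.objects.filter(entry__headline__contains='Lennon').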
@property
def target_field(self):
"""
When filtering against this relation, return the field on the remote
model against which the filtering should happen.
"""
target_fields = self.get_path_info()[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"The relation has multiple target fields, but only single target field was asked for")
return target_fields[0]
def get_cache_name(self):
return self.name
class ForeignObject(RelatedField):
"""
Abstraction of the ForeignKey relation to support multi-column relations.
"""
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
requires_unique_target = True
related_accessor_class = ReverseManyToOneDescriptor
forward_related_accessor_class = ForwardManyToOneDescriptor
rel_class = ForeignObjectRel
def __init__(self, to, on_delete, from_fields, to_fields, rel=None, related_name=None,
related_query_name=None, limit_choices_to=None, parent_link=False,
swappable=True, **kwargs):
if rel is None:
rel = self.rel_class(
self, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
super().__init__(rel=rel, **kwargs)
self.from_fields = from_fields
self.to_fields = to_fields
self.swappable = swappable
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_to_fields_exist(),
*self._check_unique_target(),
]
def _check_to_fields_exist(self):
# Skip nonexistent models.
if isinstance(self.remote_field.model, str):
return []
errors = []
for to_field in self.to_fields:
if to_field:
try:
self.remote_field.model._meta.get_field(to_field)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The to_field '%s' doesn't exist on the related "
"model '%s'."
% (to_field, self.remote_field.model._meta.label),
obj=self,
id='fields.E312',
)
)
return errors
def _check_unique_target(self):
rel_is_string = isinstance(self.remote_field.model, str)
if rel_is_string or not self.requires_unique_target:
return []
try:
self.foreign_related_fields
except exceptions.FieldDoesNotExist:
return []
if not self.foreign_related_fields:
return []
unique_foreign_fields = {
frozenset([f.name])
for f in self.remote_field.model._meta.get_fields()
if getattr(f, 'unique', False)
}
unique_foreign_fields.update({
frozenset(ut)
for ut in self.remote_field.model._meta.unique_together
})
foreign_fields = {f.name for f in self.foreign_related_fields}
has_unique_constraint = any(u <= foreign_fields for u in unique_foreign_fields)
if not has_unique_constraint and len(self.foreign_related_fields) > 1:
field_combination = ', '.join(
"'%s'" % rel_field.name for rel_field in self.foreign_related_fields
)
model_name = self.remote_field.model.__name__
return [
checks.Error(
"No subset of the fields %s on model '%s' is unique."
% (field_combination, model_name),
hint=(
"Add unique=True on any of those fields or add at "
"least a subset of them to a unique_together constraint."
),
obj=self,
id='fields.E310',
)
]
elif not has_unique_constraint:
field_name = self.foreign_related_fields[0].name
model_name = self.remote_field.model.__name__
return [
checks.Error(
"'%s.%s' must set unique=True because it is referenced by "
"a foreign key." % (model_name, field_name),
obj=self,
id='fields.E311',
)
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs['on_delete'] = self.remote_field.on_delete
kwargs['from_fields'] = self.from_fields
kwargs['to_fields'] = self.to_fields
if self.remote_field.parent_link:
kwargs['parent_link'] = self.remote_field.parent_link
# Work out string form of "to"
if isinstance(self.remote_field.model, str):
kwargs['to'] = self.remote_field.model
else:
kwargs['to'] = "%s.%s" % (
self.remote_field.model._meta.app_label,
self.remote_field.model._meta.object_name,
)
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error
if hasattr(kwargs['to'], "setting_name"):
if kwargs['to'].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ForeignKey pointing to a model "
"that is swapped in place of more than one model (%s and %s)"
% (kwargs['to'].setting_name, swappable_setting)
)
# Set it
from django.db.migrations.writer import SettingsReference
kwargs['to'] = SettingsReference(
kwargs['to'],
swappable_setting,
)
return name, path, args, kwargs
def resolve_related_fields(self):
if not self.from_fields or len(self.from_fields) != len(self.to_fields):
raise ValueError('Foreign Object from and to fields must be the same non-zero length')
if isinstance(self.remote_field.model, str):
raise ValueError('Related model %r cannot be resolved' % self.remote_field.model)
related_fields = []
for index in range(len(self.from_fields)):
from_field_name = self.from_fields[index]
to_field_name = self.to_fields[index]
from_field = (self if from_field_name == 'self'
else self.opts.get_field(from_field_name))
to_field = (self.remote_field.model._meta.pk if to_field_name is None
else self.remote_field.model._meta.get_field(to_field_name))
related_fields.append((from_field, to_field))
return related_fields
@property
def related_fields(self):
if not hasattr(self, '_related_fields'):
self._related_fields = self.resolve_related_fields()
return self._related_fields
@property
def reverse_related_fields(self):
return [(rhs_field, lhs_field) for lhs_field, rhs_field in self.related_fields]
@property
def local_related_fields(self):
return tuple(lhs_field for lhs_field, rhs_field in self.related_fields)
@property
def foreign_related_fields(self):
return tuple(rhs_field for lhs_field, rhs_field in self.related_fields if rhs_field)
def get_local_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.local_related_fields)
def get_foreign_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.foreign_related_fields)
@staticmethod
def get_instance_value_for_fields(instance, fields):
ret = []
opts = instance._meta
for field in fields:
# Gotcha: in some cases (like fixture loading) a model can have
# different values in parent_ptr_id and parent's id. So, use
# instance.pk (that is, parent_ptr_id) when asked for instance.id.
if field.primary_key:
possible_parent_link = opts.get_ancestor_link(field.model)
if (not possible_parent_link or
possible_parent_link.primary_key or
possible_parent_link.model._meta.abstract):
ret.append(instance.pk)
continue
ret.append(getattr(instance, field.attname))
return tuple(ret)
def get_attname_column(self):
attname, column = super().get_attname_column()
return attname, None
def get_joining_columns(self, reverse_join=False):
source = self.reverse_related_fields if reverse_join else self.related_fields
return tuple((lhs_field.column, rhs_field.column) for lhs_field, rhs_field in source)
def get_reverse_joining_columns(self):
return self.get_joining_columns(reverse_join=True)
def get_extra_descriptor_filter(self, instance):
"""
Return an extra filter condition for related object fetching when
user does 'instance.fieldname', that is the extra filter is used in
the descriptor of the field.
The filter should be either a dict usable in .filter(**kwargs) call or
a Q-object. The condition will be ANDed together with the relation's
joining columns.
A parallel method is get_extra_restriction() which is used in
JOIN and subquery conditions.
"""
return {}
def get_extra_restriction(self, where_class, alias, related_alias):
"""
Return a pair condition used for joining and subquery pushdown. The
condition is something that responds to as_sql(compiler, connection)
method.
Note that currently referring both the 'alias' and 'related_alias'
will not work in some conditions, like subquery pushdown.
A parallel method is get_extra_descriptor_filter() which is used in
instance.fieldname related object fetching.
"""
return None
def get_path_info(self, filtered_relation=None):
"""Get path from this field to the related model."""
opts = self.remote_field.model._meta
from_opts = self.model._meta
return [PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=self.foreign_related_fields,
join_field=self,
m2m=False,
direct=True,
filtered_relation=filtered_relation,
)]
def get_reverse_path_info(self, filtered_relation=None):
"""Get path from the related model to this field's model."""
opts = self.model._meta
from_opts = self.remote_field.model._meta
return [PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=(opts.pk,),
join_field=self.remote_field,
m2m=not self.unique,
direct=False,
filtered_relation=filtered_relation,
)]
@classmethod
@functools.lru_cache(maxsize=None)
def get_lookups(cls):
bases = inspect.getmro(cls)
bases = bases[:bases.index(ForeignObject) + 1]
class_lookups = [parent.__dict__.get('class_lookups', {}) for parent in bases]
return cls.merge_dicts(class_lookups)
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
setattr(cls, self.name, self.forward_related_accessor_class(self))
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if not self.remote_field.is_hidden() and not related.related_model._meta.swapped:
setattr(cls._meta.concrete_model, related.get_accessor_name(), self.related_accessor_class(related))
# While 'limit_choices_to' might be a callable, simply pass
# it along for later - this is too early because it's still
# model load time.
if self.remote_field.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.remote_field.limit_choices_to)
ForeignObject.register_lookup(RelatedIn)
ForeignObject.register_lookup(RelatedExact)
ForeignObject.register_lookup(RelatedLessThan)
ForeignObject.register_lookup(RelatedGreaterThan)
ForeignObject.register_lookup(RelatedGreaterThanOrEqual)
ForeignObject.register_lookup(RelatedLessThanOrEqual)
ForeignObject.register_lookup(RelatedIsNull)
class ForeignKey(ForeignObject):
"""
Provide a many-to-one relation by adding a column to the local model
to hold the remote value.
By default ForeignKey will target the pk of the remote model but this
behavior can be changed by using the ``to_field`` argument.
"""
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
rel_class = ManyToOneRel
empty_strings_allowed = False
default_error_messages = {
'invalid': _('%(model)s instance with %(field)s %(value)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, on_delete, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, to_field=None,
db_constraint=True, **kwargs):
try:
to._meta.model_name
except AttributeError:
assert isinstance(to, str), (
"%s(%r) is invalid. First parameter to ForeignKey must be "
"either a model, a model name, or the string %r" % (
self.__class__.__name__, to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
else:
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['rel'] = self.rel_class(
self, to, to_field,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
kwargs.setdefault('db_index', True)
super().__init__(to, on_delete, from_fields=['self'], to_fields=[to_field], **kwargs)
self.db_constraint = db_constraint
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_on_delete(),
*self._check_unique(),
]
def _check_on_delete(self):
on_delete = getattr(self.remote_field, 'on_delete', None)
if on_delete == SET_NULL and not self.null:
return [
checks.Error(
'Field specifies on_delete=SET_NULL, but cannot be null.',
hint='Set null=True argument on the field, or change the on_delete rule.',
obj=self,
id='fields.E320',
)
]
elif on_delete == SET_DEFAULT and not self.has_default():
return [
checks.Error(
'Field specifies on_delete=SET_DEFAULT, but has no default value.',
hint='Set a default value, or change the on_delete rule.',
obj=self,
id='fields.E321',
)
]
else:
return []
def _check_unique(self, **kwargs):
return [
checks.Warning(
'Setting unique=True on a ForeignKey has the same effect as using a OneToOneField.',
hint='ForeignKey(unique=True) is usually better served by a OneToOneField.',
obj=self,
id='fields.W342',
)
] if self.unique else []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['to_fields']
del kwargs['from_fields']
# Handle the simpler arguments
if self.db_index:
del kwargs['db_index']
else:
kwargs['db_index'] = False
if self.db_constraint is not True:
kwargs['db_constraint'] = self.db_constraint
# Rel needs more work.
to_meta = getattr(self.remote_field.model, "_meta", None)
if self.remote_field.field_name and (
not to_meta or (to_meta.pk and self.remote_field.field_name != to_meta.pk.name)):
kwargs['to_field'] = self.remote_field.field_name
return name, path, args, kwargs
def to_python(self, value):
return self.target_field.to_python(value)
@property
def target_field(self):
return self.foreign_related_fields[0]
def get_reverse_path_info(self, filtered_relation=None):
"""Get path from the related model to this field's model."""
opts = self.model._meta
from_opts = self.remote_field.model._meta
return [PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=(opts.pk,),
join_field=self.remote_field,
m2m=not self.unique,
direct=False,
filtered_relation=filtered_relation,
)]
def validate(self, value, model_instance):
if self.remote_field.parent_link:
return
super().validate(value, model_instance)
if value is None:
return
using = router.db_for_read(self.remote_field.model, instance=model_instance)
qs = self.remote_field.model._default_manager.using(using).filter(
**{self.remote_field.field_name: value}
)
qs = qs.complex_filter(self.get_limit_choices_to())
if not qs.exists():
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={
'model': self.remote_field.model._meta.verbose_name, 'pk': value,
'field': self.remote_field.field_name, 'value': value,
}, # 'pk' is included for backwards compatibility
)
def get_attname(self):
return '%s_id' % self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
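    # Illustrative example, not part of the original source: a field declared
    # as ``author = models.ForeignKey(Author, models.CASCADE)`` stores its raw
    # value on the instance as ``author_id`` and, absent an explicit
    # db_column, uses "author_id" as the database column too.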
def get_default(self):
"""Return the to_field if the default value is an object."""
field_default = super().get_default()
if isinstance(field_default, self.remote_field.model):
return getattr(field_default, self.target_field.attname)
return field_default
def get_db_prep_save(self, value, connection):
if value is None or (value == '' and
(not self.target_field.empty_strings_allowed or
connection.features.interprets_empty_strings_as_nulls)):
return None
else:
return self.target_field.get_db_prep_save(value, connection=connection)
def get_db_prep_value(self, value, connection, prepared=False):
return self.target_field.get_db_prep_value(value, connection, prepared)
def contribute_to_related_class(self, cls, related):
super().contribute_to_related_class(cls, related)
if self.remote_field.field_name is None:
self.remote_field.field_name = cls._meta.pk.name
def formfield(self, *, using=None, **kwargs):
if isinstance(self.remote_field.model, str):
raise ValueError("Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet" %
(self.name, self.remote_field.model))
return super().formfield(**{
'form_class': forms.ModelChoiceField,
'queryset': self.remote_field.model._default_manager.using(using),
'to_field_name': self.remote_field.field_name,
**kwargs,
})
def db_check(self, connection):
return []
def db_type(self, connection):
return self.target_field.rel_db_type(connection=connection)
def db_parameters(self, connection):
return {"type": self.db_type(connection), "check": self.db_check(connection)}
def convert_empty_strings(self, value, expression, connection):
if (not value) and isinstance(value, str):
return None
return value
def get_db_converters(self, connection):
converters = super().get_db_converters(connection)
if connection.features.interprets_empty_strings_as_nulls:
converters += [self.convert_empty_strings]
return converters
def get_col(self, alias, output_field=None):
return super().get_col(alias, output_field or self.target_field)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
that it always carries a "unique" constraint with it and the reverse
relation always returns the object pointed to (since there will only ever
be one), rather than returning a list.
"""
# Field flags
many_to_many = False
many_to_one = False
one_to_many = False
one_to_one = True
related_accessor_class = ReverseOneToOneDescriptor
forward_related_accessor_class = ForwardOneToOneDescriptor
rel_class = OneToOneRel
description = _("One-to-one relationship")
def __init__(self, to, on_delete, to_field=None, **kwargs):
kwargs['unique'] = True
super().__init__(to, on_delete, to_field=to_field, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if "unique" in kwargs:
del kwargs['unique']
return name, path, args, kwargs
def formfield(self, **kwargs):
if self.remote_field.parent_link:
return None
return super().formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.remote_field.model):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
def _check_unique(self, **kwargs):
# Override ForeignKey since check isn't applicable here.
return []
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
def set_managed(model, related, through):
through._meta.managed = model._meta.managed or related._meta.managed
to_model = resolve_relation(klass, field.remote_field.model)
name = '%s_%s' % (klass._meta.object_name, field.name)
lazy_related_operation(set_managed, klass, to_model, name)
to = make_model_tuple(to_model)[1]
from_ = klass._meta.model_name
if to == from_:
to = 'to_%s' % to
from_ = 'from_%s' % from_
meta = type('Meta', (), {
'db_table': field._get_m2m_db_table(klass._meta),
'auto_created': klass,
'app_label': klass._meta.app_label,
'db_tablespace': klass._meta.db_tablespace,
'unique_together': (from_, to),
'verbose_name': _('%(from)s-%(to)s relationship') % {'from': from_, 'to': to},
'verbose_name_plural': _('%(from)s-%(to)s relationships') % {'from': from_, 'to': to},
'apps': field.model._meta.apps,
})
# Construct and return the new class.
return type(name, (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(
klass,
related_name='%s+' % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
),
to: models.ForeignKey(
to_model,
related_name='%s+' % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
)
})
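# Illustrative example, not part of the original source: for a hypothetical
#     class Article(models.Model):
#         tags = models.ManyToManyField(Tag)
# this factory builds a hidden model named "Article_tags" with two cascading
# foreign keys, ``article`` and ``tag``, unique_together over the pair, and a
# db_table such as "<app>_article_tags".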
class ManyToManyField(RelatedField):
"""
Provide a many-to-many relation by using an intermediary model that
holds two ForeignKey fields pointed at the two sides of the relation.
Unless a ``through`` model was provided, ManyToManyField will use the
create_many_to_many_intermediary_model factory to automatically generate
the intermediary model.
"""
# Field flags
many_to_many = True
many_to_one = False
one_to_many = False
one_to_one = False
rel_class = ManyToManyRel
description = _("Many-to-many relationship")
def __init__(self, to, related_name=None, related_query_name=None,
limit_choices_to=None, symmetrical=None, through=None,
through_fields=None, db_constraint=True, db_table=None,
swappable=True, **kwargs):
try:
to._meta
except AttributeError:
assert isinstance(to, str), (
"%s(%r) is invalid. First parameter to ManyToManyField must be "
"either a model, a model name, or the string %r" %
(self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
)
if symmetrical is None:
symmetrical = (to == RECURSIVE_RELATIONSHIP_CONSTANT)
if through is not None:
assert db_table is None, (
"Cannot specify a db_table if an intermediary model is used."
)
kwargs['rel'] = self.rel_class(
self, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
symmetrical=symmetrical,
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.has_null_arg = 'null' in kwargs
super().__init__(**kwargs)
self.db_table = db_table
self.swappable = swappable
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_unique(**kwargs),
*self._check_relationship_model(**kwargs),
*self._check_ignored_options(**kwargs),
*self._check_table_uniqueness(**kwargs),
]
def _check_unique(self, **kwargs):
if self.unique:
return [
checks.Error(
'ManyToManyFields cannot be unique.',
obj=self,
id='fields.E330',
)
]
return []
def _check_ignored_options(self, **kwargs):
warnings = []
if self.has_null_arg:
warnings.append(
checks.Warning(
'null has no effect on ManyToManyField.',
obj=self,
id='fields.W340',
)
)
if self._validators:
warnings.append(
checks.Warning(
'ManyToManyField does not support validators.',
obj=self,
id='fields.W341',
)
)
if (self.remote_field.limit_choices_to and self.remote_field.through and
not self.remote_field.through._meta.auto_created):
warnings.append(
checks.Warning(
'limit_choices_to has no effect on ManyToManyField '
'with a through model.',
obj=self,
id='fields.W343',
)
)
return warnings
def _check_relationship_model(self, from_model=None, **kwargs):
if hasattr(self.remote_field.through, '_meta'):
qualified_model_name = "%s.%s" % (
self.remote_field.through._meta.app_label, self.remote_field.through.__name__)
else:
qualified_model_name = self.remote_field.through
errors = []
if self.remote_field.through not in self.opts.apps.get_models(include_auto_created=True):
# The relationship model is not installed.
errors.append(
checks.Error(
"Field specifies a many-to-many relation through model "
"'%s', which has not been installed." % qualified_model_name,
obj=self,
id='fields.E331',
)
)
else:
assert from_model is not None, (
"ManyToManyField with intermediate "
"tables cannot be checked if you don't pass the model "
"where the field is attached to."
)
# Set some useful local variables
to_model = resolve_relation(from_model, self.remote_field.model)
from_model_name = from_model._meta.object_name
if isinstance(to_model, str):
to_model_name = to_model
else:
to_model_name = to_model._meta.object_name
relationship_model_name = self.remote_field.through._meta.object_name
self_referential = from_model == to_model
# Check symmetrical attribute.
if (self_referential and self.remote_field.symmetrical and
not self.remote_field.through._meta.auto_created):
errors.append(
checks.Error(
'Many-to-many fields with intermediate tables must not be symmetrical.',
obj=self,
id='fields.E332',
)
)
# Count foreign keys in intermediate model
if self_referential:
seen_self = sum(
from_model == getattr(field.remote_field, 'model', None)
for field in self.remote_field.through._meta.fields
)
if seen_self > 2 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than two foreign keys "
"to '%s', which is ambiguous. You must specify "
"which two foreign keys Django should use via the "
"through_fields keyword argument." % (self, from_model_name),
hint="Use through_fields to specify which two foreign keys Django should use.",
obj=self.remote_field.through,
id='fields.E333',
)
)
else:
# Count foreign keys in relationship model
seen_from = sum(
from_model == getattr(field.remote_field, 'model', None)
for field in self.remote_field.through._meta.fields
)
seen_to = sum(
to_model == getattr(field.remote_field, 'model', None)
for field in self.remote_field.through._meta.fields
)
if seen_from > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
("The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"from '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument.") % (self, from_model_name),
hint=(
'If you want to create a recursive relationship, '
'use ForeignKey("self", symmetrical=False, through="%s").'
) % relationship_model_name,
obj=self,
id='fields.E334',
)
)
if seen_to > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"to '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument." % (self, to_model_name),
hint=(
'If you want to create a recursive relationship, '
'use ForeignKey("self", symmetrical=False, through="%s").'
) % relationship_model_name,
obj=self,
id='fields.E335',
)
)
if seen_from == 0 or seen_to == 0:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it does not have a foreign key to '%s' or '%s'." % (
self, from_model_name, to_model_name
),
obj=self.remote_field.through,
id='fields.E336',
)
)
# Validate `through_fields`.
if self.remote_field.through_fields is not None:
# Validate that we're given an iterable of at least two items
# and that none of them is "falsy".
if not (len(self.remote_field.through_fields) >= 2 and
self.remote_field.through_fields[0] and self.remote_field.through_fields[1]):
errors.append(
checks.Error(
"Field specifies 'through_fields' but does not provide "
"the names of the two link fields that should be used "
"for the relation through model '%s'." % qualified_model_name,
hint="Make sure you specify 'through_fields' as through_fields=('field1', 'field2')",
obj=self,
id='fields.E337',
)
)
# Validate the given through fields -- they should be actual
# fields on the through model, and also be foreign keys to the
# expected models.
else:
assert from_model is not None, (
"ManyToManyField with intermediate "
"tables cannot be checked if you don't pass the model "
"where the field is attached to."
)
source, through, target = from_model, self.remote_field.through, self.remote_field.model
source_field_name, target_field_name = self.remote_field.through_fields[:2]
for field_name, related_model in ((source_field_name, source),
(target_field_name, target)):
possible_field_names = []
for f in through._meta.fields:
if hasattr(f, 'remote_field') and getattr(f.remote_field, 'model', None) == related_model:
possible_field_names.append(f.name)
if possible_field_names:
hint = "Did you mean one of the following foreign keys to '%s': %s?" % (
related_model._meta.object_name,
', '.join(possible_field_names),
)
else:
hint = None
try:
field = through._meta.get_field(field_name)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The intermediary model '%s' has no field '%s'."
% (qualified_model_name, field_name),
hint=hint,
obj=self,
id='fields.E338',
)
)
else:
if not (hasattr(field, 'remote_field') and
getattr(field.remote_field, 'model', None) == related_model):
errors.append(
checks.Error(
"'%s.%s' is not a foreign key to '%s'." % (
through._meta.object_name, field_name,
related_model._meta.object_name,
),
hint=hint,
obj=self,
id='fields.E339',
)
)
return errors
def _check_table_uniqueness(self, **kwargs):
if isinstance(self.remote_field.through, str) or not self.remote_field.through._meta.managed:
return []
registered_tables = {
model._meta.db_table: model
for model in self.opts.apps.get_models(include_auto_created=True)
if model != self.remote_field.through and model._meta.managed
}
m2m_db_table = self.m2m_db_table()
model = registered_tables.get(m2m_db_table)
# The second condition allows multiple m2m relations on a model if
# some point to a through model that proxies another through model.
if model and model._meta.concrete_model != self.remote_field.through._meta.concrete_model:
if model._meta.auto_created:
def _get_field_name(model):
for field in model._meta.auto_created._meta.many_to_many:
if field.remote_field.through is model:
return field.name
opts = model._meta.auto_created._meta
clashing_obj = '%s.%s' % (opts.label, _get_field_name(model))
else:
clashing_obj = '%s' % model._meta.label
return [
checks.Error(
"The field's intermediary table '%s' clashes with the "
"table name of '%s'." % (m2m_db_table, clashing_obj),
obj=self,
id='fields.E340',
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# Handle the simpler arguments.
if self.db_table is not None:
kwargs['db_table'] = self.db_table
if self.remote_field.db_constraint is not True:
kwargs['db_constraint'] = self.remote_field.db_constraint
# Rel needs more work.
if isinstance(self.remote_field.model, str):
kwargs['to'] = self.remote_field.model
else:
kwargs['to'] = "%s.%s" % (
self.remote_field.model._meta.app_label,
self.remote_field.model._meta.object_name,
)
if getattr(self.remote_field, 'through', None) is not None:
if isinstance(self.remote_field.through, str):
kwargs['through'] = self.remote_field.through
elif not self.remote_field.through._meta.auto_created:
kwargs['through'] = "%s.%s" % (
self.remote_field.through._meta.app_label,
self.remote_field.through._meta.object_name,
)
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error.
if hasattr(kwargs['to'], "setting_name"):
if kwargs['to'].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ManyToManyField pointing to a "
"model that is swapped in place of more than one model "
"(%s and %s)" % (kwargs['to'].setting_name, swappable_setting)
)
from django.db.migrations.writer import SettingsReference
kwargs['to'] = SettingsReference(
kwargs['to'],
swappable_setting,
)
return name, path, args, kwargs
def _get_path_info(self, direct=False, filtered_relation=None):
"""Called by both direct and indirect m2m traversal."""
int_model = self.remote_field.through
linkfield1 = int_model._meta.get_field(self.m2m_field_name())
linkfield2 = int_model._meta.get_field(self.m2m_reverse_field_name())
if direct:
join1infos = linkfield1.get_reverse_path_info()
join2infos = linkfield2.get_path_info(filtered_relation)
else:
join1infos = linkfield2.get_reverse_path_info()
join2infos = linkfield1.get_path_info(filtered_relation)
# Get join infos between the last model of join 1 and the first model
# of join 2. Assume the only reason these may differ is due to model
# inheritance.
join1_final = join1infos[-1].to_opts
join2_initial = join2infos[0].from_opts
if join1_final is join2_initial:
intermediate_infos = []
elif issubclass(join1_final.model, join2_initial.model):
intermediate_infos = join1_final.get_path_to_parent(join2_initial.model)
else:
intermediate_infos = join2_initial.get_path_from_parent(join1_final.model)
return [*join1infos, *intermediate_infos, *join2infos]
def get_path_info(self, filtered_relation=None):
return self._get_path_info(direct=True, filtered_relation=filtered_relation)
def get_reverse_path_info(self, filtered_relation=None):
return self._get_path_info(direct=False, filtered_relation=filtered_relation)
def _get_m2m_db_table(self, opts):
"""
Function that can be curried to provide the m2m table name for this
relation.
"""
if self.remote_field.through is not None:
return self.remote_field.through._meta.db_table
elif self.db_table:
return self.db_table
else:
m2m_table_name = '%s_%s' % (utils.strip_quotes(opts.db_table), self.name)
return utils.truncate_name(m2m_table_name, connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"""
Function that can be curried to provide the source accessor or DB
column name for the m2m table.
"""
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[0]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if (f.is_relation and f.remote_field.model == related.related_model and
(link_field_name is None or link_field_name == f.name)):
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"""
Function that can be curried to provide the related accessor or DB
column name for the m2m table.
"""
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[1]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if f.is_relation and f.remote_field.model == related.model:
if link_field_name is None and related.related_model == related.model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
elif link_field_name is None or link_field_name == f.name:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def contribute_to_class(self, cls, name, **kwargs):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.remote_field.symmetrical and (
self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name):
self.remote_field.related_name = "%s_rel_+" % name
elif self.remote_field.is_hidden():
# If the backwards relation is disabled, replace the original
# related_name with one generated from the m2m field name. Django
# still uses backwards relations internally and we need to avoid
# clashes between multiple m2m fields with related_name == '+'.
self.remote_field.related_name = "_%s_%s_+" % (cls.__name__.lower(), name)
super().contribute_to_class(cls, name, **kwargs)
# The intermediate m2m model is not auto created if:
# 1) There is a manually specified intermediate, or
        # 2) The class owning the m2m field is abstract, or
# 3) The class owning the m2m field has been swapped out.
if not cls._meta.abstract:
if self.remote_field.through:
def resolve_through_model(_, model, field):
field.remote_field.through = model
lazy_related_operation(resolve_through_model, cls, self.remote_field.through, field=self)
elif not cls._meta.swapped:
self.remote_field.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation.
setattr(cls, self.name, ManyToManyDescriptor(self.remote_field, reverse=False))
# Set up the accessor for the m2m table name for the relation.
self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta)
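    # Illustrative example, not part of the original source: for a
    # symmetrical self-relation on a hypothetical Person model,
    #     friends = models.ManyToManyField('self')
    # the hidden reverse name becomes "friends_rel_+", so no
    # ``person_set``-style accessor is exposed on Person.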
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if not self.remote_field.is_hidden() and not related.related_model._meta.swapped:
setattr(cls, related.get_accessor_name(), ManyToManyDescriptor(self.remote_field, reverse=True))
# Set up the accessors for the column names on the m2m table.
self.m2m_column_name = partial(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = partial(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = partial(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = partial(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = partial(self._get_m2m_attr, related, 'remote_field')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = partial(self._get_m2m_reverse_attr, related, 'remote_field')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
return [] if obj.pk is None else list(getattr(obj, self.attname).all())
def save_form_data(self, instance, data):
getattr(instance, self.attname).set(data)
def formfield(self, *, using=None, **kwargs):
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.remote_field.model._default_manager.using(using),
**kwargs,
}
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i.pk for i in initial]
return super().formfield(**defaults)
def db_check(self, connection):
return None
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
def db_parameters(self, connection):
return {"type": None, "check": None}
authors: ["[email protected]"]

----

blob_id: c863d3bd856aea04083da417daf952d5564beed7
directory_id: 0bf5e600ac0bd860919d4e43e8ccd55057a9e899
path: /Python Files/sunni_keydown.py
content_id: 2787b80c178a36b079a878127a86adb701c19a9d
detected_licenses: []
license_type: no_license
repo_name: AndyDeany/Sunni
snapshot_id: 3f4dc0d9ffdec864c5340d5fbc45c18f470c5165
revision_id: 3dbe0f0f8f25c1c3cff024ffadf1a2ca76cbadd0
branch_name: refs/heads/master
visit_date: 2020-12-25T14:14:01.610017
revision_date: 2016-10-04T12:48:06
committer_date: 2016-10-04T12:48:06
github_id: 66398293
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 3508
extension: py
import pygame  # assumed import (missing from this snippet); pygame must be initialised with a display before polling keys

keys = pygame.key.get_pressed()
# Miscellaneous
backspace_held = keys[8]
tab_held = keys[9]
enter_held = keys[13]
pausebreak_held = keys[19]
escape_held = keys[27]
space_held = keys[32]
apostrophe_held = keys[39]
comma_held = keys[44]
minus_held = keys[45]
fullstop_held = keys[46]
forwardslash_held = keys[47]
# Numbers across the top
zero_held = keys[48]
one_held = keys[49]
two_held = keys[50]
three_held = keys[51]
four_held = keys[52]
five_held = keys[53]
six_held = keys[54]
seven_held = keys[55]
eight_held = keys[56]
nine_held = keys[57]
# Miscellaneous
semicolon_held = keys[59]
backslash_held = keys[60]
equals_held = keys[61]
opensquarebracket_held = keys[91]
sharp_held = keys[92]
closesquarebracket_held = keys[93]
backtick_held = keys[96]
# Alphabet
a_held = keys[97]
b_held = keys[98]
c_held = keys[99]
d_held = keys[100]
e_held = keys[101]
f_held = keys[102]
g_held = keys[103]
h_held = keys[104]
i_held = keys[105]
j_held = keys[106]
k_held = keys[107]
l_held = keys[108]
m_held = keys[109]
n_held = keys[110]
o_held = keys[111]
p_held = keys[112]
q_held = keys[113]
r_held = keys[114]
s_held = keys[115]
t_held = keys[116]
u_held = keys[117]
v_held = keys[118]
w_held = keys[119]
x_held = keys[120]
y_held = keys[121]
z_held = keys[122]
# Miscellaneous
delete_held = keys[127]
# Numpad
numpad0_held = keys[256]
numpad1_held = keys[257]
numpad2_held = keys[258]
numpad3_held = keys[259]
numpad4_held = keys[260]
numpad5_held = keys[261]
numpad6_held = keys[262]
numpad7_held = keys[263]
numpad8_held = keys[264]
numpad9_held = keys[265]
numpaddivide_held = keys[267]
numpadmultiply_held = keys[268]
numpadminus_held = keys[269]
numpadplus_held = keys[270]
numpadenter_held = keys[271]
# Arrow keys
uparrow_held = keys[273]
downarrow_held = keys[274]
rightarrow_held = keys[275]
leftarrow_held = keys[276]
# Miscellaneous
insert_held = keys[277]
home_held = keys[278]
end_held = keys[279]
pageup_held = keys[280]
pagedown_held = keys[281]
# F keys
f1_held = keys[282]
f2_held = keys[283]
f3_held = keys[284]
f4_held = keys[285]
f5_held = keys[286]
f6_held = keys[287]
f7_held = keys[288]
f8_held = keys[289]
f9_held = keys[290]
f10_held = keys[291]
f11_held = keys[292]
f12_held = keys[293]
# Key modifiers
numlock = keys[300]
capslock = keys[301]
scrolllock_held = keys[302]
rightshift_held = keys[303]
leftshift_held = keys[304]
shift_held = rightshift_held or leftshift_held
rightcontrol_held = keys[305]
leftcontrol_held = keys[306]
altgrammar_held = keys[307]
alt_held = keys[308]
leftwindows_held = keys[311] #} these might be
rightwindows_held = keys[312] #} pointless (windows keys)
menubutton_held = keys[319]
# Calculating the number of keys pressed (for typing)
if accepting_text:  # accepting_text is assumed to be set elsewhere by the game code
keys_pressed = 0
for value in keys:
keys_pressed += value
if numlock:
keys_pressed -= 1
if capslock:
keys_pressed -= 1
if scrolllock_held:
keys_pressed -= 1
if rightshift_held:
keys_pressed -= 1
if leftshift_held:
keys_pressed -= 1
if rightcontrol_held:
keys_pressed -= 1
if leftcontrol_held:
keys_pressed -= 1
if altgrammar_held:
if leftcontrol_held:
keys_pressed -= 1
else:
keys_pressed -= 2
if alt_held:
keys_pressed -= 1
if leftwindows_held:
keys_pressed -= 1
if rightwindows_held:
keys_pressed -= 1
if menubutton_held:
keys_pressed -= 1
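# A more compact equivalent of the modifier discounting above (sketch, not
# part of the original file; assumes the same `keys` sequence returned by
# pygame.key.get_pressed()):
#     MODIFIER_KEYS = (300, 301, 302, 303, 304, 305, 306, 308, 311, 312, 319)
#     keys_pressed = sum(keys) - sum(keys[k] for k in MODIFIER_KEYS)
#     if keys[307]:  # AltGr also registers left Ctrl on some layouts
#         keys_pressed -= 1 if keys[306] else 2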
authors: ["[email protected]"]

----

blob_id: d1aa898f06c191a61523747bcb96a7d8b451c574
directory_id: 297497957c531d81ba286bc91253fbbb78b4d8be
path: /testing/web-platform/tests/tools/third_party/attrs/tests/test_make.py
content_id: 657fa517781caa02094eaa2ea109475471b0022c
detected_licenses: ["MIT", "LicenseRef-scancode-unknown-license-reference", "BSD-3-Clause"]
license_type: permissive
repo_name: marco-c/gecko-dev-comments-removed
snapshot_id: 7a9dd34045b07e6b22f0c636c0a836b9e639f9d3
revision_id: 61942784fb157763e65608e5a29b3729b0aa66fa
branch_name: refs/heads/master
visit_date: 2023-08-09T18:55:25.895853
revision_date: 2023-08-01T00:40:39
committer_date: 2023-08-01T00:40:39
github_id: 211297481
star_events_count: 0
fork_events_count: 0
gha_license_id: NOASSERTION
gha_event_created_at: 2019-09-29T01:27:49
gha_created_at: 2019-09-27T10:44:24
gha_language: C++
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 64779
extension: py
"""
Tests for `attr._make`.
"""
from __future__ import absolute_import, division, print_function
import copy
import functools
import gc
import inspect
import itertools
import sys
from operator import attrgetter
import pytest
from hypothesis import assume, given
from hypothesis.strategies import booleans, integers, lists, sampled_from, text
import attr
from attr import _config
from attr._compat import PY2, PY310, ordered_dict
from attr._make import (
Attribute,
Factory,
_AndValidator,
_Attributes,
_ClassBuilder,
_CountingAttr,
_determine_attrib_eq_order,
_determine_attrs_eq_order,
_determine_whether_to_implement,
_transform_attrs,
and_,
fields,
fields_dict,
make_class,
validate,
)
from attr.exceptions import (
DefaultAlreadySetError,
NotAnAttrsClassError,
PythonTooOldError,
)
from .strategies import (
gen_attr_names,
list_of_attrs,
optional_bool,
simple_attrs,
simple_attrs_with_metadata,
simple_attrs_without_metadata,
simple_classes,
)
from .utils import simple_attr
attrs_st = simple_attrs.map(lambda c: Attribute.from_counting_attr("name", c))
class TestCountingAttr(object):
"""
Tests for `attr`.
"""
def test_returns_Attr(self):
"""
Returns an instance of _CountingAttr.
"""
a = attr.ib()
assert isinstance(a, _CountingAttr)
def test_validators_lists_to_wrapped_tuples(self):
"""
        If a list is passed as validator, it is converted to a tuple and
        wrapped in an _AndValidator.
"""
def v1(_, __):
pass
def v2(_, __):
pass
a = attr.ib(validator=[v1, v2])
assert _AndValidator((v1, v2)) == a._validator
def test_validator_decorator_single(self):
"""
If _CountingAttr.validator is used as a decorator and there is no
decorator set, the decorated method is used as the validator.
"""
a = attr.ib()
@a.validator
def v():
pass
assert v == a._validator
@pytest.mark.parametrize(
"wrap", [lambda v: v, lambda v: [v], lambda v: and_(v)]
)
def test_validator_decorator(self, wrap):
"""
If _CountingAttr.validator is used as a decorator and there is already
        a validator set, the validators are composed using `and_`.
"""
def v(_, __):
pass
a = attr.ib(validator=wrap(v))
@a.validator
def v2(self, _, __):
pass
assert _AndValidator((v, v2)) == a._validator
def test_default_decorator_already_set(self):
"""
Raise DefaultAlreadySetError if the decorator is used after a default
has been set.
"""
a = attr.ib(default=42)
with pytest.raises(DefaultAlreadySetError):
@a.default
def f(self):
pass
def test_default_decorator_sets(self):
"""
Decorator wraps the method in a Factory with pass_self=True and sets
the default.
"""
a = attr.ib()
@a.default
def f(self):
pass
assert Factory(f, True) == a._default
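# Illustrative note, not part of the attrs test suite: the decorator form
#     x = attr.ib()
#     @x.default
#     def _x_default(self):
#         return []
# is equivalent to attr.ib(default=Factory(_x_default, takes_self=True)).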
def make_tc():
class TransformC(object):
z = attr.ib()
y = attr.ib()
x = attr.ib()
a = 42
return TransformC
class TestTransformAttrs(object):
"""
Tests for `_transform_attrs`.
"""
def test_no_modifications(self):
"""
Does not attach __attrs_attrs__ to the class.
"""
C = make_tc()
_transform_attrs(C, None, False, False, True, None)
assert None is getattr(C, "__attrs_attrs__", None)
def test_normal(self):
"""
Transforms every `_CountingAttr` and leaves others (a) be.
"""
C = make_tc()
attrs, _, _ = _transform_attrs(C, None, False, False, True, None)
assert ["z", "y", "x"] == [a.name for a in attrs]
def test_empty(self):
"""
No attributes works as expected.
"""
@attr.s
class C(object):
pass
assert _Attributes(((), [], {})) == _transform_attrs(
C, None, False, False, True, None
)
def test_transforms_to_attribute(self):
"""
All `_CountingAttr`s are transformed into `Attribute`s.
"""
C = make_tc()
attrs, base_attrs, _ = _transform_attrs(
C, None, False, False, True, None
)
assert [] == base_attrs
assert 3 == len(attrs)
assert all(isinstance(a, Attribute) for a in attrs)
def test_conflicting_defaults(self):
"""
Raises `ValueError` if attributes with defaults are followed by
mandatory attributes.
"""
class C(object):
x = attr.ib(default=None)
y = attr.ib()
with pytest.raises(ValueError) as e:
_transform_attrs(C, None, False, False, True, None)
assert (
"No mandatory attributes allowed after an attribute with a "
"default value or factory. Attribute in question: Attribute"
"(name='y', default=NOTHING, validator=None, repr=True, "
"eq=True, eq_key=None, order=True, order_key=None, "
"hash=None, init=True, "
"metadata=mappingproxy({}), type=None, converter=None, "
"kw_only=False, inherited=False, on_setattr=None)",
) == e.value.args
def test_kw_only(self):
"""
Converts all attributes, including base class' attributes, if `kw_only`
is provided. Therefore, `kw_only` allows attributes with defaults to
        precede mandatory attributes.
Updates in the subclass *don't* affect the base class attributes.
"""
@attr.s
class B(object):
b = attr.ib()
for b_a in B.__attrs_attrs__:
assert b_a.kw_only is False
class C(B):
x = attr.ib(default=None)
y = attr.ib()
attrs, base_attrs, _ = _transform_attrs(
C, None, False, True, True, None
)
assert len(attrs) == 3
assert len(base_attrs) == 1
for a in attrs:
assert a.kw_only is True
for b_a in B.__attrs_attrs__:
assert b_a.kw_only is False
def test_these(self):
"""
If these is passed, use it and ignore body and base classes.
"""
class Base(object):
z = attr.ib()
class C(Base):
y = attr.ib()
attrs, base_attrs, _ = _transform_attrs(
C, {"x": attr.ib()}, False, False, True, None
)
assert [] == base_attrs
assert (simple_attr("x"),) == attrs
def test_these_leave_body(self):
"""
If these is passed, no attributes are removed from the body.
"""
@attr.s(init=False, these={"x": attr.ib()})
class C(object):
x = 5
assert 5 == C().x
assert "C(x=5)" == repr(C())
def test_these_ordered(self):
"""
        If these is passed ordered attrs, their order is respected instead of
        the counter.
"""
b = attr.ib(default=2)
a = attr.ib(default=1)
@attr.s(these=ordered_dict([("a", a), ("b", b)]))
class C(object):
pass
assert "C(a=1, b=2)" == repr(C())
def test_multiple_inheritance_old(self):
"""
        Old multiple inheritance attribute collection behavior is retained.
See #285
"""
@attr.s
class A(object):
a1 = attr.ib(default="a1")
a2 = attr.ib(default="a2")
@attr.s
class B(A):
b1 = attr.ib(default="b1")
b2 = attr.ib(default="b2")
@attr.s
class C(B, A):
c1 = attr.ib(default="c1")
c2 = attr.ib(default="c2")
@attr.s
class D(A):
d1 = attr.ib(default="d1")
d2 = attr.ib(default="d2")
@attr.s
class E(C, D):
e1 = attr.ib(default="e1")
e2 = attr.ib(default="e2")
assert (
"E(a1='a1', a2='a2', b1='b1', b2='b2', c1='c1', c2='c2', d1='d1', "
"d2='d2', e1='e1', e2='e2')"
) == repr(E())
def test_overwrite_proper_mro(self):
"""
        The proper MRO path works for single overwrites too.
"""
@attr.s(collect_by_mro=True)
class C(object):
x = attr.ib(default=1)
@attr.s(collect_by_mro=True)
class D(C):
x = attr.ib(default=2)
assert "D(x=2)" == repr(D())
def test_multiple_inheritance_proper_mro(self):
"""
Attributes are collected according to the MRO.
See #428
"""
@attr.s
class A(object):
a1 = attr.ib(default="a1")
a2 = attr.ib(default="a2")
@attr.s
class B(A):
b1 = attr.ib(default="b1")
b2 = attr.ib(default="b2")
@attr.s
class C(B, A):
c1 = attr.ib(default="c1")
c2 = attr.ib(default="c2")
@attr.s
class D(A):
d1 = attr.ib(default="d1")
d2 = attr.ib(default="d2")
@attr.s(collect_by_mro=True)
class E(C, D):
e1 = attr.ib(default="e1")
e2 = attr.ib(default="e2")
assert (
"E(a1='a1', a2='a2', d1='d1', d2='d2', b1='b1', b2='b2', c1='c1', "
"c2='c2', e1='e1', e2='e2')"
) == repr(E())
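        # Illustrative note, not part of the attrs test suite: with
        # collect_by_mro=True the fields follow E's reversed MRO
        # (A, D, B, C, E), which is why d1/d2 now precede b1/b2, unlike the
        # legacy ordering exercised in test_multiple_inheritance_old.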
def test_mro(self):
"""
Attributes and methods are looked up the same way.
See #428
"""
@attr.s(collect_by_mro=True)
class A(object):
x = attr.ib(10)
def xx(self):
return 10
@attr.s(collect_by_mro=True)
class B(A):
y = attr.ib(20)
@attr.s(collect_by_mro=True)
class C(A):
x = attr.ib(50)
def xx(self):
return 50
@attr.s(collect_by_mro=True)
class D(B, C):
pass
d = D()
assert d.x == d.xx()
def test_inherited(self):
"""
Inherited Attributes have `.inherited` True, otherwise False.
"""
@attr.s
class A(object):
a = attr.ib()
@attr.s
class B(A):
b = attr.ib()
@attr.s
class C(B):
a = attr.ib()
c = attr.ib()
f = attr.fields
assert False is f(A).a.inherited
assert True is f(B).a.inherited
assert False is f(B).b.inherited
assert False is f(C).a.inherited
assert True is f(C).b.inherited
assert False is f(C).c.inherited
class TestAttributes(object):
"""
Tests for the `attrs`/`attr.s` class decorator.
"""
@pytest.mark.skipif(not PY2, reason="No old-style classes in Py3")
def test_catches_old_style(self):
"""
Raises TypeError on old-style classes.
"""
with pytest.raises(TypeError) as e:
@attr.s
class C:
pass
assert ("attrs only works with new-style classes.",) == e.value.args
def test_sets_attrs(self):
"""
Sets the `__attrs_attrs__` class attribute with a list of `Attribute`s.
"""
@attr.s
class C(object):
x = attr.ib()
assert "x" == C.__attrs_attrs__[0].name
assert all(isinstance(a, Attribute) for a in C.__attrs_attrs__)
def test_empty(self):
"""
No attributes, no problems.
"""
@attr.s
class C3(object):
pass
assert "C3()" == repr(C3())
assert C3() == C3()
@given(attr=attrs_st, attr_name=sampled_from(Attribute.__slots__))
def test_immutable(self, attr, attr_name):
"""
Attribute instances are immutable.
"""
with pytest.raises(AttributeError):
setattr(attr, attr_name, 1)
@pytest.mark.parametrize(
"method_name", ["__repr__", "__eq__", "__hash__", "__init__"]
)
def test_adds_all_by_default(self, method_name):
"""
If no further arguments are supplied, all add_XXX functions except
add_hash are applied. __hash__ is set to None.
"""
sentinel = object()
class C(object):
x = attr.ib()
setattr(C, method_name, sentinel)
C = attr.s(C)
meth = getattr(C, method_name)
assert sentinel != meth
if method_name == "__hash__":
assert meth is None
@pytest.mark.parametrize(
"arg_name, method_name",
[
("repr", "__repr__"),
("eq", "__eq__"),
("order", "__le__"),
("hash", "__hash__"),
("init", "__init__"),
],
)
def test_respects_add_arguments(self, arg_name, method_name):
"""
If a certain `XXX` is `False`, `__XXX__` is not added to the class.
"""
sentinel = object()
am_args = {
"repr": True,
"eq": True,
"order": True,
"hash": True,
"init": True,
}
am_args[arg_name] = False
if arg_name == "eq":
am_args["order"] = False
class C(object):
x = attr.ib()
setattr(C, method_name, sentinel)
C = attr.s(**am_args)(C)
assert sentinel == getattr(C, method_name)
@pytest.mark.parametrize("init", [True, False])
def test_respects_init_attrs_init(self, init):
"""
If init=False, adds __attrs_init__ to the class.
Otherwise, it does not.
"""
class C(object):
x = attr.ib()
C = attr.s(init=init)(C)
assert hasattr(C, "__attrs_init__") != init
@pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
@given(slots_outer=booleans(), slots_inner=booleans())
def test_repr_qualname(self, slots_outer, slots_inner):
"""
On Python 3, the name in repr is the __qualname__.
"""
@attr.s(slots=slots_outer)
class C(object):
@attr.s(slots=slots_inner)
class D(object):
pass
assert "C.D()" == repr(C.D())
assert "GC.D()" == repr(GC.D())
@given(slots_outer=booleans(), slots_inner=booleans())
def test_repr_fake_qualname(self, slots_outer, slots_inner):
"""
Setting repr_ns overrides a potentially guessed namespace.
"""
@attr.s(slots=slots_outer)
class C(object):
@attr.s(repr_ns="C", slots=slots_inner)
class D(object):
pass
assert "C.D()" == repr(C.D())
@pytest.mark.skipif(PY2, reason="__qualname__ is PY3-only.")
@given(slots_outer=booleans(), slots_inner=booleans())
def test_name_not_overridden(self, slots_outer, slots_inner):
"""
On Python 3, __name__ is different from __qualname__.
"""
@attr.s(slots=slots_outer)
class C(object):
@attr.s(slots=slots_inner)
class D(object):
pass
assert C.D.__name__ == "D"
assert C.D.__qualname__ == C.__qualname__ + ".D"
@pytest.mark.parametrize("with_validation", [True, False])
def test_pre_init(self, with_validation, monkeypatch):
"""
Verify that __attrs_pre_init__ gets called if defined.
"""
monkeypatch.setattr(_config, "_run_validators", with_validation)
@attr.s
class C(object):
def __attrs_pre_init__(self2):
self2.z = 30
c = C()
assert 30 == getattr(c, "z", None)
@pytest.mark.parametrize("with_validation", [True, False])
def test_post_init(self, with_validation, monkeypatch):
"""
Verify that __attrs_post_init__ gets called if defined.
"""
monkeypatch.setattr(_config, "_run_validators", with_validation)
@attr.s
class C(object):
x = attr.ib()
y = attr.ib()
def __attrs_post_init__(self2):
self2.z = self2.x + self2.y
c = C(x=10, y=20)
assert 30 == getattr(c, "z", None)
@pytest.mark.parametrize("with_validation", [True, False])
def test_pre_post_init_order(self, with_validation, monkeypatch):
"""
Verify that __attrs_pre_init__ and __attrs_post_init__ run in the correct order when both are defined.
"""
monkeypatch.setattr(_config, "_run_validators", with_validation)
@attr.s
class C(object):
x = attr.ib()
def __attrs_pre_init__(self2):
self2.z = 30
def __attrs_post_init__(self2):
self2.z += self2.x
c = C(x=10)
assert 40 == getattr(c, "z", None)
def test_types(self):
"""
Sets the `Attribute.type` attr from the type argument.
"""
@attr.s
class C(object):
x = attr.ib(type=int)
y = attr.ib(type=str)
z = attr.ib()
assert int is fields(C).x.type
assert str is fields(C).y.type
assert None is fields(C).z.type
@pytest.mark.parametrize("slots", [True, False])
def test_clean_class(self, slots):
"""
Attribute definitions do not appear on the class body after @attr.s.
"""
@attr.s(slots=slots)
class C(object):
x = attr.ib()
x = getattr(C, "x", None)
assert not isinstance(x, _CountingAttr)
def test_factory_sugar(self):
"""
Passing factory=f is syntactic sugar for passing default=Factory(f).
"""
@attr.s
class C(object):
x = attr.ib(factory=list)
assert Factory(list) == attr.fields(C).x.default
def test_sugar_factory_mutex(self):
"""
Passing both default and factory raises ValueError.
"""
with pytest.raises(ValueError, match="mutually exclusive"):
@attr.s
class C(object):
x = attr.ib(factory=list, default=Factory(list))
def test_sugar_callable(self):
"""
Factory has to be a callable to prevent people from passing Factory
into it.
"""
with pytest.raises(ValueError, match="must be a callable"):
@attr.s
class C(object):
x = attr.ib(factory=Factory(list))
def test_inherited_does_not_affect_hashing_and_equality(self):
"""
Whether or not an Attribute has been inherited doesn't affect how it's
hashed and compared.
"""
@attr.s
class BaseClass(object):
x = attr.ib()
@attr.s
class SubClass(BaseClass):
pass
ba = attr.fields(BaseClass)[0]
sa = attr.fields(SubClass)[0]
assert ba == sa
assert hash(ba) == hash(sa)
class TestKeywordOnlyAttributes(object):
"""
Tests for keyword-only attributes.
"""
def test_adds_keyword_only_arguments(self):
"""
Attributes can be added as keyword-only.
"""
@attr.s
class C(object):
a = attr.ib()
b = attr.ib(default=2, kw_only=True)
c = attr.ib(kw_only=True)
d = attr.ib(default=attr.Factory(lambda: 4), kw_only=True)
c = C(1, c=3)
assert c.a == 1
assert c.b == 2
assert c.c == 3
assert c.d == 4
def test_ignores_kw_only_when_init_is_false(self):
"""
Specifying ``kw_only=True`` when ``init=False`` is essentially a no-op.
"""
@attr.s
class C(object):
x = attr.ib(init=False, default=0, kw_only=True)
y = attr.ib()
c = C(1)
assert c.x == 0
assert c.y == 1
def test_keyword_only_attributes_presence(self):
"""
Raises `TypeError` when keyword-only arguments are
not specified.
"""
@attr.s
class C(object):
x = attr.ib(kw_only=True)
with pytest.raises(TypeError) as e:
C()
if PY2:
assert (
"missing required keyword-only argument: 'x'"
) in e.value.args[0]
else:
assert (
"missing 1 required keyword-only argument: 'x'"
) in e.value.args[0]
def test_keyword_only_attributes_unexpected(self):
"""
Raises `TypeError` when unexpected keyword argument passed.
"""
@attr.s
class C(object):
x = attr.ib(kw_only=True)
with pytest.raises(TypeError) as e:
C(x=5, y=10)
assert "got an unexpected keyword argument 'y'" in e.value.args[0]
def test_keyword_only_attributes_can_come_in_any_order(self):
"""
Mandatory vs non-mandatory attr order only matters when they are part
of the __init__ signature and when they aren't kw_only (which are
moved to the end and can be mandatory or non-mandatory in any order,
as they will be specified as keyword args anyway).
"""
@attr.s
class C(object):
a = attr.ib(kw_only=True)
b = attr.ib(kw_only=True, default="b")
c = attr.ib(kw_only=True)
d = attr.ib()
e = attr.ib(default="e")
f = attr.ib(kw_only=True)
g = attr.ib(kw_only=True, default="g")
h = attr.ib(kw_only=True)
i = attr.ib(init=False)
c = C("d", a="a", c="c", f="f", h="h")
assert c.a == "a"
assert c.b == "b"
assert c.c == "c"
assert c.d == "d"
assert c.e == "e"
assert c.f == "f"
assert c.g == "g"
assert c.h == "h"
def test_keyword_only_attributes_allow_subclassing(self):
"""
A subclass can define keyword-only attributes without defaults
when the base class has attributes with defaults.
"""
@attr.s
class Base(object):
x = attr.ib(default=0)
@attr.s
class C(Base):
y = attr.ib(kw_only=True)
c = C(y=1)
assert c.x == 0
assert c.y == 1
def test_keyword_only_class_level(self):
"""
`kw_only` can be provided at the attr.s level, converting all
attributes to `kw_only`.
"""
@attr.s(kw_only=True)
class C(object):
x = attr.ib()
y = attr.ib(kw_only=True)
with pytest.raises(TypeError):
C(0, y=1)
c = C(x=0, y=1)
assert c.x == 0
assert c.y == 1
def test_keyword_only_class_level_subclassing(self):
"""
Subclass `kw_only` propagates to attrs inherited from the base,
allowing non-default following default.
"""
@attr.s
class Base(object):
x = attr.ib(default=0)
@attr.s(kw_only=True)
class C(Base):
y = attr.ib()
with pytest.raises(TypeError):
C(1)
c = C(x=0, y=1)
assert c.x == 0
assert c.y == 1
def test_init_false_attribute_after_keyword_attribute(self):
"""
A positional attribute cannot follow a `kw_only` attribute,
but an `init=False` attribute can because it won't appear
in `__init__`.
"""
@attr.s
class KwArgBeforeInitFalse(object):
kwarg = attr.ib(kw_only=True)
non_init_function_default = attr.ib(init=False)
non_init_keyword_default = attr.ib(
init=False, default="default-by-keyword"
)
@non_init_function_default.default
def _init_to_init(self):
return self.kwarg + "b"
c = KwArgBeforeInitFalse(kwarg="a")
assert c.kwarg == "a"
assert c.non_init_function_default == "ab"
assert c.non_init_keyword_default == "default-by-keyword"
def test_init_false_attribute_after_keyword_attribute_with_inheritance(
self,
):
"""
A positional attribute cannot follow a `kw_only` attribute,
but an `init=False` attribute can because it won't appear
in `__init__`. This test checks that we allow this
even when the `kw_only` attribute appears in a parent class.
@attr.s
class KwArgBeforeInitFalseParent(object):
kwarg = attr.ib(kw_only=True)
@attr.s
class KwArgBeforeInitFalseChild(KwArgBeforeInitFalseParent):
non_init_function_default = attr.ib(init=False)
non_init_keyword_default = attr.ib(
init=False, default="default-by-keyword"
)
@non_init_function_default.default
def _init_to_init(self):
return self.kwarg + "b"
c = KwArgBeforeInitFalseChild(kwarg="a")
assert c.kwarg == "a"
assert c.non_init_function_default == "ab"
assert c.non_init_keyword_default == "default-by-keyword"
@pytest.mark.skipif(not PY2, reason="PY2-specific keyword-only error behavior")
class TestKeywordOnlyAttributesOnPy2(object):
"""
Tests for keyword-only attribute behavior on py2.
"""
def test_no_init(self):
"""
Keyword-only is a no-op, not an error, if ``init=False``.
"""
@attr.s(kw_only=True, init=False)
class ClassLevel(object):
a = attr.ib()
@attr.s(init=False)
class AttrLevel(object):
a = attr.ib(kw_only=True)
@attr.s
class GC(object):
@attr.s
class D(object):
pass
class TestMakeClass(object):
"""
Tests for `make_class`.
"""
@pytest.mark.parametrize("ls", [list, tuple])
def test_simple(self, ls):
"""
Passing a list of strings creates attributes with default args.
"""
C1 = make_class("C1", ls(["a", "b"]))
@attr.s
class C2(object):
a = attr.ib()
b = attr.ib()
assert C1.__attrs_attrs__ == C2.__attrs_attrs__
def test_dict(self):
"""
Passing a dict of name: _CountingAttr creates an equivalent class.
"""
C1 = make_class(
"C1", {"a": attr.ib(default=42), "b": attr.ib(default=None)}
)
@attr.s
class C2(object):
a = attr.ib(default=42)
b = attr.ib(default=None)
assert C1.__attrs_attrs__ == C2.__attrs_attrs__
def test_attr_args(self):
"""
attributes_arguments are passed through to `attr.s`.
"""
C = make_class("C", ["x"], repr=False)
assert repr(C(1)).startswith("<tests.test_make.C object at 0x")
def test_catches_wrong_attrs_type(self):
"""
Raise `TypeError` if an invalid type for attrs is passed.
"""
with pytest.raises(TypeError) as e:
make_class("C", object())
assert ("attrs argument must be a dict or a list.",) == e.value.args
def test_bases(self):
"""
The bases parameter defaults to ``(object,)``, and subclassing works correctly.
"""
class D(object):
pass
cls = make_class("C", {})
assert cls.__mro__[-1] == object
cls = make_class("C", {}, bases=(D,))
assert D in cls.__mro__
assert isinstance(cls(), D)
@pytest.mark.parametrize("slots", [True, False])
def test_clean_class(self, slots):
"""
Attribute definitions do not appear on the class body.
"""
C = make_class("C", ["x"], slots=slots)
x = getattr(C, "x", None)
assert not isinstance(x, _CountingAttr)
def test_missing_sys_getframe(self, monkeypatch):
"""
`make_class()` does not fail when `sys._getframe()` is not available.
"""
monkeypatch.delattr(sys, "_getframe")
C = make_class("C", ["x"])
assert 1 == len(C.__attrs_attrs__)
def test_make_class_ordered(self):
"""
If `make_class()` is passed ordered attrs, their order is respected
instead of the counter.
"""
b = attr.ib(default=2)
a = attr.ib(default=1)
C = attr.make_class("C", ordered_dict([("a", a), ("b", b)]))
assert "C(a=1, b=2)" == repr(C())
@pytest.mark.skipif(PY2, reason="Python 3-only")
def test_generic_dynamic_class(self):
"""
make_class can create generic dynamic classes.
https://github.com/python-attrs/attrs/issues/756
https://bugs.python.org/issue33188
"""
from types import new_class
from typing import Generic, TypeVar
MyTypeVar = TypeVar("MyTypeVar")
MyParent = new_class("MyParent", (Generic[MyTypeVar],), {})
attr.make_class("test", {"id": attr.ib(type=str)}, (MyParent[int],))
class TestFields(object):
"""
Tests for `fields`.
"""
@given(simple_classes())
def test_instance(self, C):
"""
Raises `TypeError` on non-classes.
"""
with pytest.raises(TypeError) as e:
fields(C())
assert "Passed object must be a class." == e.value.args[0]
def test_handler_non_attrs_class(self):
"""
Raises `NotAnAttrsClassError` (a `ValueError` subclass) if passed a
non-``attrs`` class.
"""
with pytest.raises(NotAnAttrsClassError) as e:
fields(object)
assert (
"{o!r} is not an attrs-decorated class.".format(o=object)
) == e.value.args[0]
@given(simple_classes())
def test_fields(self, C):
"""
Returns a list of `Attribute`s.
"""
assert all(isinstance(a, Attribute) for a in fields(C))
@given(simple_classes())
def test_fields_properties(self, C):
"""
`fields` returns a tuple whose items are also accessible as named attributes.
"""
for attribute in fields(C):
assert getattr(fields(C), attribute.name) is attribute
class TestFieldsDict(object):
"""
Tests for `fields_dict`.
"""
@given(simple_classes())
def test_instance(self, C):
"""
Raises `TypeError` on non-classes.
"""
with pytest.raises(TypeError) as e:
fields_dict(C())
assert "Passed object must be a class." == e.value.args[0]
def test_handler_non_attrs_class(self):
"""
Raises `NotAnAttrsClassError` (a `ValueError` subclass) if passed a
non-``attrs`` class.
"""
with pytest.raises(NotAnAttrsClassError) as e:
fields_dict(object)
assert (
"{o!r} is not an attrs-decorated class.".format(o=object)
) == e.value.args[0]
@given(simple_classes())
def test_fields_dict(self, C):
"""
Returns an ordered dict of ``{attribute_name: Attribute}``.
"""
d = fields_dict(C)
assert isinstance(d, ordered_dict)
assert list(fields(C)) == list(d.values())
assert [a.name for a in fields(C)] == [field_name for field_name in d]
class TestConverter(object):
"""
Tests for attribute conversion.
"""
def test_convert(self):
"""
Return value of converter is used as the attribute's value.
"""
C = make_class(
"C", {"x": attr.ib(converter=lambda v: v + 1), "y": attr.ib()}
)
c = C(1, 2)
assert c.x == 2
assert c.y == 2
@given(integers(), booleans())
def test_convert_property(self, val, init):
"""
Property tests for attributes using converter.
"""
C = make_class(
"C",
{
"y": attr.ib(),
"x": attr.ib(
init=init, default=val, converter=lambda v: v + 1
),
},
)
c = C(2)
assert c.x == val + 1
assert c.y == 2
@given(integers(), booleans())
def test_converter_factory_property(self, val, init):
"""
Property tests for attributes with converter, and a factory default.
"""
C = make_class(
"C",
ordered_dict(
[
("y", attr.ib()),
(
"x",
attr.ib(
init=init,
default=Factory(lambda: val),
converter=lambda v: v + 1,
),
),
]
),
)
c = C(2)
assert c.x == val + 1
assert c.y == 2
def test_factory_takes_self(self):
"""
If takes_self on factories is True, self is passed.
"""
C = make_class(
"C",
{
"x": attr.ib(
default=Factory((lambda self: self), takes_self=True)
)
},
)
i = C()
assert i is i.x
def test_factory_hashable(self):
"""
Factory is hashable.
"""
assert hash(Factory(None, False)) == hash(Factory(None, False))
def test_convert_before_validate(self):
"""
Validation happens after conversion.
"""
def validator(inst, attr, val):
raise RuntimeError("foo")
C = make_class(
"C",
{
"x": attr.ib(validator=validator, converter=lambda v: 1 / 0),
"y": attr.ib(),
},
)
with pytest.raises(ZeroDivisionError):
C(1, 2)
def test_frozen(self):
"""
Converters circumvent immutability.
"""
C = make_class(
"C", {"x": attr.ib(converter=lambda v: int(v))}, frozen=True
)
C("1")
class TestValidate(object):
"""
Tests for `validate`.
"""
def test_success(self):
"""
If the validator succeeds, nothing gets raised.
"""
C = make_class(
"C", {"x": attr.ib(validator=lambda *a: None), "y": attr.ib()}
)
validate(C(1, 2))
def test_propagates(self):
"""
The exception raised by the validator is passed through.
"""
def raiser(_, __, value):
if value == 42:
raise FloatingPointError
C = make_class("C", {"x": attr.ib(validator=raiser)})
i = C(1)
i.x = 42
with pytest.raises(FloatingPointError):
validate(i)
def test_run_validators(self):
"""
Setting `_run_validators` to False prevents validators from running.
"""
_config._run_validators = False
obj = object()
def raiser(_, __, ___):
raise Exception(obj)
C = make_class("C", {"x": attr.ib(validator=raiser)})
c = C(1)
validate(c)
assert 1 == c.x
_config._run_validators = True
with pytest.raises(Exception):
validate(c)
with pytest.raises(Exception) as e:
C(1)
assert (obj,) == e.value.args
def test_multiple_validators(self):
"""
If a list is passed as a validator, its items are combined into a single
validator, and each of them must pass.
"""
def v1(_, __, value):
if value == 23:
raise TypeError("omg")
def v2(_, __, value):
if value == 42:
raise ValueError("omg")
C = make_class("C", {"x": attr.ib(validator=[v1, v2])})
validate(C(1))
with pytest.raises(TypeError) as e:
C(23)
assert "omg" == e.value.args[0]
with pytest.raises(ValueError) as e:
C(42)
assert "omg" == e.value.args[0]
def test_multiple_empty(self):
"""
Empty list/tuple for validator is the same as None.
"""
C1 = make_class("C", {"x": attr.ib(validator=[])})
C2 = make_class("C", {"x": attr.ib(validator=None)})
assert inspect.getsource(C1.__init__) == inspect.getsource(C2.__init__)
sorted_lists_of_attrs = list_of_attrs.map(
lambda l: sorted(l, key=attrgetter("counter"))
)
class TestMetadata(object):
"""
Tests for metadata handling.
"""
@given(sorted_lists_of_attrs)
def test_metadata_present(self, list_of_attrs):
"""
Assert dictionaries are copied and present.
"""
C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
for hyp_attr, class_attr in zip(list_of_attrs, fields(C)):
if hyp_attr.metadata is None:
assert class_attr.metadata is not None
assert len(class_attr.metadata) == 0
else:
assert hyp_attr.metadata == class_attr.metadata
for k in class_attr.metadata:
assert hyp_attr.metadata[k] == class_attr.metadata[k]
assert hyp_attr.metadata.get(k) == class_attr.metadata.get(
k
)
@given(simple_classes(), text())
def test_metadata_immutability(self, C, string):
"""
The metadata dict should be best-effort immutable.
"""
for a in fields(C):
with pytest.raises(TypeError):
a.metadata[string] = string
with pytest.raises(AttributeError):
a.metadata.update({string: string})
with pytest.raises(AttributeError):
a.metadata.clear()
with pytest.raises(AttributeError):
a.metadata.setdefault(string, string)
for k in a.metadata:
with pytest.raises((TypeError, IndexError)):
del a.metadata[k]
with pytest.raises(AttributeError):
a.metadata.pop(k)
with pytest.raises(AttributeError):
a.metadata.popitem()
@given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
def test_empty_metadata_singleton(self, list_of_attrs):
"""
All empty metadata attributes share the same empty metadata dict.
"""
C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
for a in fields(C)[1:]:
assert a.metadata is fields(C)[0].metadata
@given(lists(simple_attrs_without_metadata, min_size=2, max_size=5))
def test_empty_countingattr_metadata_independent(self, list_of_attrs):
"""
All empty metadata attributes are independent before ``@attr.s``.
"""
for x, y in itertools.combinations(list_of_attrs, 2):
assert x.metadata is not y.metadata
@given(lists(simple_attrs_with_metadata(), min_size=2, max_size=5))
def test_not_none_metadata(self, list_of_attrs):
"""
Non-empty metadata attributes exist as fields after ``@attr.s``.
"""
C = make_class("C", dict(zip(gen_attr_names(), list_of_attrs)))
assert len(fields(C)) > 0
for cls_a, raw_a in zip(fields(C), list_of_attrs):
assert cls_a.metadata != {}
assert cls_a.metadata == raw_a.metadata
def test_metadata(self):
"""
If metadata that is not None is passed, it is used.
This is necessary for coverage because the previous test is
hypothesis-based.
"""
md = {}
a = attr.ib(metadata=md)
assert md is a.metadata
class TestClassBuilder(object):
"""
Tests for `_ClassBuilder`.
"""
def test_repr_str(self):
"""
Trying to add a `__str__` without having a `__repr__` raises a
ValueError.
"""
with pytest.raises(ValueError) as ei:
make_class("C", {}, repr=False, str=True)
assert (
"__str__ can only be generated if a __repr__ exists.",
) == ei.value.args
def test_repr(self):
"""
repr of builder itself makes sense.
"""
class C(object):
pass
b = _ClassBuilder(
C,
None,
True,
True,
False,
False,
False,
False,
False,
False,
True,
None,
False,
None,
)
assert "<_ClassBuilder(cls=C)>" == repr(b)
def test_returns_self(self):
"""
All methods return the builder for chaining.
"""
class C(object):
x = attr.ib()
b = _ClassBuilder(
C,
None,
True,
True,
False,
False,
False,
False,
False,
False,
True,
None,
False,
None,
)
cls = (
b.add_eq()
.add_order()
.add_hash()
.add_init()
.add_attrs_init()
.add_repr("ns")
.add_str()
.build_class()
)
assert "ns.C(x=1)" == repr(cls(1))
@pytest.mark.parametrize(
"meth_name",
[
"__init__",
"__hash__",
"__repr__",
"__str__",
"__eq__",
"__ne__",
"__lt__",
"__le__",
"__gt__",
"__ge__",
],
)
def test_attaches_meta_dunders(self, meth_name):
"""
Generated methods have correct __module__, __name__, and __qualname__
attributes.
"""
@attr.s(hash=True, str=True)
class C(object):
def organic(self):
pass
@attr.s(hash=True, str=True)
class D(object):
pass
meth_C = getattr(C, meth_name)
meth_D = getattr(D, meth_name)
assert meth_name == meth_C.__name__ == meth_D.__name__
assert C.organic.__module__ == meth_C.__module__ == meth_D.__module__
if not PY2:
organic_prefix = C.organic.__qualname__.rsplit(".", 1)[0]
assert organic_prefix + "." + meth_name == meth_C.__qualname__
def test_handles_missing_meta_on_class(self):
"""
If the class doesn't have a __module__ or __qualname__, the method
doesn't get them either.
"""
class C(object):
pass
b = _ClassBuilder(
C,
these=None,
slots=False,
frozen=False,
weakref_slot=True,
getstate_setstate=False,
auto_attribs=False,
is_exc=False,
kw_only=False,
cache_hash=False,
collect_by_mro=True,
on_setattr=None,
has_custom_setattr=False,
field_transformer=None,
)
b._cls = {}
def fake_meth(self):
pass
fake_meth.__module__ = "42"
fake_meth.__qualname__ = "23"
rv = b._add_method_dunders(fake_meth)
assert "42" == rv.__module__ == fake_meth.__module__
assert "23" == rv.__qualname__ == fake_meth.__qualname__
def test_weakref_setstate(self):
"""
__weakref__ is not set in __setstate__ because it's not writable in
slotted classes.
"""
@attr.s(slots=True)
class C(object):
__weakref__ = attr.ib(
init=False, hash=False, repr=False, eq=False, order=False
)
assert C() == copy.deepcopy(C())
def test_no_references_to_original(self):
"""
When subclassing a slotted class, there are no stray references to the
original class.
"""
@attr.s(slots=True)
class C(object):
pass
@attr.s(slots=True)
class C2(C):
pass
gc.collect()
assert [C2] == C.__subclasses__()
def _get_copy_kwargs(include_slots=True):
"""
Generate a list of compatible attr.s arguments for the `copy` tests.
"""
options = ["frozen", "hash", "cache_hash"]
if include_slots:
options.extend(["slots", "weakref_slot"])
out_kwargs = []
for args in itertools.product([True, False], repeat=len(options)):
kwargs = dict(zip(options, args))
kwargs["hash"] = kwargs["hash"] or None
if kwargs["cache_hash"] and not (
kwargs["frozen"] or kwargs["hash"]
):
continue
out_kwargs.append(kwargs)
return out_kwargs
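# One combination this yields, for illustration:
# {'frozen': True, 'hash': None, 'cache_hash': False, 'slots': True, 'weakref_slot': False}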
@pytest.mark.parametrize("kwargs", _get_copy_kwargs())
def test_copy(self, kwargs):
"""
Ensure that an attrs class can be copied successfully.
"""
@attr.s(eq=True, **kwargs)
class C(object):
x = attr.ib()
a = C(1)
b = copy.deepcopy(a)
assert a == b
@pytest.mark.parametrize("kwargs", _get_copy_kwargs(include_slots=False))
def test_copy_custom_setstate(self, kwargs):
"""
Ensure that non-slots classes respect a custom __setstate__.
"""
@attr.s(eq=True, **kwargs)
class C(object):
x = attr.ib()
def __getstate__(self):
return self.__dict__
def __setstate__(self, state):
state["x"] *= 5
self.__dict__.update(state)
expected = C(25)
actual = copy.copy(C(5))
assert actual == expected
class TestMakeOrder:
"""
Tests for _make_order().
"""
def test_subclasses_cannot_be_compared(self):
"""
Calling comparison methods on subclasses raises a TypeError.
We use the actual operation so we get an error raised on Python 3.
"""
@attr.s
class A(object):
a = attr.ib()
@attr.s
class B(A):
pass
a = A(42)
b = B(42)
assert a <= a
assert a >= a
assert not a < a
assert not a > a
assert (
NotImplemented
== a.__lt__(b)
== a.__le__(b)
== a.__gt__(b)
== a.__ge__(b)
)
if not PY2:
with pytest.raises(TypeError):
a <= b
with pytest.raises(TypeError):
a >= b
with pytest.raises(TypeError):
a < b
with pytest.raises(TypeError):
a > b
class TestDetermineAttrsEqOrder(object):
def test_default(self):
"""
If all are set to None, set both eq and order to the passed default.
"""
assert (42, 42) == _determine_attrs_eq_order(None, None, None, 42)
@pytest.mark.parametrize("eq", [True, False])
def test_order_mirrors_eq_by_default(self, eq):
"""
If order is None, it mirrors eq.
"""
assert (eq, eq) == _determine_attrs_eq_order(None, eq, None, True)
def test_order_without_eq(self):
"""
eq=False, order=True raises a meaningful ValueError.
"""
with pytest.raises(
ValueError, match="`order` can only be True if `eq` is True too."
):
_determine_attrs_eq_order(None, False, True, True)
@given(cmp=booleans(), eq=optional_bool, order=optional_bool)
def test_mix(self, cmp, eq, order):
"""
If cmp is not None, eq and order must be None and vice versa.
"""
assume(eq is not None or order is not None)
with pytest.raises(
ValueError, match="Don't mix `cmp` with `eq' and `order`."
):
_determine_attrs_eq_order(cmp, eq, order, True)
class TestDetermineAttribEqOrder(object):
def test_default(self):
"""
If all are set to None, set both eq and order to the passed default.
"""
assert (42, None, 42, None) == _determine_attrib_eq_order(
None, None, None, 42
)
def test_eq_callable_order_boolean(self):
"""
eq=callable or order=callable need to be transformed into eq/eq_key
or order/order_key.
"""
assert (True, str.lower, False, None) == _determine_attrib_eq_order(
None, str.lower, False, True
)
def test_eq_callable_order_callable(self):
"""
eq=callable or order=callable need to be transformed into eq/eq_key
or order/order_key.
"""
assert (True, str.lower, True, abs) == _determine_attrib_eq_order(
None, str.lower, abs, True
)
def test_eq_boolean_order_callable(self):
"""
eq=callable or order=callable need to be transformed into eq/eq_key
or order/order_key.
"""
assert (True, None, True, str.lower) == _determine_attrib_eq_order(
None, True, str.lower, True
)
@pytest.mark.parametrize("eq", [True, False])
def test_order_mirrors_eq_by_default(self, eq):
"""
If order is None, it mirrors eq.
"""
assert (eq, None, eq, None) == _determine_attrib_eq_order(
None, eq, None, True
)
def test_order_without_eq(self):
"""
eq=False, order=True raises a meaningful ValueError.
"""
with pytest.raises(
ValueError, match="`order` can only be True if `eq` is True too."
):
_determine_attrib_eq_order(None, False, True, True)
@given(cmp=booleans(), eq=optional_bool, order=optional_bool)
def test_mix(self, cmp, eq, order):
"""
If cmp is not None, eq and order must be None and vice versa.
"""
assume(eq is not None or order is not None)
with pytest.raises(
ValueError, match="Don't mix `cmp` with `eq' and `order`."
):
_determine_attrib_eq_order(cmp, eq, order, True)
class TestDocs:
@pytest.mark.parametrize(
"meth_name",
[
"__init__",
"__repr__",
"__eq__",
"__ne__",
"__lt__",
"__le__",
"__gt__",
"__ge__",
],
)
def test_docs(self, meth_name):
"""
Tests the presence and correctness of the documentation
for the generated methods.
"""
@attr.s
class A(object):
pass
if hasattr(A, "__qualname__"):
method = getattr(A, meth_name)
expected = "Method generated by attrs for class {}.".format(
A.__qualname__
)
assert expected == method.__doc__
@pytest.mark.skipif(not PY2, reason="Needs to be only caught on Python 2.")
def test_auto_detect_raises_on_py2():
"""
Trying to pass auto_detect=True to attr.s raises PythonTooOldError.
"""
with pytest.raises(PythonTooOldError):
attr.s(auto_detect=True)
class BareC(object):
pass
class BareSlottedC(object):
__slots__ = ()
@pytest.mark.skipif(PY2, reason="Auto-detection is Python 3-only.")
class TestAutoDetect:
@pytest.mark.parametrize("C", (BareC, BareSlottedC))
def test_determine_detects_non_presence_correctly(self, C):
"""
On an empty class, nothing should be detected.
"""
assert True is _determine_whether_to_implement(
C, None, True, ("__init__",)
)
assert True is _determine_whether_to_implement(
C, None, True, ("__repr__",)
)
assert True is _determine_whether_to_implement(
C, None, True, ("__eq__", "__ne__")
)
assert True is _determine_whether_to_implement(
C, None, True, ("__le__", "__lt__", "__ge__", "__gt__")
)
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_make_all_by_default(self, slots, frozen):
"""
If nothing is there to be detected, imply init=True, repr=True,
hash=None, eq=True, order=True.
"""
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
i = C(1)
o = object()
assert i.__init__ is not o.__init__
assert i.__repr__ is not o.__repr__
assert i.__eq__ is not o.__eq__
assert i.__ne__ is not o.__ne__
assert i.__le__ is not o.__le__
assert i.__lt__ is not o.__lt__
assert i.__ge__ is not o.__ge__
assert i.__gt__ is not o.__gt__
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_detect_auto_init(self, slots, frozen):
"""
If auto_detect=True and an __init__ exists, don't write one.
"""
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class CI(object):
x = attr.ib()
def __init__(self):
object.__setattr__(self, "x", 42)
assert 42 == CI().x
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_detect_auto_repr(self, slots, frozen):
"""
If auto_detect=True and a __repr__ exists, don't write one.
"""
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __repr__(self):
return "hi"
assert "hi" == repr(C(42))
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_detect_auto_hash(self, slots, frozen):
"""
If auto_detect=True and a __hash__ exists, don't write one.
"""
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __hash__(self):
return 0xC0FFEE
assert 0xC0FFEE == hash(C(42))
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_detect_auto_eq(self, slots, frozen):
"""
If auto_detect=True and an __eq__ or an __ne__ exists, don't write them.
"""
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __eq__(self, o):
raise ValueError("worked")
with pytest.raises(ValueError, match="worked"):
C(1) == C(1)
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class D(object):
x = attr.ib()
def __ne__(self, o):
raise ValueError("worked")
with pytest.raises(ValueError, match="worked"):
D(1) != D(1)
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_detect_auto_order(self, slots, frozen):
"""
If auto_detect=True and a __ge__, __gt__, __le__, or __lt__ exists,
don't write any.
It's surprisingly difficult to test this programmatically, so we do it
by hand.
"""
def assert_not_set(cls, ex, meth_name):
__tracebackhide__ = True
a = getattr(cls, meth_name)
if meth_name == ex:
assert a == 42
else:
assert a is getattr(object, meth_name)
def assert_none_set(cls, ex):
__tracebackhide__ = True
for m in ("le", "lt", "ge", "gt"):
assert_not_set(cls, ex, "__" + m + "__")
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class LE(object):
__le__ = 42
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class LT(object):
__lt__ = 42
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class GE(object):
__ge__ = 42
@attr.s(auto_detect=True, slots=slots, frozen=frozen)
class GT(object):
__gt__ = 42
assert_none_set(LE, "__le__")
assert_none_set(LT, "__lt__")
assert_none_set(GE, "__ge__")
assert_none_set(GT, "__gt__")
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_override_init(self, slots, frozen):
"""
If init=True is passed, ignore __init__.
"""
@attr.s(init=True, auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __init__(self):
pytest.fail("should not be called")
assert C(1) == C(1)
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_override_repr(self, slots, frozen):
"""
If repr=True is passed, ignore __repr__.
"""
@attr.s(repr=True, auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __repr__(self):
pytest.fail("should not be called")
assert "C(x=1)" == repr(C(1))
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_override_hash(self, slots, frozen):
"""
If hash=True is passed, ignore __hash__.
"""
@attr.s(hash=True, auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __hash__(self):
pytest.fail("should not be called")
assert hash(C(1))
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
def test_override_eq(self, slots, frozen):
"""
If eq=True is passed, ignore __eq__ and __ne__.
"""
@attr.s(eq=True, auto_detect=True, slots=slots, frozen=frozen)
class C(object):
x = attr.ib()
def __eq__(self, o):
pytest.fail("should not be called")
def __ne__(self, o):
pytest.fail("should not be called")
assert C(1) == C(1)
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("frozen", [True, False])
@pytest.mark.parametrize(
"eq,order,cmp",
[
(True, None, None),
(True, True, None),
(None, True, None),
(None, None, True),
],
)
def test_override_order(self, slots, frozen, eq, order, cmp):
"""
If order=True is passed, ignore __le__, __lt__, __gt__, __ge__.
eq=True and cmp=True both imply order=True so test it too.
"""
def meth(self, o):
pytest.fail("should not be called")
@attr.s(
cmp=cmp,
order=order,
eq=eq,
auto_detect=True,
slots=slots,
frozen=frozen,
)
class C(object):
x = attr.ib()
__le__ = __lt__ = __gt__ = __ge__ = meth
assert C(1) < C(2)
assert C(1) <= C(2)
assert C(2) > C(1)
assert C(2) >= C(1)
@pytest.mark.parametrize("slots", [True, False])
@pytest.mark.parametrize("first", [True, False])
def test_total_ordering(self, slots, first):
"""
functools.total_ordering works as expected if an order method and an eq
method are detected.
Ensure the order doesn't matter.
"""
class C(object):
x = attr.ib()
own_eq_called = attr.ib(default=False)
own_le_called = attr.ib(default=False)
def __eq__(self, o):
self.own_eq_called = True
return self.x == o.x
def __le__(self, o):
self.own_le_called = True
return self.x <= o.x
if first:
C = functools.total_ordering(
attr.s(auto_detect=True, slots=slots)(C)
)
else:
C = attr.s(auto_detect=True, slots=slots)(
functools.total_ordering(C)
)
c1, c2 = C(1), C(2)
assert c1 < c2
assert c1.own_le_called
c1, c2 = C(1), C(2)
assert c2 > c1
assert c2.own_le_called
c1, c2 = C(1), C(2)
assert c2 != c1
assert c1 == c1
assert c1.own_eq_called
@pytest.mark.parametrize("slots", [True, False])
def test_detects_setstate_getstate(self, slots):
"""
__getstate__ and __setstate__ are not overwritten if either is present.
"""
@attr.s(slots=slots, auto_detect=True)
class C(object):
def __getstate__(self):
return ("hi",)
assert None is getattr(C(), "__setstate__", None)
@attr.s(slots=slots, auto_detect=True)
class C(object):
called = attr.ib(False)
def __setstate__(self, state):
self.called = True
i = C()
assert False is i.called
i.__setstate__(())
assert True is i.called
assert None is getattr(C(), "__getstate__", None)
@pytest.mark.skipif(PY310, reason="Pre-3.10 only.")
def test_match_args_pre_310(self):
"""
__match_args__ is not created on Python versions older than 3.10.
"""
@attr.s
class C(object):
a = attr.ib()
assert None is getattr(C, "__match_args__", None)
@pytest.mark.skipif(not PY310, reason="Structural pattern matching is 3.10+")
class TestMatchArgs(object):
"""
Tests for match_args and __match_args__ generation.
"""
def test_match_args(self):
"""
__match_args__ is created by default on Python 3.10.
"""
@attr.define
class C:
a = attr.field()
assert ("a",) == C.__match_args__
def test_explicit_match_args(self):
"""
A custom __match_args__ set is not overwritten.
"""
ma = ()
@attr.define
class C:
a = attr.field()
__match_args__ = ma
assert C(42).__match_args__ is ma
@pytest.mark.parametrize("match_args", [True, False])
def test_match_args_attr_set(self, match_args):
"""
__match_args__ is set depending on match_args.
"""
@attr.define(match_args=match_args)
class C:
a = attr.field()
if match_args:
assert hasattr(C, "__match_args__")
else:
assert not hasattr(C, "__match_args__")
def test_match_args_kw_only(self):
"""
kw_only classes don't generate __match_args__.
kw_only fields are not included in __match_args__.
"""
@attr.define
class C:
a = attr.field(kw_only=True)
b = attr.field()
assert C.__match_args__ == ("b",)
@attr.define(kw_only=True)
class C:
a = attr.field()
b = attr.field()
assert C.__match_args__ == ()
def test_match_args_argument(self):
"""
match_args being False with inheritance.
"""
@attr.define(match_args=False)
class X:
a = attr.field()
assert "__match_args__" not in X.__dict__
@attr.define(match_args=False)
class Y:
a = attr.field()
__match_args__ = ("b",)
assert Y.__match_args__ == ("b",)
@attr.define(match_args=False)
class Z(Y):
z = attr.field()
assert Z.__match_args__ == ("b",)
@attr.define
class A:
a = attr.field()
z = attr.field()
@attr.define(match_args=False)
class B(A):
b = attr.field()
assert B.__match_args__ == ("a", "z")
def test_make_class(self):
"""
match_args generation with make_class.
"""
C1 = make_class("C1", ["a", "b"])
assert ("a", "b") == C1.__match_args__
C1 = make_class("C1", ["a", "b"], match_args=False)
assert not hasattr(C1, "__match_args__")
C1 = make_class("C1", ["a", "b"], kw_only=True)
assert () == C1.__match_args__
C1 = make_class("C1", {"a": attr.ib(kw_only=True), "b": attr.ib()})
assert ("b",) == C1.__match_args__
authors: ["[email protected]"] | author_id: ""

blob_id: 176eada799bbb39f3398440a3dfd78659cf734d7 | directory_id: 567b5d5eb951825841de0e70276bb82e3c01027e
path: /src/env.py | content_id: df11357bd0b5b562fc983a9efecf0c6b835eabca
detected_licenses: ["MIT"] | license_type: permissive
repo_name: ningtangla/escapeWithBelief | branch_name: refs/heads/master
snapshot_id: 53b36642c092b9162732457558348bec8823af01 | revision_id: c4e816b2c5d884b1bea89947df8c163aa9a008c5
visit_date: 2020-05-22T20:11:18.907284 | revision_date: 2019-05-13T22:38:55 | committer_date: 2019-05-13T22:38:55
github_id: 186505410 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 9578 | extension: py
content:
import os
import numpy as np
import pandas as pd
import pygame as pg
import itertools as it
import random
#np.random.seed(123)
class TransitionFunction():
def __init__(self, resetPhysicalState, resetBeliefAndAttention, updatePhysicalState, transiteStateWithoutActionChange, updateBeliefAndAttention, updatePhysicalStateByBelief):
self.resetPhysicalState = resetPhysicalState
self.resetBeliefAndAttention = resetBeliefAndAttention
self.updatePhysicalState = updatePhysicalState
self.transiteStateWithoutActionChange = transiteStateWithoutActionChange
self.updateBeliefAndAttention = updateBeliefAndAttention
self.updatePhysicalStateByBelief = updatePhysicalStateByBelief
def __call__(self, oldState, action):
if oldState is None:
newPhysicalState = self.resetPhysicalState()
newBeliefAndAttention = self.resetBeliefAndAttention(newPhysicalState)
newState = [newPhysicalState, newBeliefAndAttention]
else:
#oldState = self.updatePhysicalStateByBelief(oldState)
oldPhysicalState, oldBeliefAndAttention = oldState
#newBeliefAndAttention = self.updateBeliefAndAttention(oldBeliefAndAttention, oldPhysicalState)
#newPhysicalState = self.updatePhysicalState(oldPhysicalState, action)
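# One environment step: apply the agent's action to the physical state, roll the
# world forward through the no-action-change frames, then update belief/attention
# from the resulting motion and fold the belief back into the physical state.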
newPhysicalState = self.updatePhysicalState(oldPhysicalState, action)
stateBeforeNoActionChangeTransition = [newPhysicalState, oldBeliefAndAttention]
physicalStateAfterNoActionChangeTransition, beliefAndAttentionAfterNoActionChangeTransition = self.transiteStateWithoutActionChange(stateBeforeNoActionChangeTransition)
newBeliefAndAttention = self.updateBeliefAndAttention(oldBeliefAndAttention, physicalStateAfterNoActionChangeTransition)
newState = [physicalStateAfterNoActionChangeTransition, newBeliefAndAttention]
newState = self.updatePhysicalStateByBelief(newState)
#print(newBeliefAndAttention[0]['logP'])
#__import__('ipdb').set_trace()
return newState
class TransiteStateWithoutActionChange():
def __init__(self, maxFrame, isTerminal, transiteMultiAgentMotion, render, renderOn):
self.maxFrame = maxFrame
self.isTerminal = isTerminal
self.transiteMultiAgentMotion = transiteMultiAgentMotion
self.render = render
self.renderOn = renderOn
def __call__(self, state):
for frame in range(self.maxFrame):
physicalState, beliefAndAttention = state
agentStates, agentActions, timeStep, wolfIdAndSubtlety = physicalState
if self.isTerminal(state):
break
if self.renderOn:
self.render(state)
newAgentStates, newAgentActions = self.transiteMultiAgentMotion(agentStates, agentActions)
newPhysicalState = [newAgentStates, newAgentActions, timeStep, wolfIdAndSubtlety]
stateAfterNoActionChangeTransition = [newPhysicalState, beliefAndAttention]
state = stateAfterNoActionChangeTransition
return state
class IsTerminal():
def __init__(self, sheepId, minDistance):
self.sheepId = sheepId
self.minDistance = minDistance
def __call__(self, state):
terminal = False
physicalState, beliefAndAttention = state
agentStates, agentActions, timeStep, wolfIdAndSubtlety = physicalState
wolfId, wolfSubtlety = wolfIdAndSubtlety
sheepPosition = agentStates[self.sheepId]
wolfPosition = agentStates[wolfId]
if np.sum(np.power(sheepPosition - wolfPosition, 2)) ** 0.5 <= self.minDistance:
terminal = True
return terminal
class Render():
def __init__(self, numAgent, screen, screenColor, sheepColor, wolfColor, circleSize, saveImage, saveImageFile):
self.numAgent = numAgent
self.screen = screen
self.screenColor = screenColor
self.sheepColor = sheepColor
self.wolfColor = wolfColor
self.circleSize = circleSize
self.saveImage = saveImage
self.saveImageFile = saveImageFile
def __call__(self, state):
physicalState, beliefAndAttention = state
agentStates, agentActions, timeStep, wolfIdAndSubtlety = physicalState
hypothesisInformation, positionOldTimeDF = beliefAndAttention
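# Normalize the hypothesis log-probabilities into a posterior (a softmax over
# hypotheses), then marginalize over everything but wolfIdentity to get a
# per-agent belief of being the wolf; that belief scales each wolf color below.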
posteriorAllHypothesesBeforeNormalization = np.exp(hypothesisInformation['logP'])
posteriorAllHypotheses = posteriorAllHypothesesBeforeNormalization / (np.sum(posteriorAllHypothesesBeforeNormalization))
posteriorAllWolf = posteriorAllHypotheses.groupby(['wolfIdentity']).sum().values
wolfColors = [self.wolfColor * wolfBelief for wolfBelief in posteriorAllWolf]
circleColorList = [self.sheepColor] + wolfColors
for j in range(1):
for event in pg.event.get():
if event.type == pg.QUIT:
pg.quit()  # the bare "pg.quit" was a no-op; actually call it
self.screen.fill(self.screenColor)
for i in range(self.numAgent):
oneAgentState = agentStates[i]
oneAgentPosition = np.array(oneAgentState)
pg.draw.circle(self.screen, circleColorList[i], [np.int(oneAgentPosition[0]),np.int(oneAgentPosition[1])], self.circleSize)
#pg.draw.line(self.screen, np.zeros(3), [np.int(positionOldTimeDF.loc[i].values[0]), np.int(positionOldTimeDF.loc[i].values[1])], [np.int(oneAgentPosition[0]),np.int(oneAgentPosition[1])], self.circleSize)
pg.display.flip()
currentDir = os.getcwd()
parentDir = os.path.abspath(os.path.join(currentDir, os.pardir))
saveImageDir = parentDir + '/src/data/' + self.saveImageFile
if self.saveImage:
filenameList = os.listdir(saveImageDir)
pg.image.save(self.screen,saveImageDir+'/'+str(len(filenameList))+'.png')
pg.time.wait(1)
class MctsRender():
def __init__(self, numAgent, screen, surfaceWidth, surfaceHeight, screenColor, sheepColor, wolfColor, distractorColor, circleSize, saveImage, saveImageFile):
self.numAgent = numAgent
self.screen = screen
self.surfaceWidth = surfaceWidth
self.surfaceHeight = surfaceHeight
self.screenColor = screenColor
self.sheepColor = sheepColor
self.wolfColor = wolfColor
self.distractorColor = distractorColor
self.circleSize = circleSize
self.saveImage = saveImage
self.saveImageFile = saveImageFile
def __call__(self, currNode, nextNode, backgroundScreen):
surfaceToDraw = pg.Surface((self.surfaceWidth, self.surfaceHeight))
surfaceToDraw.fill(self.screenColor)
#surfaceToDraw.set_colorkey(np.zeros(3))
surfaceToDraw.set_alpha(80)
if backgroundScreen is None:
backgroundScreen = pg.Surface((self.surfaceWidth, self.surfaceHeight))
backgroundScreen.fill(self.screenColor)
self.screen.fill(self.screenColor)
surfaceToDraw.blit(backgroundScreen, (0,0))
pg.display.flip()
pg.time.wait(1)
state = list(currNode.id.values())[0]
physicalState, beliefAndAttention = state
agentStates, agentActions, timeStep, wolfIdAndSubtlety = physicalState
wolfId, wolfSubtlety = wolfIdAndSubtlety
nextState = list(nextNode.id.values())[0]
nextPhysicalState, nextBeliefAndAttention = nextState
nextAgentStates, nextAgentActions, nextTimeStep, nextWolfIdAndSubtlety = nextPhysicalState
lineWidth = nextNode.num_visited + 1
circleColorList = [self.sheepColor] + [self.distractorColor] * (self.numAgent - 1)
circleColorList[wolfId] = self.wolfColor
for j in range(1):
for event in pg.event.get():
if event.type == pg.QUIT:
pg.quit()  # the bare "pg.quit" was a no-op; actually call it
for i in range(self.numAgent):
oneAgentState = agentStates[i]
oneAgentNextState = nextAgentStates[i]
oneAgentPosition = np.array(oneAgentState)
oneAgentNextPosition = np.array(oneAgentNextState)
if i == 0:
line = pg.draw.line(surfaceToDraw, np.zeros(3), [np.int(oneAgentPosition[0]), np.int(oneAgentPosition[1])], [np.int(oneAgentNextPosition[0]),np.int(oneAgentNextPosition[1])], lineWidth)
circles = pg.draw.circle(surfaceToDraw, circleColorList[i], [np.int(oneAgentNextPosition[0]),np.int(oneAgentNextPosition[1])], self.circleSize)
if i == wolfId:
circles = pg.draw.circle(surfaceToDraw, circleColorList[i], [np.int(oneAgentNextPosition[0]),np.int(oneAgentNextPosition[1])], self.circleSize)
self.screen.blit(surfaceToDraw, (0, 0))
pg.display.flip()
pg.time.wait(1)
backgroundScreenToReturn = self.screen.copy()
if self.saveImage:
currentDir = os.getcwd()
parentDir = os.path.abspath(os.path.join(currentDir, os.pardir))
saveImageDir = parentDir + '/src/data/' + self.saveImageFile
filenameList = os.listdir(saveImageDir)
pg.image.save(self.screen,saveImageDir+'/'+str(len(filenameList))+'.png')
return self.screen
if __name__ == '__main__':
a = TransitionFunction
__import__('ipdb').set_trace()
authors: ["[email protected]"] | author_id: ""

blob_id: c93f0d01307038fa67436b4424d1db481e3e53c9 | directory_id: 69bf192eb08f2c49093d2b63e5ef16a5b4028848
path: /ved/vedomosti/MainApp/migrations/0006_auto_20160721_2321.py | content_id: b687bb0c52eb40b21ffd1d1998613a87a5bb35f6
detected_licenses: [] | license_type: no_license
repo_name: jsay-api/vedomosti | branch_name: refs/heads/master
snapshot_id: c7aac45684142f428e3ffb6cb29aff9d77a999e3 | revision_id: 5255d44386afbe06965b79c50547dcb80a59029f
visit_date: 2021-01-17T18:09:02.543349 | revision_date: 2016-07-21T20:32:34 | committer_date: 2016-07-21T20:32:34
github_id: 63365926 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 512 | extension: py
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-21 20:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('MainApp', '0005_auto_20160721_2319'),
]
operations = [
migrations.AlterField(
model_name='assetsbeneficiaries',
name='rel_date',
field=models.DateField(blank=True, verbose_name='дата актуальности'),
),
]
authors: ["[email protected]"] | author_id: ""

blob_id: 4ded9ca09dbfbb9b99301b5899ee6c07d0abcb31 | directory_id: 81407be1385564308db7193634a2bb050b4f822e
path: /library/lib_study/112_netdata_binascii.py | content_id: 842c2712049f85a402b204d0cd455f742cd69bb0
detected_licenses: ["MIT"] | license_type: permissive
repo_name: gottaegbert/penter | branch_name: refs/heads/master
snapshot_id: 6db4f7d82c143af1209b4259ba32145aba7d6bd3 | revision_id: 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d
visit_date: 2022-12-30T14:51:45.132819 | revision_date: 2020-10-09T05:33:23 | committer_date: 2020-10-09T05:33:23
github_id: 305266398 | star_events_count: 0 | fork_events_count: 0
gha_license_id: MIT | gha_event_created_at: 2020-10-19T04:56:02 | gha_created_at: 2020-10-19T04:53:05 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 435 | extension: py
content:
# Converting between binary and ASCII representations: https://docs.python.org/zh-cn/3/library/binascii.html
"""
The binascii module contains many functions for converting between binary data
and various ASCII-encoded binary representations.
Normally these functions are not used directly; wrapper modules such as uu,
base64, or binhex are used instead.
For efficiency, binascii contains many low-level functions written in C, and
these low-level functions are used by the higher-level modules.
"""
authors: ["[email protected]"] | author_id: ""

blob_id: 21ae1362833a5c039555dc1eb6113024b53fed68 | directory_id: bd4535b2ff5fc80234eed709f46da53b9ab260cf
path: /Packs/OSQuery/Scripts/OSQueryBasicQuery/OSQueryBasicQuery.py | content_id: 6efabf7dae8ae8298e237cf0d2e9bdbfe70d5657
detected_licenses: ["MIT"] | license_type: permissive
repo_name: vibhuabharadwaj/content | branch_name: refs/heads/master
snapshot_id: 0641284c862668b577e82e32e2daecdb9fabb39a | revision_id: 518da763814fefce538379560282ff8c2ce661b9
visit_date: 2023-03-07T21:36:31.768989 | revision_date: 2022-09-28T15:50:46 | committer_date: 2022-09-28T15:50:46
github_id: 202795410 | star_events_count: 1 | fork_events_count: 0
gha_license_id: MIT | gha_event_created_at: 2023-03-06T17:25:01 | gha_created_at: 2019-08-16T20:30:23 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1188 | extension: py
content:
import json
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Command template run on the remote system over SSH; --json makes osqueryi emit JSON; {0} is the query to execute
COMMAND = 'osqueryi --json "{0}"'
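# For illustration, COMMAND.format('select * from users') renders to:
#   osqueryi --json "select * from users"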
def main():
systems = argToList(demisto.args().get('system'))
query = demisto.args().get('query')
res = []
error_res = []
if query and systems:
for system in systems:
temp_res = demisto.executeCommand("RemoteExec", {'cmd': COMMAND.format(str(query)), 'system': system})
if isError(temp_res[0]):
temp_res_contents = temp_res[0]['Contents']
error_res += [{"Type": entryTypes["error"], "ContentsFormat": formats["text"],
"Contents": f'An Error occurred on remote system:\"{system}\". Error={temp_res_contents}.'}]
else:
data = json.loads(temp_res[0]['Contents'])
res += [{'ContentsFormat': formats['markdown'], 'Type': entryTypes['note'],
"Contents": tblToMd("{0} results:".format(system), data)}]
demisto.results(res + error_res)
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
authors: ["[email protected]"] | author_id: ""

blob_id: 5f9ff53c21a585394ade8a312d386efe615fa801 | directory_id: def2fee9dd1476bb4d782178bffa7d5d34fbbd13
path: /nb_autoimports/__init__.py | content_id: 83a3fd5f2332cd4681a1fe5abd646602a188e248
detected_licenses: ["MIT"] | license_type: permissive
repo_name: sshh12/nb_autoimports | branch_name: refs/heads/main
snapshot_id: 0f6298e7d11434bb5a8ca98f44e9460129ccbf4c | revision_id: 693c73b07af1882f97c957a0813db42926433978
visit_date: 2023-05-24T08:15:48.298366 | revision_date: 2021-06-08T23:18:12 | committer_date: 2021-06-08T23:18:12
github_id: 374137829 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 186 | extension: py
content:
from .auto_import import AutoImporter
def load_ipython_extension(ip):
# TODO: unload function
ai = AutoImporter(ip)
ip.events.register("post_run_cell", ai.on_post_run_cell)
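# Illustrative usage (standard IPython extension loading): in a notebook, run
# `%load_ext nb_autoimports` to invoke load_ipython_extension and register the hook.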
authors: ["[email protected]"] | author_id: ""

blob_id: 10ff6d612cda6c48feb8e0551c17ebcc01eadad7 | directory_id: bd86f45ec9355cf1b76c25307d77c85ff98d30a8
path: /lib/common/color.py | content_id: 1edf3cccfedc2eb0e95268f309e00c535b7eac49
detected_licenses: ["MIT"] | license_type: permissive
repo_name: WhySoGeeky/DroidPot | branch_name: refs/heads/master
snapshot_id: fd39abe490117283f992d80f317574f47809de8d | revision_id: 7c3d9e975dae3835e2ccf42c425d65b26466e82a
visit_date: 2021-07-02T12:47:16.269514 | revision_date: 2015-11-03T17:49:41 | committer_date: 2015-11-03T17:49:41
github_id: 45484292 | star_events_count: 6 | fork_events_count: 0
gha_license_id: MIT | gha_event_created_at: 2021-06-10T17:59:45 | gha_created_at: 2015-11-03T17:44:48 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 615 | extension: py
content:
__author__ = 'RongShun'
import os
import sys
def color(text, color_code):
if sys.platform == "win32" and os.getenv("TERM") != "xterm":
return text
return "\x1b[%dm%s\x1b[0m" % (color_code, text)
def green(text):
return color(text, 32)
def yellow(text):
return color(text, 33)
def white(text):
return color(text, 37)
def bold(text):
return color(text, 1)
def black(text):
return color(text, 30)
def red(text):
return color(text, 31)
def blue(text):
return color(text, 34)
def magenta(text):
return color(text, 35)
def cyan(text):
return color(text, 36)
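# Illustrative usage: each helper wraps text in an ANSI SGR escape sequence, e.g.
# green("OK") == "\x1b[32mOK\x1b[0m", so print(green("OK"), red("FAIL")) colorizes output.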
authors: ["[email protected]"] | author_id: ""

blob_id: 9b45f0f2bd41e194b6694faf529061b87c70ec2d | directory_id: 691754124340cf0b361351b367b6c55e10dbc92b
path: /delocate/tests/test_wheelies.py | content_id: 47c46aa49bdb784e328a050e5d071d0425f0b85c
detected_licenses: ["BSD-2-Clause"] | license_type: permissive
repo_name: sgillies/delocate | branch_name: refs/heads/master
snapshot_id: 6789c3e89d40bb8c14c975c23163b9a13ad9626d | revision_id: f560aa90d4538ef686e4e859068b4840cae0dad4
visit_date: 2021-01-23T11:32:29.537095 | revision_date: 2017-04-15T22:12:27 | committer_date: 2017-04-15T22:12:27
github_id: 93144228 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2017-06-02T08:24:18 | gha_created_at: 2017-06-02T08:24:18 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 10348 | extension: py
content:
""" Direct tests of fixes to wheels """
import os
from os.path import (join as pjoin, basename, realpath, abspath, exists)
import shutil
from subprocess import check_call
from ..delocating import (DelocationError, delocate_wheel, patch_wheel,
DLC_PREFIX)
from ..tools import (get_install_names, set_install_name, zip2dir,
dir2zip, back_tick, get_install_id, get_archs)
from ..wheeltools import InWheel
from ..tmpdirs import InTemporaryDirectory, InGivenDirectory
from nose.tools import (assert_true, assert_false, assert_raises, assert_equal)
from .test_install_names import DATA_PATH, EXT_LIBS
from .test_tools import (ARCH_32, ARCH_BOTH)
PLAT_WHEEL = pjoin(DATA_PATH, 'fakepkg1-1.0-cp27-none-macosx_10_6_intel.whl')
PURE_WHEEL = pjoin(DATA_PATH, 'fakepkg2-1.0-py27-none-any.whl')
STRAY_LIB = pjoin(DATA_PATH, 'libextfunc.dylib')
# The install_name in the wheel for the stray library
STRAY_LIB_DEP = ('/Users/mb312/dev_trees/delocate/wheel_makers/'
'fakepkg1/libs/libextfunc.dylib')
WHEEL_PATCH = pjoin(DATA_PATH, 'fakepkg2.patch')
WHEEL_PATCH_BAD = pjoin(DATA_PATH, 'fakepkg2.bad_patch')
def test_fix_pure_python():
# Test fixing a pure python package gives no change
with InTemporaryDirectory():
os.makedirs('wheels')
shutil.copy2(PURE_WHEEL, 'wheels')
wheel_name = pjoin('wheels', basename(PURE_WHEEL))
assert_equal(delocate_wheel(wheel_name), {})
zip2dir(wheel_name, 'pure_pkg')
assert_true(exists(pjoin('pure_pkg', 'fakepkg2')))
assert_false(exists(pjoin('pure_pkg', 'fakepkg2', '.dylibs')))
def _fixed_wheel(out_path):
wheel_base = basename(PLAT_WHEEL)
with InGivenDirectory(out_path):
zip2dir(PLAT_WHEEL, '_plat_pkg')
if not exists('_libs'):
os.makedirs('_libs')
shutil.copy2(STRAY_LIB, '_libs')
stray_lib = pjoin(abspath(realpath('_libs')), basename(STRAY_LIB))
requiring = pjoin('_plat_pkg', 'fakepkg1', 'subpkg', 'module2.so')
old_lib = set(get_install_names(requiring)).difference(EXT_LIBS).pop()
set_install_name(requiring, old_lib, stray_lib)
dir2zip('_plat_pkg', wheel_base)
shutil.rmtree('_plat_pkg')
return pjoin(out_path, wheel_base), stray_lib
def _rename_module(in_wheel, mod_fname, out_wheel):
# Rename module with library dependency in wheel
with InWheel(in_wheel, out_wheel):
mod_dir = pjoin('fakepkg1', 'subpkg')
os.rename(pjoin(mod_dir, 'module2.so'), pjoin(mod_dir, mod_fname))
return out_wheel
def test_fix_plat():
# Can we fix a wheel with a stray library?
# We have to make one that works first
with InTemporaryDirectory() as tmpdir:
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
assert_true(exists(stray_lib))
# Shortcut
_rp = realpath
# In-place fix
dep_mod = pjoin('fakepkg1', 'subpkg', 'module2.so')
assert_equal(delocate_wheel(fixed_wheel),
{_rp(stray_lib): {dep_mod: stray_lib}})
zip2dir(fixed_wheel, 'plat_pkg')
assert_true(exists(pjoin('plat_pkg', 'fakepkg1')))
dylibs = pjoin('plat_pkg', 'fakepkg1', '.dylibs')
assert_true(exists(dylibs))
assert_equal(os.listdir(dylibs), ['libextfunc.dylib'])
# New output name
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
assert_equal(delocate_wheel(fixed_wheel, 'fixed_wheel.ext'),
{_rp(stray_lib): {dep_mod: stray_lib}})
zip2dir('fixed_wheel.ext', 'plat_pkg1')
assert_true(exists(pjoin('plat_pkg1', 'fakepkg1')))
dylibs = pjoin('plat_pkg1', 'fakepkg1', '.dylibs')
assert_true(exists(dylibs))
assert_equal(os.listdir(dylibs), ['libextfunc.dylib'])
# Test another lib output directory
assert_equal(delocate_wheel(fixed_wheel,
'fixed_wheel2.ext',
'dylibs_dir'),
{_rp(stray_lib): {dep_mod: stray_lib}})
zip2dir('fixed_wheel2.ext', 'plat_pkg2')
assert_true(exists(pjoin('plat_pkg2', 'fakepkg1')))
dylibs = pjoin('plat_pkg2', 'fakepkg1', 'dylibs_dir')
assert_true(exists(dylibs))
assert_equal(os.listdir(dylibs), ['libextfunc.dylib'])
# Test check for existing output directory
assert_raises(DelocationError,
delocate_wheel,
fixed_wheel,
'broken_wheel.ext',
'subpkg')
# Test that `wheel unpack` works
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
assert_equal(delocate_wheel(fixed_wheel),
{_rp(stray_lib): {dep_mod: stray_lib}})
back_tick(['wheel', 'unpack', fixed_wheel])
# Check that copied libraries have modified install_name_ids
zip2dir(fixed_wheel, 'plat_pkg3')
base_stray = basename(stray_lib)
the_lib = pjoin('plat_pkg3', 'fakepkg1', '.dylibs', base_stray)
inst_id = DLC_PREFIX + 'fakepkg1/' + base_stray
assert_equal(get_install_id(the_lib), inst_id)
def test_fix_plat_dylibs():
# Check default and non-default searches for dylibs
with InTemporaryDirectory() as tmpdir:
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
_rename_module(fixed_wheel, 'module.other', 'test.whl')
# With dylibs-only - only analyze files with exts '.dylib', '.so'
assert_equal(delocate_wheel('test.whl', lib_filt_func='dylibs-only'),
{})
# With func that doesn't find the module
func = lambda fn : fn.endswith('.so')
assert_equal(delocate_wheel('test.whl', lib_filt_func=func), {})
# Default - looks in every file
shutil.copyfile('test.whl', 'test2.whl') # for following test
dep_mod = pjoin('fakepkg1', 'subpkg', 'module.other')
assert_equal(delocate_wheel('test.whl'),
{realpath(stray_lib): {dep_mod: stray_lib}})
# With func that does find the module
func = lambda fn : fn.endswith('.other')
assert_equal(delocate_wheel('test2.whl', lib_filt_func=func),
{realpath(stray_lib): {dep_mod: stray_lib}})
def _thin_lib(stray_lib, arch):
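    # Thin the stray library to a single architecture in place using lipo.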
check_call(['lipo', '-thin', arch, stray_lib, '-output', stray_lib])
def _thin_mod(wheel, arch):
with InWheel(wheel, wheel):
mod_fname = pjoin('fakepkg1', 'subpkg', 'module2.so')
check_call(['lipo', '-thin', arch, mod_fname, '-output', mod_fname])
def test__thinning():
with InTemporaryDirectory() as tmpdir:
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
mod_fname = pjoin('fakepkg1', 'subpkg', 'module2.so')
assert_equal(get_archs(stray_lib), ARCH_BOTH)
with InWheel(fixed_wheel):
assert_equal(get_archs(mod_fname), ARCH_BOTH)
_thin_lib(stray_lib, 'i386')
_thin_mod(fixed_wheel, 'i386')
assert_equal(get_archs(stray_lib), ARCH_32)
with InWheel(fixed_wheel):
assert_equal(get_archs(mod_fname), ARCH_32)
def test_check_plat_archs():
# Check flag to check architectures
with InTemporaryDirectory() as tmpdir:
fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
dep_mod = pjoin('fakepkg1', 'subpkg', 'module2.so')
# No complaint for stored / fixed wheel
assert_equal(delocate_wheel(fixed_wheel, require_archs=()),
{realpath(stray_lib): {dep_mod: stray_lib}})
# Make a new copy and break it and fix it again
def _fix_break(arch):
_fixed_wheel(tmpdir)
_thin_lib(stray_lib, arch)
def _fix_break_fix(arch):
_fixed_wheel(tmpdir)
_thin_lib(stray_lib, arch)
_thin_mod(fixed_wheel, arch)
for arch in ('x86_64', 'i386'):
# OK unless we check
_fix_break(arch)
assert_equal(
delocate_wheel(fixed_wheel, require_archs=None),
{realpath(stray_lib): {dep_mod: stray_lib}})
# Now we check, and error raised
_fix_break(arch)
assert_raises(DelocationError, delocate_wheel, fixed_wheel,
require_archs=())
# We can fix again by thinning the module too
_fix_break_fix(arch)
assert_equal(
delocate_wheel(fixed_wheel, require_archs=()),
{realpath(stray_lib): {dep_mod: stray_lib}})
# But if we require the arch we don't have, it breaks
for req_arch in ('intel',
ARCH_BOTH,
ARCH_BOTH.difference([arch])):
_fix_break_fix(arch)
assert_raises(DelocationError, delocate_wheel, fixed_wheel,
require_archs=req_arch)
# Can be verbose (we won't check output though)
_fix_break('x86_64')
assert_raises(DelocationError, delocate_wheel, fixed_wheel,
require_archs=(), check_verbose=True)
def test_patch_wheel():
# Check patching of wheel
with InTemporaryDirectory():
# First wheel needs proper wheel filename for later unpack test
out_fname = basename(PURE_WHEEL)
patch_wheel(PURE_WHEEL, WHEEL_PATCH, out_fname)
zip2dir(out_fname, 'wheel1')
with open(pjoin('wheel1', 'fakepkg2', '__init__.py'), 'rt') as fobj:
assert_equal(fobj.read(), 'print("Am in init")\n')
# Check that wheel unpack works
back_tick(['wheel', 'unpack', out_fname])
# Copy the original, check it doesn't have patch
shutil.copyfile(PURE_WHEEL, 'copied.whl')
zip2dir('copied.whl', 'wheel2')
with open(pjoin('wheel2', 'fakepkg2', '__init__.py'), 'rt') as fobj:
assert_equal(fobj.read(), '')
# Overwrite input wheel (the default)
patch_wheel('copied.whl', WHEEL_PATCH)
# Patched
zip2dir('copied.whl', 'wheel3')
with open(pjoin('wheel3', 'fakepkg2', '__init__.py'), 'rt') as fobj:
assert_equal(fobj.read(), 'print("Am in init")\n')
# Check bad patch raises error
assert_raises(RuntimeError,
patch_wheel, PURE_WHEEL, WHEEL_PATCH_BAD, 'out.whl')
|
[
"[email protected]"
] | |
43634a4192a6b1de1987f2c7343b04f81c9ab576
|
b62ba918b0b96e682d811aa79d0f34ffa50e784c
|
/shop/catalog/models.py
|
0e6804f4c1d6e88a043bf6d32137738765467f99
|
[] |
no_license
|
MaksimLion/django-furniture-shop
|
8ee3edb584cf670c7893c7b836037b97aefafcb2
|
14a5160b29a06947fab7aae7dda15829c1dcf23f
|
refs/heads/master
| 2020-04-24T22:38:26.514663 | 2019-03-10T09:37:06 | 2019-03-10T09:37:06 | 172,318,760 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 903 |
py
|
from django.db import models
class Furniture(models.Model):
TYPES = (
('kitchens','КУХНИ'),
('showcases','ВИТРИНЫ'),
('wardrobes','ШКАФЫ-КУПЕ'),
('offices','МЕБЕЛЬ ДЛЯ ОФИСА'),
('hallways','ПРИХОЖИЕ'),
('lounges','ГОСТИНЫЕ'),
('child','ДЕТСКИЕ'),
('closets','ГАРДЕРОБНЫЕ'),
('others','КРОВАТИ КОМОДЫ ТУМБЫ'),
)
title = models.CharField(max_length=20, verbose_name="Название")
photo = models.ImageField(blank=True, verbose_name="Фото", upload_to="categories/")
option = models.CharField(max_length=20, choices=TYPES, verbose_name="Категория")
class Meta:
verbose_name = "Мебель"
verbose_name_plural = "Мебель"
def __str__(self):
return self.title
|
[
"[email protected]"
] | |
08df8cd4acefaf74f1039287b5260de31247f5da
|
ec61946a176935044d08cf1244d2185f2460df32
|
/pyleecan/Methods/Slot/SlotM11/get_surface_active.py
|
42e164a216dc443c271c333537ee71a505c481e6
|
[
"Apache-2.0"
] |
permissive
|
Lunreth/pyleecan
|
d3974a144cb8a6c332339ab0426f1630b7516fc9
|
1faedde4b24acc6361fa1fdd4e980eaec4ca3a62
|
refs/heads/master
| 2023-06-07T01:46:32.453763 | 2021-07-01T21:29:51 | 2021-07-01T21:29:51 | 383,880,732 | 1 | 0 |
Apache-2.0
| 2021-07-07T17:47:01 | 2021-07-07T17:47:01 | null |
UTF-8
|
Python
| false | false | 1,691 |
py
|
# -*- coding: utf-8 -*-
from numpy import linspace, zeros
from ....Classes.Arc1 import Arc1
from ....Classes.Segment import Segment
from ....Classes.SurfLine import SurfLine
def get_surface_active(self, alpha=0, delta=0):
"""Return the full active surface
Parameters
----------
self : SlotM11
A SlotM11 object
alpha : float
float number for rotation (Default value = 0) [rad]
delta : complex
complex number for translation (Default value = 0)
Returns
-------
surf_act: Surface
Surface corresponding to the Active Area
"""
# get the name of the lamination
st = self.get_name_lam()
Rbo = self.get_Rbo()
point_dict = self._comp_point_coordinate()
ZM1 = point_dict["ZM1"]
ZM2 = point_dict["ZM2"]
ZM3 = point_dict["ZM3"]
ZM4 = point_dict["ZM4"]
curve_list = list()
curve_list.append(Segment(ZM1, ZM2))
if self.is_outwards():
curve_list.append(
Arc1(ZM2, ZM3, (Rbo + self.H0 - self.Hmag), is_trigo_direction=True)
)
else:
curve_list.append(
Arc1(ZM2, ZM3, (Rbo - self.H0 + self.Hmag), is_trigo_direction=True)
)
curve_list.append(Segment(ZM3, ZM4))
if self.is_outwards():
curve_list.append(Arc1(ZM4, ZM1, -Rbo - self.H0, is_trigo_direction=False))
else:
curve_list.append(Arc1(ZM4, ZM1, -Rbo + self.H0, is_trigo_direction=False))
Zmid = (abs(ZM1) + abs(ZM3)) / 2
surface = SurfLine(
line_list=curve_list, label="Wind_" + st + "_R0_T0_S0", point_ref=Zmid
)
# Apply transformation
surface.rotate(alpha)
surface.translate(delta)
return surface
|
[
"[email protected]"
] | |
af004c090784e8fe7a38327a0699f3f1bee2b802
|
179d8aae260d20443e6e87613cff55d42587bc16
|
/examples/x2oneflow/pytorch2oneflow/nodes/test_reduction.py
|
34c6292c3b266174d612648e742d5a682ce50c30
|
[] |
no_license
|
666DZY666/oneflow_convert_tools
|
3b1f9d6ebaf154d7218236c332c6f9613b89a860
|
bb38c52954facbfe977e09c7e4706b7563a7b50c
|
refs/heads/main
| 2023-06-04T10:16:08.786531 | 2021-06-24T08:38:24 | 2021-06-24T08:38:24 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,206 |
py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch
from torch import nn
from oneflow_onnx.x2oneflow.util import load_pytorch_module_and_check
def test_reduce_mean():
class Net(nn.Module):
def forward(self, x):
return torch.mean(x)
load_pytorch_module_and_check(Net)
def test_reduce_mean_axis():
class Net(nn.Module):
def forward(self, x):
return torch.mean(x, dim=2)
load_pytorch_module_and_check(Net)
def test_reduce_mean_axis_keepdim():
class Net(nn.Module):
def forward(self, x):
return torch.mean(x, dim=3, keepdim=True)
load_pytorch_module_and_check(Net)
|
[
"[email protected]"
] | |
ba49774d8e53a281ab7c9a9dc06ba120ee957708
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02612/s358283644.py
|
d00817b67f725713462bf5dbbddad17a49103d13
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 78 |
py
|
s = int(input())
if s%1000 == 0:
c=0
else:
c = 1000 - ( s%1000 )
print(c)
|
[
"[email protected]"
] | |
276d5cb7c2f08b0c622a30d2a0ad6d9e5ebab54b
|
55ceefc747e19cdf853e329dba06723a44a42623
|
/_CodeTopics/LeetCode/801-1000/000954/WA--000954.py3
|
12cb53000239ceb19cbc5b6fee2b54a6673848bf
|
[] |
no_license
|
BIAOXYZ/variousCodes
|
6c04f3e257dbf87cbe73c98c72aaa384fc033690
|
ee59b82125f100970c842d5e1245287c484d6649
|
refs/heads/master
| 2023-09-04T10:01:31.998311 | 2023-08-26T19:44:39 | 2023-08-26T19:44:39 | 152,967,312 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,274 |
py3
|
from typing import List
from collections import Counter, defaultdict
class Solution:
def canReorderDoubled(self, arr: List[int]) -> bool:
if arr.count(0) & 1 or sum(elem < 0 for elem in arr) & 1:
return False
arr = filter(lambda x : x != 0, arr)
arr = map(abs, arr)
ctr = Counter(arr)
deleted = defaultdict(int)
keys = list(ctr.keys())
keys.sort()
for key in keys:
            # key has already been fully consumed as the larger half of earlier
            # pairs (matched by smaller keys), so skip it. E.g., in [2,4,6,12]
            # both 4 and 12 end up in this situation.
if ctr[key] == deleted[key]:
continue
doubleKey = 2 * key
            # For [2,4,4,8], when the loop reaches 4 only one 4 is still
            # available: there are two 4s in total, but one was already used up
            # pairing with the earlier 2.
numOfKeyLeft = ctr[key] - deleted[key]
if ctr[doubleKey] < numOfKeyLeft:
return False
else:
deleted[doubleKey] += numOfKeyLeft
return True
"""
https://leetcode-cn.com/submissions/detail/292900128/
101 / 102 test cases passed
Status: Wrong Answer
Input:
[-3,-4,2,6]
Output:
true
Expected:
false
"""
|
[
"[email protected]"
] | |
7bff1f662a66130a50f13bd63bbeb32866d217a1
|
92187fc72f613751e9d215bc1db8fe1bba4b83bc
|
/src/home/migrations/0013_car.py
|
9c675273b9a0d56370b376e831e68bb22653c9bf
|
[] |
no_license
|
bhubs-python/istehar
|
33cb0abeeb4c8da1f1721eee5f0f380c086f4a66
|
3e1679f1b1b17f6e21aff4923d1d5dbcf687fc66
|
refs/heads/master
| 2021-09-10T16:26:31.464986 | 2018-03-29T08:06:15 | 2018-03-29T08:06:15 | 123,160,263 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,085 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-11 10:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0012_audiomp3'),
]
operations = [
migrations.CreateModel(
name='car',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('model_year', models.DateTimeField(blank=True, null=True)),
('registration_year', models.DateTimeField(blank=True, null=True)),
('transmission', models.CharField(blank=True, max_length=100, null=True)),
('body_type', models.CharField(blank=True, max_length=255, null=True)),
('fuel_type', models.CharField(blank=True, max_length=255, null=True)),
('engine_capacity', models.FloatField(blank=True, null=True)),
('kilometer_run', models.FloatField(blank=True, null=True)),
],
),
]
|
[
"[email protected]"
] | |
9c0ecdb04410180dded57b66e4abaa2e72494082
|
50910ddbbbf57cdbf4d40a404fc2672e8a4b340a
|
/application.py
|
e6c3db90918404f80393c32bf61cf2ea20c5f923
|
[] |
no_license
|
aes95/cs50-web-2018-x-projects-1
|
c5ea9255ae1a986b3ab7c9bf166267afdfea49c9
|
3463c48c46a2a1a849c21653a37058c54d660c96
|
refs/heads/master
| 2020-04-16T11:02:22.218936 | 2019-01-14T00:14:00 | 2019-01-14T00:14:00 | 165,521,869 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,961 |
py
|
import os, requests, xml.etree.ElementTree
from flask import Flask, session, render_template, request, jsonify
from flask_session import Session
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
app = Flask(__name__)
# Check for environment variable
if not os.getenv("DATABASE_URL"):
raise RuntimeError("DATABASE_URL is not set")
# Configure session to use filesystem
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Set up database
engine = create_engine(os.getenv("DATABASE_URL"))
db = scoped_session(sessionmaker(bind=engine))
@app.route("/")
def index():
if not session.get('logged_in'):
return render_template('login.html')
return render_template("search.html")
@app.route("/login", methods=["GET", "POST"])
def login():
if request.method == "POST":
email = request.form.get('email')
password = request.form.get('password')
x = db.execute('SELECT password FROM users WHERE email = :email', {'email': email}).fetchone()
if x == None or x['password'] != password:
return 'Incorrect username or password. Please try again.'
else:
session['logged_in'] = True
session['email'] = email
return index()
if request.method == "GET":
return render_template("login.html")
@app.route("/logout")
def logout():
session['logged_in']=False
session['email'] = None
return index()
@app.route("/register", methods=["GET", "POST"])
def register():
if request.method == "GET":
return render_template("register.html")
@app.route("/confirmation", methods=["POST", "GET"])
def confirmation():
pwd = request.form.get('psw')
email = request.form.get('email')
email_check = db.execute("SELECT email FROM users WHERE email = :email",{'email':email}).fetchone()
if email_check != None:
return f"Your email {email} already has an account associated with it. Please log in <a href='/login'> here <a>"
db.execute("INSERT INTO users (email, password) VALUES(:email,:pwd)",{"email":email, "pwd":pwd})
db.commit()
return "You have successfuly registered! Find books <a href='/'> here </a>"
@app.route("/<string:isbn>", methods=["POST", "GET"])
def book(isbn):
if not session.get('logged_in'):
return render_template('login.html')
book_data = db.execute("SELECT * FROM books WHERE isbn=:isbn",{'isbn':isbn}).fetchone()
if book_data == None:
return "Book not found. Please try again <a href='/search'>Here</a>"
title = book_data['title']
author = book_data['author']
year = book_data['year']
res = requests.get("https://www.goodreads.com/book/review_counts.json", params={"key":"EOquiAwYzuZQkS4FGKIQ", "isbns":isbn}).json()
goodreads_avg = res['books'][0]['average_rating']
goodreads_count = res['books'][0]['ratings_count']
reviews = db.execute("SELECT * FROM reviews WHERE isbn=:isbn",{'isbn':isbn}).fetchall()
return render_template("book.html", title=title, author= author, year=year, isbn=isbn, rating=goodreads_avg, count=goodreads_count, reviews=reviews)
@app.route("/search", methods=["POST", "GET"])
def search():
search = f"%{request.form.get('search')}%"
results = db.execute("SELECT * FROM books WHERE title LIKE :search OR author LIKE :search OR isbn LIKE :search",{'search':search}).fetchall()
return render_template('search.html', results=results)
@app.route("/submit", methods=["POST"])
def submit():
email = session['email']
email_check = db.execute("SELECT email FROM reviews WHERE email = :email",{'email':email}).fetchone()
if email_check != None:
return f"Your email {email} has already submitted a review for this book. Please review other books <a href='/search'> here <a>"
isbn = request.form.get('isbn')
print(isbn)
rating = request.form.get('rating')
review = request.form.get('review')
db.execute("INSERT INTO reviews (email, isbn, rating, review) VALUES (:email, :isbn, :rating, :review)", {'email':email, 'isbn':isbn, 'rating':rating, 'review':review})
db.commit()
return index()
@app.route("/api/<string:isbn>")
def api(isbn):
book_data = db.execute("SELECT * FROM books WHERE isbn=:isbn",{'isbn':isbn}).fetchone()
title = book_data['title']
author = book_data['author']
year = book_data['year']
isbn = isbn
review_count = db.execute("SELECT COUNT(*) FROM reviews WHERE isbn=:isbn",{'isbn':isbn}).fetchone()[0]
average_score = db.execute("SELECT AVG(reviews.rating) FROM reviews WHERE isbn=:isbn",{'isbn':isbn}).fetchone()[0]
average_score = round(float(average_score),2)
dic = {"title": title, "author":author, "year": year,"isbn":isbn, "review_count":review_count, "average_score": average_score }
print(dic)
return jsonify(dic)
|
[
"[email protected]"
] | |
8c90d597c7ceb9a5f6d6cf86f71da32121e3b905
|
d0a84d97aaa8dcc2dff4a6b33ce98dee6d474496
|
/com.CheckProofing/2020/Test_w_45_HolidayDeals_T1_Actives/test_w45_CCcheck.py
|
a9f1f29bcb011c9ba984670d8e1baea2714e1275
|
[] |
no_license
|
ahmed-test001/python
|
21a27248c4571a13c0ed4dccab256aede1beea3a
|
eab59b9a54fae1a51fbc18c391599eb3b0e28b3d
|
refs/heads/master
| 2023-03-10T21:00:54.634028 | 2021-02-27T05:31:58 | 2021-02-27T05:31:58 | 342,778,794 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,821 |
py
|
import time
import unittest
import sys
import os
import logging
import warnings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from PageClass.UrlSegmentPage import URLSegemntPage
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from PageClass.ComputingPage import ComputingPage
from PageClass.MobileAccessoriesPage import MobileAccessoriesPage
from PageClass.TVHomeTheaterPage import TVHomeTheaterPage
from PageClass.SmartPhonePage import SmartPhonePage
from PageClass.HomeAppliancePage import HomeAppliancePage
from PageClass.TabletPage import TabletPage
from Utility_Files import ReadConfig
from Utility_Files.HTMLTestRunner import stdout_redirector
logger=logging.getLogger(__name__)
out_hdlr=logging.StreamHandler(stdout_redirector)
out_hdlr.setFormatter(logging.Formatter('%(asctime)s%(levelname)s%(message)s'))
out_hdlr.setLevel(logging.INFO)
logger.addHandler(out_hdlr)
logger.setLevel(logging.INFO)
class HTMLPage_W_45_CCTest(unittest.TestCase):
method1=""
driver = None
url_list = []
method_list_in_Url = []
@classmethod
def setUp(self):
option = webdriver.ChromeOptions()
option.add_experimental_option('excludeSwitches', ['enable-logging'])
self.driver = webdriver.Chrome(executable_path=ReadConfig.readconfigData('paths', 'chromedriver1'), options=option)
warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
self.wait = WebDriverWait(self.driver, 10)
@classmethod
def tearDown(self):
self.driver.quit()
def test_Proofs(self):
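        # Walk the unique-URL list; "DD" URLs get the smartphone Shop All
        # flow, "CC" URLs are dispatched per category code. Note the
        # substring checks overlap: an "MB_TABLET" or "MB_WEAR" URL also
        # matches the plain "MB" branch above it.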
with open('../TextFolder_Unique_URL/UniqueList_2.txt')as f:
urls = f.read().splitlines()
for url in urls:
if url != 0:
if "DD" in url:
print("Select DD")
self.driver.get(url)
MB_smartphone=SmartPhonePage(self.driver)
MB_smartphone.get_SMARTPHONE_ShopAll()
# MB_smartphone.get_Module1_link()
elif "CC" in url:
self.driver.get(url)
if "MB" in url:
print("Select CC")
MB_smartphone=SmartPhonePage(self.driver)
MB_smartphone.get_SMARTPHONE_ShopAll()
MB_smartphone.get_Module4_link()
if "HA" in url:
print("Select HA")
HA_homeappliance=HomeAppliancePage(self.driver)
HA_homeappliance.get_HomeAppliance_ShopAll()
HA_homeappliance.get_Module4_link()
if "MB_TABLET" in url:
print("Select MB_TABLET")
MB_tablet = TabletPage(self.driver)
MB_tablet.get_Tablet_ShopAll()
MB_tablet.get_Module1_link()
if "TV" in url:
print("Select TV")
TV_HomeTheater=TVHomeTheaterPage(self.driver)
TV_HomeTheater.get_TVHomeTheater_ShopAll()
TV_HomeTheater.get_Module4_link()
if "MB_WEAR" in url:
print("Select MB_WEAR")
MB_Wear=MobileAccessoriesPage(self.driver)
MB_Wear.get_MobileAccessories_ShopAll()
MB_Wear.get_Module4_link()
if "CE_COMPUTER" in url:
print("Select CE_COMPUTER")
CE_Computer=ComputingPage(self.driver)
CE_Computer.get_Computing_ShopAll()
CE_Computer.get_Module4_link()
else:
print("Not able to RUN")
# def test_computing(self):
# with open('../TextFolder_Unique_URL/UniqueList_2.txt')as f:
# urls = f.read().splitlines()
# for url in urls:
# if url != 0:
# if "CC" in url:
# self.driver.get(url)
# if "CE_COMPUTER" in url:
# print("Select CE_COMPUTER")
# # self.driver.get(url)
# CE_Computer=ComputingPage(self.driver)
# CE_Computer.get_Computing_ShopAll()
# CE_Computer.get_Module4_link()
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
65752b9d1bb71f04389f6f784306953871c845e6
|
51a6413af4995a221bb7cf9bce20a00acedcff9d
|
/snakify-problems-python/ALL/10_09_polyglotes.py
|
2fb598e6e525c544190b13d43f6f67e7916ff2fe
|
[] |
no_license
|
famaxth/Way-to-Coding
|
4dff099de31c1a5870cf72a2aaaab74fdcbfed36
|
bcb2048898edf418b059ec506eb5ad1507889cfb
|
refs/heads/main
| 2023-08-15T19:34:16.510571 | 2021-10-08T04:53:42 | 2021-10-08T04:53:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 868 |
py
|
n = int(input()) # number of students
lang_nums = [0] * n # number of languages for each student
langs = [] # language names, for each student
for i in range(n):
lang_nums[i] = int(input())
l = set()
for j in range(lang_nums[i]):
l.add(input())
langs.append(l)
uni = set.union(*langs)
inter = set.intersection(*langs)
print(len(inter), '\n'.join(sorted(inter)), len(uni), '\n'.join(sorted(uni)), sep='\n')
# developer's solution
# students = [{input() for j in range(int(input()))} for i in range(int(input()))]
# known_by_everyone, known_by_someone = set.intersection(*students), set.union(*students)
# print(len(known_by_everyone), *sorted(known_by_everyone), sep='\n')
# print(len(known_by_someone), *sorted(known_by_someone), sep='\n')
|
[
"[email protected]"
] | |
566c733da6a9ca4011ffcaa3a1e35a486b6be7df
|
09cead98874a64d55b9e5c84b369d3523c890442
|
/py200727_python2/day33_py208024/module_2.py
|
4a804c01cc95026ad482d2195c3fca507f0ab18f
|
[] |
no_license
|
edu-athensoft/stem1401python_student
|
f12b404d749286036a090e941c0268381ce558f8
|
baad017d4cef2994855b008a756758d7b5e119ec
|
refs/heads/master
| 2021-08-29T15:01:45.875136 | 2021-08-24T23:03:51 | 2021-08-24T23:03:51 | 210,029,080 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 274 |
py
|
"""
module 2
from...import
from...import statement
"""
from py200727_python2.day33_py208024.myfunc import add
print(add(1,2))
# import math
from math import sqrt
result = sqrt(4)
print(result)
#
from math import *
print(gcd(12,4))
print(ceil(3.4))
print(fabs(5.1))
|
[
"[email protected]"
] | |
2d3085eafbaea0f942754274f2975d8c59f460f2
|
ff487fe5f2956bac2f80ee8f515a17f4fad4bd07
|
/apps/alerts/urls.py
|
d032c1db6e513c613eead5b8c9970cbafe511ef7
|
[] |
no_license
|
101t/django-lms
|
8c11c28321675c52a82f5111912e58ed4edf221f
|
4bee87f299c588b8ad0145bff3b82a51f89b4cac
|
refs/heads/master
| 2021-11-24T22:44:39.401464 | 2021-11-16T20:45:33 | 2021-11-16T20:45:33 | 219,135,709 | 1 | 2 | null | 2021-11-16T06:49:56 | 2019-11-02T10:07:43 |
HTML
|
UTF-8
|
Python
| false | false | 287 |
py
|
from django.urls import path
from django.contrib.auth.decorators import login_required
from .views import AlertList, acknowledge
app_name = "alerts"
urlpatterns = [
path('', login_required(AlertList.as_view()), name='list'),
path('acknowledge/', acknowledge, name='acknowledge'),
]
|
[
"[email protected]"
] | |
e0bb242f5160791f6c4210c2e0806d3032922695
|
97b77417e31c98f695f5fe8697b779d8a92196cc
|
/leanerp/leanerp/urls.py
|
2e6ffef1e545ef0071d1350edf7607b0c88eb52e
|
[
"Apache-2.0"
] |
permissive
|
paullammacau/leaf
|
7f6cbd31e58a8b3b680ba947a7ca276994a9b3cf
|
1e9d0531b1674c5ff722b343fd82fe8307f745ff
|
refs/heads/master
| 2021-07-10T17:56:05.301477 | 2017-10-09T13:45:26 | 2017-10-09T13:45:26 | 106,223,344 | 1 | 0 | null | 2017-10-09T01:25:48 | 2017-10-09T01:25:48 | null |
UTF-8
|
Python
| false | false | 910 |
py
|
"""leanerp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from .views import index, logout
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', index),
url(r'^logout/', logout),
url(r'^erpadmin/', include('erpadmin.urls')),
]
|
[
"[email protected]"
] | |
a0126bed627d15a32c8e0da5723ce62e338341f9
|
de56ee2369d36c93ad802f0359f3274b9a3f0a25
|
/photos/utils.py
|
c2c4b34843b95f366d0f71718aee276495d84c97
|
[] |
no_license
|
Anubhav722/asynchronous-celery-tasks
|
bdfd485b6c6b2777a4712ad64ebabf347e717654
|
a21f055e8e524db662d21f60dac2f8daab075f63
|
refs/heads/master
| 2021-01-23T00:45:41.631402 | 2017-05-31T10:47:44 | 2017-05-31T10:47:44 | 92,840,568 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,083 |
py
|
import json
import requests
from photos.models import Photo
from django.conf import settings
def get_latest_flickr_image():
"""
    Grabs the latest image from the flickr public image feed
"""
url = settings.FLICKR_JSON_FEED_URL
r = requests.get(url)
page_content = r.text
# It turns out flickr escapes single quotes (')
# and apparently this isn't allowed and makes the json invalid
# we use String.replace to get around this.
probably_json = page_content.replace("\\'", "'")
# now we load json
feed = json.loads(probably_json)
images = feed['items']
return images[0]
def save_latest_flickr_image():
"""
    We get the latest image and save it to the Photo model
"""
flickr_image = get_latest_flickr_image()
# make sure we don't save the image more than once
# assuming each flickr image has a unique link
if not Photo.objects.filter(link=flickr_image['link']).exists():
photo = Photo(
title = flickr_image['title'],
link = flickr_image['link'],
image_url = flickr_image['media']['m'],
description = flickr_image['description'],
)
photo.save()
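# Hypothetical usage sketch (an assumption, not part of the original module):
# in this repo the save would typically run asynchronously, e.g. as a Celery
# task; the task below is illustrative only.
#
#   from celery import shared_task
#
#   @shared_task
#   def fetch_latest_flickr_image():
#       save_latest_flickr_image()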
|
[
"[email protected]"
] | |
55b79e0d9a0de22080a98b43b205b927f983600f
|
4ff8676136167cdd81d7a983272102fff86360e8
|
/python/面试题 08.06. 汉诺塔问题.py
|
5b98a8435f155bdd5b6ff5e032c3e17d3a9fbe93
|
[] |
no_license
|
geniuscynic/leetcode
|
0ec256af2377d19fee22ce736462a7e95e3f4e67
|
379a8f27f8213951ee8be41bd56598036995d267
|
refs/heads/master
| 2023-07-19T07:22:20.001770 | 2021-09-07T14:50:40 | 2021-09-07T14:50:40 | 297,277,833 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 680 |
py
|
import sys
from collections import defaultdict
from collections import Counter
class Solution:
dicts = {}
def helper(self, n, A, B, C):
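        # Classic Tower of Hanoi recursion: move n disks from peg A to peg C,
        # using peg B as the auxiliary peg.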
if n == 1:
C.append(A.pop())
return
self.helper(n - 1, A, C, B)
C.append(A.pop())
self.helper(n - 1, B, A, C)
def hanota(self, A, B, C):
self.helper(len(A), A, B, C)
if __name__ == "__main__":
solution = Solution()
A = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
B = []
C = []
m = [1,2,3,4]
nums2 = [1,2,3]
n = 3
result = solution.hanota(A, B, C)
#print(solution.ls)
print(A, B, C)
|
[
"[email protected]"
] | |
78f14676ee6a3da73b6f8b8ccc4e3da343164a3a
|
007f7d8c93725457bc5692715587227d6c8acc0c
|
/blender/.blender/scripts/bpymodules/colladaImEx/helperObjects.py
|
26f5c32af87a37b9c817a60a8c431fea1dfdbe18
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only",
"PSF-2.0",
"GPL-1.0-or-later"
] |
permissive
|
Nicoeevee/sketchfab_download
|
cf1c72ab45a88bebb0e08d7fb984fa01a3be97fa
|
a81ad3a2053e715608e657fd62c9dc1194ffe290
|
refs/heads/master
| 2023-04-21T08:05:28.322657 | 2021-05-13T18:01:30 | 2021-05-13T18:01:30 | 354,547,290 | 0 | 0 |
Apache-2.0
| 2021-05-14T12:04:21 | 2021-04-04T13:13:28 |
Python
|
UTF-8
|
Python
| false | false | 10,986 |
py
|
# --------------------------------------------------------------------------
# Illusoft Collada 1.4 plugin for Blender
# --------------------------------------------------------------------------
# ***** BEGIN GPL LICENSE BLOCK *****
#
# Copyright (C) 2006: Illusoft - [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License,
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
# --------------------------------------------------------------------------
import Blender
import collada
from Blender.Mathutils import *
debprn = 0 #False #--- print debug "print 'deb: ..."
class Armature(object):
# static vars
# A list of all created armatures
_armatures = dict()
def __init__(self, armatureBObject, daeNode):
self.armatureBObject = armatureBObject
self.blenderArmature = Blender.Armature.New()
self.armatureBObject.link(self.blenderArmature)
print self.armatureBObject
self.boneInfos = dict()
self.rootBoneInfos = dict()
# The real blender name of this armature
self.realName = None
self.deaNode = daeNode
def GetBlenderObject(self):
return self.armatureBObject
def GetBlenderArmature(self):
return self.blenderArmature
def HasBone(self, boneName):
return boneName in self.boneInfos
def AddNewBone(self, boneName, parentBoneName, daeNode):
# Create a new Editbone.
editBone = Blender.Armature.Editbone()
# Add the bone to the armature
self.blenderArmature.bones[boneName] = editBone
# Get the boneInfo for the parent of this bone. (if it exists)
parentBoneInfo = None
if not parentBoneName is None and parentBoneName in self.boneInfos:
parentBoneInfo = self.boneInfos[parentBoneName]
# Create a new boneInfo object
boneInfo = BoneInfo(boneName, parentBoneInfo, self, daeNode)
# Store the boneInfo object in the boneInfos collection of this armature.
self.boneInfos[boneName] = boneInfo
# If this bone has a parent, set it.
if not parentBoneName is None and parentBoneName in self.boneInfos:
parentBoneInfo = self.boneInfos[parentBoneName]
parentBoneInfo.childs[boneName] = boneInfo
editBone.parent = self.GetBone(parentBoneName)
##boneInfo.SetConnected()
else:
self.rootBoneInfos[boneName] = boneInfo
return boneInfo
def MakeEditable(self,makeEditable):
if makeEditable:
self.GetBlenderArmature().makeEditable()
else:
self.GetBlenderArmature().update()
def GetBone(self, boneName):
if boneName is None or not (boneName in self.blenderArmature.bones.keys()):
return None
else:
return self.blenderArmature.bones[boneName]
# Get the location of the armature (VECTOR)
def GetLocation(self):
return Vector(self.armatureBObject.loc).resize4D()
def GetTransformation(self):
return self.armatureBObject.matrix
def GetBoneInfo(self, boneName):
if boneName is None:
return None
else:
return self.boneInfos[boneName]
def GetBoneInfoFromJoint(self, jointName):
for boneInfo in self.boneInfos:
if boneInfo.jointName == jointName:
return boneInfo
return None
def GetJointList(self):
result = dict()
for boneInfo in self.boneInfos.values():
result[boneInfo.GetJointName()] = boneInfo
return result
#---CLASSMETHODS
# Factory method
def CreateArmature(cls,objectName,armatureName, realArmatureName, daeNode):
armatureBObject = armature_obj = Blender.Object.New ('Armature', objectName)
armatureBObject.name = str(realArmatureName)
armature = Armature(armatureBObject, daeNode)
armature.name = armatureName
cls._armatures[armatureName] = armature
return armature
CreateArmature = classmethod(CreateArmature)
def GetArmature(cls, armatureName):
return cls._armatures.setdefault(armatureName)
GetArmature = classmethod(GetArmature)
def FindArmatureWithJoint(cls, jointName):
for armature in cls._armatures.values():
jointList = armature.GetJointList()
if jointName in jointList:
return armature
return None
FindArmatureWithJoint = classmethod(FindArmatureWithJoint)
class BoneInfo(object):
def __init__(self, boneName, parentBoneInfo, armature, daeNode):
if debprn: print 'deb:class BoneInfo_INITIALIZE............' #--------
if debprn: print 'deb: boneName=', boneName #--------
if debprn: print 'deb: parentBoneInfo=', #--------
if parentBoneInfo: print parentBoneInfo.name #, parentBoneInfo #--------
else: print parentBoneInfo #--------
#if debprn: print 'deb: armature=', #--------
#if armature: print armature.name #, armature #--------
#else: print armature, #--------
self.name = boneName
self.parent = parentBoneInfo
self.armature = armature
self.childs = dict()
self.daeNode = daeNode
self.headTransformMatrix = None
self.tailTransformMatrix = None
self.localTransformMatrix = Matrix()
self.worldTransformMatrix = Matrix()
def GetBone(self):
return self.armature.GetBone(self.name)
def SetTail(self, tailLocVector):
if len(tailLocVector) == 4:
tailLocVector.resize3D()
self.GetBone().tail = tailLocVector
def GetTail(self):
return self.GetBone().tail
def SetHead(self, headLocVector):
if len(headLocVector) == 4:
headLocVector.resize3D()
self.GetBone().head = headLocVector
def GetHead(self):
return self.GetBone().head
def SetConnected(self):
self.GetBone().options = Blender.Armature.CONNECTED
def IsEnd(self):
return len(self.childs) == 0
def IsRoot(self):
return self.parent is None
def GetTailName(self):
return self.daeNode.name
def GetJointName(self):
return self.name
## if not self.parent is None:
## return self.parent.name
## else:
## return self.armature.name
class AnimationInfo(object):
_animations = dict()
def __init__(self, nodeId):
self.nodeId = nodeId
self.times = dict()
def GetTypes(self, daeNode):
types = []
if len(self.times) > 0:
for target in self.times.values()[0]:
ta = self.GetType(daeNode, target)
if ta[0] == collada.DaeSyntax.TRANSLATE and not Blender.Object.Pose.LOC in types:
types.append(Blender.Object.Pose.LOC)
elif ta[0] == collada.DaeSyntax.ROTATE and not Blender.Object.Pose.ROT in types:
types.append(Blender.Object.Pose.ROT)
#TODO: check if scale correct implemented
elif ta[0] == collada.DaeSyntax.SCALE and not Blender.Object.Pose.SCALE in types:
types.append(Blender.Object.Pose.SCALE)
return types
def GetType(self, daeNode, target):
ta = target.split('.', 1)
for t in daeNode.transforms:
if t[2] == ta[0]:
return [t[0], ta]
def CreateAnimations(cls, animationsLibrary, fps, axiss):
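        # For every animation channel: look up its sampler, read the INPUT
        # (time) and OUTPUT (value) sources, then cache the sampled values in
        # cls._animations keyed by node id, frame time and target.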
for daeAnimation in animationsLibrary.daeLibrary.items:
for channel in daeAnimation.channels:
# Get the id of the node
targetArray = channel.target.split("/", 1)
nodeId = targetArray[0]
targetId = targetArray[1]
# Get the animationInfo object for this node (or create a new one)
animation = cls._animations.setdefault(nodeId, AnimationInfo(nodeId))
#if debprn: print 'deb:helperObj.py:class AnimationInfo CreateAnimations() dir(animation)', dir(animation) #----------
# loop trough all samplers
sampler = None
if debprn: print 'deb:helperObj.py:class AnimationInfo CreateAnimations() \ndeb: channel.source= ', channel.source #----------
for s in daeAnimation.samplers:
#if debprn: print 'deb: sampler.id = ', s.id #----------
#if debprn: print 'deb: channel.source[1:]= ', channel.source[1:] #----------
#org if s.id == channel.source[1:]:
if s.id == channel.source:
sampler = s
# Get the values for all the inputs
if not sampler is None:
input = sampler.GetInput("INPUT")
inputSource = daeAnimation.GetSource(input.source)
if inputSource.techniqueCommon.accessor.HasParam("TIME") and len(inputSource.techniqueCommon.accessor.params) == 1:
if debprn: print 'deb: DDDDD getting target' #----------
output = sampler.GetInput("OUTPUT")
outputSource = daeAnimation.GetSource(output.source)
outputAccessor = outputSource.techniqueCommon.accessor
accessorCount = outputAccessor.count
accessorStride = outputAccessor.stride
interpolations = sampler.GetInput("INTERPOLATION")
interpolationsSource = daeAnimation.GetSource(interpolations.source)
if 0: #because probably interpolationsAccessor is identical to outputAccessor
interpolationsAccessor = interpolationsSource.techniqueCommon.accessor
accessorCount = interpolationsAccessor.count
accessorStride = interpolationsAccessor.stride
if debprn: print 'deb: outputSource.source.data: ', outputSource.source.data #----------
#if debprn: print 'deb: dir(outputAccessor.params): ', dir(outputAccessor.params) #----------
#if debprn: print 'deb: dir(outputAccessor.params[0]): ', str(outputAccessor.params[0]) #----------
times = [x*fps for x in inputSource.source.data]
if debprn: print 'deb: times=', times #---------
for timeIndex in range(len(times)):
time = animation.times.setdefault(times[timeIndex], dict())
target = time.setdefault(targetId, dict())
#interp = time.setdefault(targetId, dict())
#if debprn: print 'deb: accessorStride=', accessorStride #---------
value = []
for j in range(accessorStride):
#if debprn: print 'deb: timeIndex,j,data=',timeIndex, j, outputSource.source.data[timeIndex*accessorStride + j] #---------
#if debprn: print 'deb: outputAccessor.params[j]=',outputAccessor.params[j] #---------
#target[outputAccessor.params[j]] = outputSource.source.data[timeIndex*accessorStride + j]
value.append(outputSource.source.data[timeIndex*accessorStride + j])
if debprn: print 'deb: value=', value #---------
target[outputAccessor.params[0]] = value
#interp[outputAccessor.params[j]] = interpolationsSource.source.data[timeIndex*accessorStride + j]
if debprn: print 'deb: time=', time #---------
if debprn: print 'deb: target=', target #---------
#if debprn: print 'deb:helperObj.py: X X X X X X X X X class AnimationInfo CreateAnimations() animation=', animation #----------
CreateAnimations = classmethod(CreateAnimations)
def GetAnimationInfo(cls, nodeId):
for animation in cls._animations.values():
if animation.nodeId == nodeId:
return animation
return None
GetAnimationInfo = classmethod(GetAnimationInfo)
|
[
"[email protected]"
] | |
aa69c7a051939ec0e565da4a832fe5aa529aee8d
|
8b0d9eb0c04426f544e34726981643dbe7b91bdc
|
/TestBotDeploy/Bot/settings.py
|
6e1ec557579fbb81e8b85387857ed5cf04e4e321
|
[] |
no_license
|
VicGjb/bot
|
555777661115c3ebf33169ed9d5f61a8f2a1bbbb
|
ad3cecbec46f5b78dd97e9a4d04d527bed853e14
|
refs/heads/master
| 2023-03-04T07:51:38.049016 | 2021-02-14T19:08:10 | 2021-02-14T19:08:10 | 324,197,624 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,306 |
py
|
"""
Django settings for Bot project.
Generated by 'django-admin startproject' using Django 3.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '8yd7qz&cu35z!^qh_o6zzdk*u-%tmtbi#*bf-$i2(rq&f8wi@2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['104.236.40.45']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Bot.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Bot.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
if DEBUG:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
else:
    # NOTE: the production database settings were missing from the source;
    # this sqlite placeholder (an assumption) only keeps the module
    # importable and should be replaced with the real production config.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': BASE_DIR / 'db.sqlite3',
        }
    }
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL='/media/'
STATICFILES_DIRS=[os.path.join(BASE_DIR, 'static_in_env')]
STATIC_ROOT=os.path.join(BASE_DIR,'static_root')
MEDIA_ROOT = os.path.join(BASE_DIR, 'media_root')
|
[
"="
] |
=
|
39f7c29ea95996c14613d1e200fbe93a42a90aa3
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_braces.py
|
f13585bba0e3d0198f64068b01917659abec0f3c
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 218 |
py
|
# class header
class _BRACES():
def __init__(self,):
self.name = "BRACES"
  self.definitions = 'brace'
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['brace']
|
[
"[email protected]"
] | |
90c37b461a483c698c0268516b9f48a327639005
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02987/s975288502.py
|
18a6f7eba6ed3672b3244f190eda5c785f421de8
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 118 |
py
|
S = input()
S1 = []
for i in S:
if i not in S1:
S1.append(i)
if len(S1) == 2:
print('Yes')
else:
print('No')
|
[
"[email protected]"
] | |
4fee162a9707bcfbb449862ff68c9713ae67654a
|
f0316e656767cf505b32c83eef4df13bb9f6b60c
|
/LeetCode/Python/Medium/1476_subrectangle_queries.py
|
aac76bf6f8f0c4ead16e60f36d62d2a45052f2d8
|
[] |
no_license
|
AkshdeepSharma/Classroom
|
70ec46b35fab5fc4a9d2eac430659d7dafba93da
|
4e55799466c101c736de6c7e07d716ff147deb83
|
refs/heads/master
| 2022-06-13T18:14:03.236503 | 2022-05-17T20:16:28 | 2022-05-17T20:16:28 | 94,828,359 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 646 |
py
|
from typing import List
class SubrectangleQueries:
def __init__(self, rectangle: List[List[int]]):
self.rectangle = rectangle
def updateSubrectangle(self, row1: int, col1: int, row2: int, col2: int, newValue: int) -> None:
for i in range(row1, row2 + 1):
for j in range(col1, col2 + 1):
self.rectangle[i][j] = newValue
def getValue(self, row: int, col: int) -> int:
return self.rectangle[row][col]
# Your SubrectangleQueries object will be instantiated and called as such:
# obj = SubrectangleQueries(rectangle)
# obj.updateSubrectangle(row1,col1,row2,col2,newValue)
# param_2 = obj.getValue(row,col)
|
[
"[email protected]"
] | |
5fb2baa588a5a5c73bcdcbc8e3eb2aa4c3a49ff8
|
3eb5c0db9d82a3cf1c1f867eb02e562d0d9c0de4
|
/core/knavi.py
|
9f09c7255beca81c2cb39b10ba62662ceb672458
|
[] |
no_license
|
whigg/Crawling_CNKI
|
cc03b140c175da3fc0eed1ef3d75b13781512cce
|
407c6ff5409d5831acd6ad4620281733dd2ad13a
|
refs/heads/master
| 2021-01-01T07:16:28.324919 | 2019-04-10T09:15:00 | 2019-04-10T09:15:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,602 |
py
|
# -*- coding:utf-8 -*-
"""
@#: 页面: main
@#:URL : http://navi.cnki.net/KNavi/All.html
@#: 时间: 2019/3/26
@#: 作者: Mr.L
"""
import time, requests
from bs4 import BeautifulSoup
from tools.base import dict_append, url_translate
from selenium import webdriver
from core.article import article_info
from tools.base import Sleep
from tools.model import mongo_model
# -----<Journal basic info>----- #
def periodical_base_info(URL, html_code):
# class = infobox, titbox
infobox = html_code.find(name="dd", attrs={"class": "infobox"})
    # journal tags [CSSCI, core journal, etc...]
journalType = [i.text for i in infobox.find(name="p", attrs={"class": "journalType"}).findAll("span")]
print("期刊标签:", journalType)
titbox = infobox.find(name="h3", attrs={"class": "titbox"})
    # English journal name
english_period_name = infobox.find(name="p").text
    # Chinese journal name
chinese_period_name = str(titbox.text).strip().rstrip(str(english_period_name))
# listbox clearfix
baseinfo_and_issesinfo = html_code.find(name="div", attrs={"class": "listbox clearfix"})
"""
    There are three <ul> blocks here (some journals only have two): basic info (JournalBaseInfo), publication info (publishInfo), evaluation info (evaluateInfo)
"""
info = baseinfo_and_issesinfo.findAll(name="ul")
info_data_list = list()
    # Chinese name
    info_data_list.append(chinese_period_name)
    # English name
    info_data_list.append(english_period_name)
    # url
    info_data_list.append(URL)
    # tags
    info_data_list.append(journalType)
for sub_ul in info:
sub_ul_id = sub_ul.get("id").strip()
if sub_ul_id == "JournalBaseInfo":
ul_name = "基本信息"
elif sub_ul_id == "publishInfo":
ul_name = "出版信息"
elif sub_ul_id == "evaluateInfo":
ul_name = "评价信息"
else:
ul_name = ""
p_data = dict()
p_all_info = sub_ul.findAll(name="p")
for sub_p_key in p_all_info:
sub_p_data = dict()
p_subdata_key = str(sub_p_key.text).split(":")[0]
try:
p_subdata_value = sub_p_key.find(name="span").text
except:
continue
sub_p_data[p_subdata_key] = p_subdata_value
p_data = dict_append(p_data, sub_p_data)
sub_info_data = dict()
sub_info_data["ul_name"] = ul_name
sub_info_data["ul_english_name"] = sub_ul_id
sub_info_data["data"] = p_data
info_data_list.append(sub_info_data)
return info_data_list
# -----<Journal browsing: year blocks>----- #
def periodical_date(driver, html_code, URL, periodical_base_info_data):
yearissue = html_code.find(name="div", attrs={"id": "yearissue+0"})
yearissuepage = yearissue.findAll(name='dl')
issue_data = list()
    # years
for page in yearissuepage:
"""
[
{
"year": "2017",
"year_func": "JournalDetail.BindYearClick(this);"
这里注意: 需要把<a标签 id拿到>方便出发js
<a id="yq201805" onclick="JournalDetail.BindIssueClick(this)">No.05</a>
driver.execute_script("document.getElementsByClassName('contentbox').onclick = %s" % subdata["dd_onclick"], qqq)
"year_version":
[
{
"sub_version": "No.05",
"sub_ver_func": "JournalDetail.BindIssueClick(this)",
"a_id": "yq201805"
}, ...
]
},.....
]
"""
year_dict = dict()
page_year_js_func = page.find(name="dt").get("onclick")
page_year_data = page.find(name="dt").text
        # years can span several pages; some journals go back to 1956, e.g. 《心理学报》 (Acta Psychologica Sinica)
        # year
        year_dict["year"] = str(page_year_data)
        # year onclick handler
        year_dict["year_func"] = str(page_year_js_func).strip()
        # issues
        page_version = page.find(name="dd").findAll(name="a")
        page_version_data_list = list()
        # issue numbers
        for sub_version in page_version:
            # per-issue data
            sub_year_version_data_dict = dict()
page_ver_a_id = sub_version.get("id")
page_ver_a_func = sub_version.get("onclick")
page_ver_text = sub_version.text
sub_year_version_data_dict["sub_version"] = page_ver_text
sub_year_version_data_dict["sub_ver_func"] = page_ver_a_func
sub_year_version_data_dict["a_id"] = page_ver_a_id
            # for each issue of the journal, fetch its table of contents
            # trigger the <a> tag for that year's issue
print(sub_year_version_data_dict)
driver.get(URL)
"""
            # triggering the click had no effect, so use requests instead
time.sleep(Sleep.time_count)
js_dom_func = "document.getElementById('%s').onclick = %s;" % (page_ver_a_id, page_ver_a_func)
print("js_dom_func: ", js_dom_func)
driver.execute_script(js_dom_func)
time.sleep(Sleep.time_count)
print(BeautifulSoup(driver.page_source, "html.parser"))
perdical_directory_list = perdical_directory(driver, BeautifulSoup(driver.page_source, "html.parser"))
"""
url_arguments = url_translate(URL)
print(url_arguments)
# "http://navi.cnki.net/knavi/JournalDetail/GetArticleList?year=2003&issue=S1&pykm=BZJL&pageIdx=0&pcode=CJFD"
pykm = url_arguments["pykm"] if "pykm" in url_arguments else url_arguments["baseid"]
version_articlt_html_url = "http://navi.cnki.net/knavi/JournalDetail/GetArticleList?year=%s&issue=%s&pykm=%s&pageIdx=0&pcode=%s" % (
year_dict["year"],
str(page_ver_a_id).replace("yq" + year_dict["year"], ""),
pykm,
url_arguments["pcode"])
print("version_articlt_html_url: ", version_articlt_html_url)
result = requests.post(version_articlt_html_url)
data = result.text
perdical_directory_list = perdical_directory(driver, BeautifulSoup(data, "html.parser"),
periodical_base_info_data, page_year_data, page_ver_text) # year, version
sub_year_version_data_dict["article_directory"] = perdical_directory_list
page_version_data_list.append(sub_year_version_data_dict)
year_dict["year_version"] = page_version_data_list
issue_data.append(year_dict)
# print("issue_data: ", issue_data)
return issue_data
# -----<Journal browsing: table-of-contents page>----- #
def perdical_directory(driver, html_code, periodical_base_info_data, page_year_data, page_ver_text):
# directory_all = html_code.find(name="dl", attrs={"id": "CataLogContent"})
    # article titles
article_title = html_code.findAll(name="dd") # []
article_title_list = list()
for single_article in article_title:
single_article_dict = dict()
single_article_title_a = single_article.find(name="span", attrs={'class': 'name'}).find(name="a")
article_title_a_link = single_article_title_a.get("href")
print('article_title_a_link: ', article_title_a_link)
article_title_a_text = single_article_title_a.text.strip()
        # redirect link: http://kns.cnki.net/kcms/detail/detail.aspx? + DBCode + DBName + fileName + uid
'''
http://kns.cnki.net/kcms/detail/detail.aspx?dbcode=CJFD&filename=BJWD201706001&dbname=CJFDLAST2018&uid=WEEvREcwSlJHSldRa1FhdkJkVG1COG9jZzUxQWhaWU05UjM2SGZ0aEoyUT0=$9A4hF_YAuvQ5obgVAqNKPCYcEjKensW4IQMovwHtwkF4VYPoHbKxJw!!
Common/RedirectPage?sfield=FN&dbCode=CJFD&filename=BJWD201706001&tableName=CJFDLAST2018&url=
'''
head_url = "http://kns.cnki.net/kcms/detail/detail.aspx?"
url_arguments = url_translate(article_title_a_link)
cookie = [(item["name"], item["value"]) for item in driver.get_cookies()]
        # build the URL for a single article
# http://kns.cnki.net/kcms/detail/detail.aspx?dbcode=CJFD&filename=SWSL201400002&dbname=CJFDLASN2014&uid=...
url = head_url + "dbcode=" + url_arguments["dbCode"] + "&filename=" + url_arguments["filename"] + "&dbname=" + \
url_arguments["tableName"]
single_article_dict["title"] = article_title_a_text
single_article_dict["link"] = article_title_a_link
single_article_dict["url"] = url
single_article_dict["article_arguments"] = url_arguments
if url[:4] == "http":
try:
single_article_dict["article_data"] = article_info(driver, url, url_arguments,
periodical_base_info_data, page_year_data, page_ver_text)
mongo_model(single_article_dict["title"], single_article_dict["article_data"])
except:
single_article_dict["article_data"] = []
else:
single_article_dict["article_data"] = []
        # here we need to --> open each article and collect its details
article_title_list.append(single_article_dict)
return article_title_list
# -----<Journal>----- #
def periodical(driver, URL):
    driver.get(URL) # fetch the page
time.sleep(Sleep.time_count)
soup = BeautifulSoup(driver.page_source, "html.parser")
    # journal basic info
periodical_base_info_data = periodical_base_info(URL, soup)
periodical_date_info_data = periodical_date(driver, soup, URL, periodical_base_info_data)
# print(periodical_base_info_data)
# print(periodical_date_info_data)
data = {}
data["periodical_info"] = periodical_base_info_data
data["periodical_data"] = periodical_date_info_data
    # journal browsing --> some journals don't have it
    # year
    # issue number
    # TOC section heading
    # article title
print(data)
return data
def main():
'''
    one journal --> n years --> n issues --> contents --> each article --> title, authors, author details, etc...
:return:
'''
driver = webdriver.Chrome() # Optional argument, if not specified will search path.
# periodical(driver, "http://navi.cnki.net/KNavi/pubDetail?pubtype=journal&pcode=CJFD&baseid=SWSL")
periodical(driver, "http://navi.cnki.net/knavi/JournalDetail?pcode=CJFD&pykm=BJDZ&tdsourcetag=s_pctim_aiomsg")
time.sleep(Sleep.time_count)
driver.close()
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
c3452dd8aecdacf3bd2f698102e86b7017748a11
|
df541a802b2dfa89d3aab14af627358dc7c76e6e
|
/APP自动化/App/StoneUIFramework/public/setting/系统设置/退出/_Logout.py
|
74aa1a7a16757b9144ada4802842241e26290418
|
[] |
no_license
|
gupan2018/PyAutomation
|
de966aff91f750c7207c9d3f3dfb488698492342
|
230aebe3eca5799c621673afb647d35a175c74f1
|
refs/heads/master
| 2021-09-07T19:44:20.710574 | 2017-12-22T15:58:23 | 2017-12-22T15:58:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,031 |
py
|
__author__ = 'Administrator'
# logout confirmation page
from StoneUIFramework.public.setting.系统设置._Syssetting import _Syssetting
import logging
class _Logout(_Syssetting):
def Syssetting_logout_confirm(self):
        # locate: Settings - System Security - Logout - Confirm
try:
__Syssetting_logout_confirm = self.driver.find_element_by_id("android:id/button1")
except Exception as err:
logging.info("Syssetting_logout_confirm:error@@!!!!!!!")
assert False,\
"点击设置-系统安全-退出-确定失败"
return __Syssetting_logout_confirm
def Syssetting_logout_cancel(self):
        # locate: Settings - System Security - Logout - Cancel
try:
__Syssetting_logout_cancel = self.driver.find_element_by_id("android:id/button2")
except Exception as err:
logging.info("Syssetting_logout_cancel:error@@!!!!!!!")
assert False,\
"点击设置-系统安全-退出-取消"
return __Syssetting_logout_cancel
|
[
"[email protected]"
] | |
87ccf88abce864ab8fdb05be5339c0883908d290
|
ab621c65fc91f5194c4032d68e750efaa5f85682
|
/l10n_th_account_tax_detail/__openerp__.py
|
bd15f29f96dcd2ad04cab3560b4c476f85ba1f8a
|
[] |
no_license
|
pabi2/pb2_addons
|
a1ca010002849b125dd89bd3d60a54cd9b9cdeef
|
e8c21082c187f4639373b29a7a0905d069d770f2
|
refs/heads/master
| 2021-06-04T19:38:53.048882 | 2020-11-25T03:18:24 | 2020-11-25T03:18:24 | 95,765,121 | 6 | 15 | null | 2022-10-06T04:28:27 | 2017-06-29T10:08:49 |
Python
|
UTF-8
|
Python
| false | false | 754 |
py
|
# -*- coding: utf-8 -*-
{
'name': "Invoice Tax Detail",
'summary': "Allow editing tax table in detail",
'author': "Ecosoft",
'website': "http://ecosoft.co.th",
'category': 'Account',
'version': '0.1.0',
'depends': [
'account',
'l10n_th_account',
'account_invoice_check_tax_lines_hook',
],
'data': [
'data/config_data.xml',
'security/ir.model.access.csv',
'wizard/account_tax_detail_view.xml',
'views/account_view.xml',
'views/account_invoice_view.xml',
'views/account_voucher_view.xml',
'views/account_config.xml',
],
'demo': [
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"[email protected]"
] | |
644fc1bf3f6b8d07653d023d8f31d01f65603566
|
02f0b44446d0ae1456db790866f8c184dc9b2652
|
/trunk/convertdialog.py
|
2d3c32c6b4d323b8028e177c1d5ac090a9334565
|
[] |
no_license
|
BGCX261/zoomtc-svn-to-git
|
23d26688429d5d0db5937c630ecb6243438c3721
|
8937a6a53bb58c1457e6f93af619070929a4839a
|
refs/heads/master
| 2021-01-23T12:11:25.771636 | 2015-08-25T15:20:29 | 2015-08-25T15:20:29 | 41,591,859 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,708 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import os
import sys
from PIL import Image
from PyQt4 import QtCore, QtGui
import zoomtc_rc
from ui_convertdialog import Ui_convertDialog
# all supported image formats list here
EXTS = ('.bmp', '.im', '.msp', '.pcx', '.ppm',
'.spider', '.tiff', '.xbm', '.xv', '.jpg', '.jpeg', '.gif', '.png',)
class ConvertDialog(QtGui.QDialog, Ui_convertDialog):
def __init__(self, parent=None, initDir= '.', initRate = 0.055):
QtGui.QDialog.__init__(self, parent)
self.setupUi(self)
# English version UI messages
self.messages = {
'selectSrc': u'Please select picture source dir',
'outDir': u'zoomtc_out',
'progressFormat': u'processing: %d / %d',
'cancel': u'Cancel',
'processing': u'Processing...',
'dirLabel': u'Picture Dir:',
'rateLabel': u'Zoom Rate:',
'helpLabel': u"<p>1.Drag & Drop picture directory on `Picture Dir'.<br/>"
u"2.Set `Zoom Rate' as R,the zoomed size will be SIZE*R.<br/>"
u"3.Zoomed pictures stored in 'zoomtc_out' under the same directory.</p>",
            'dirButton': u'Browse...',
'convertButton': u'Zoom',
'windowTitle': u'Zoomtc, a picture batch zoom tool',
'criticalTitle': u'Something is Wrong',
'criticalInfo': u'Check the zoom rate and picture format.\nPlease try again.',
}
# If system locale is Chinese, then we define Chinese version UI messages
loc = QtCore.QLocale.system()
if loc.country()==loc.China:
self.messages = {
'selectSrc': u'请选择图片源目录',
'outDir': u'缩放输出目录',
'progressFormat': u'进度: %d / %d',
'cancel': u'取消',
'processing': u"正在处理图片……",
'dirLabel': u'图片源目录:',
'rateLabel': u'缩放比例:',
'helpLabel': u'<p>1.拖放图片目录到"图片源目录"<br/>'
u'2.设置"缩放比例"为R, 缩放后尺寸为"原尺寸*R"<br/>'
u'3.缩放后的文件保存在原图片目录下的“缩放输出目录"中</p>',
'dirButton': u"浏览...",
'convertButton': u"缩放",
'windowTitle': u'Zoomtc, 图片批量缩放工具',
'criticalTitle': u'错误',
'criticalInfo': u'请检查是否正确设置了缩放比例.',
}
# set the UI, English or Chinese according to the system locale
self.dirLabel.setText(self.messages['dirLabel'])
self.rateLabel.setText(self.messages['rateLabel'])
self.helpLabel.setText(self.messages['helpLabel'])
self.dirButton.setText(self.messages['dirButton'])
self.convertButton.setText(self.messages['convertButton'])
self.setWindowTitle(self.messages['windowTitle'])
self.setWindowIcon(QtGui.QIcon(":/logo.ico"))
# enable Drag & Drop
self.dirLineEdit.setAcceptDrops(False)
self.rateLineEdit.setAcceptDrops(False)
self.setAcceptDrops(True)
self.connect(self.dirButton, QtCore.SIGNAL("clicked()"),
self.getDir)
self.connect(self.convertButton, QtCore.SIGNAL("clicked()"),
self.doConvert)
self.cwd = os.path.abspath(initDir)
self.dirLineEdit.setText(self.cwd)
self.rate = float(initRate)
self.rateLineEdit.setText("%.3f"%round(self.rate, 3))
def dragEnterEvent(self, event):
if event.mimeData().hasFormat("text/uri-list"):
event.acceptProposedAction()
def dropEvent(self, event):
urls = event.mimeData().urls()
if not urls:
return
fileName = urls[0].toLocalFile()
if not fileName:
return
self.dirLineEdit.setText(fileName)
# save rate value when closing
def closeEvent(self, event):
rate = float(self.rateLineEdit.text())
settings = QtCore.QSettings(u"ctootc", u"zoomtc")
settings.setValue("rate", QtCore.QVariant(rate))
def getDir(self):
dirName = QtGui.QFileDialog.getExistingDirectory(self, self.messages['selectSrc'],
self.cwd)
if dirName:
self.dirLineEdit.setText(dirName)
#self.cwd = os.path.basename(dirName)
# process one image file
def _processFile(self, fileName, rate, progressDialog):
print 'process on:', fileName
path = os.path.dirname(fileName)
os.chdir(path)
outdir = os.path.join(path, self.messages['outDir'])
print 'outdir', outdir
name = os.path.basename(fileName)
print 'name', name
self.processValue += 1
progressDialog.setValue(self.processValue)
progressDialog.setLabelText(self.messages['progressFormat'] % (self.processValue, self.processTotal))
QtGui.qApp.processEvents()
if progressDialog.wasCanceled():
return
n,ext = os.path.splitext(name)
if ext.lower() in EXTS:
im = Image.open(fileName)
(w,h) = im.size
iout = im.resize((int(w*rate),int(h*rate)), Image.ANTIALIAS)
print 'outname', os.path.join(outdir, name)
if not os.path.exists(outdir):
os.mkdir(outdir)
iout.save(os.path.join(outdir, name))
# process all image files under this directories
def _processDir(self, path, rate, progressDialog):
print 'process on:', path
os.chdir(path)
outdir = os.path.join(path, self.messages['outDir'])
print 'outdir', outdir
for name in os.listdir(path):
print 'name', name
fullname = os.path.join(path, name)
if os.path.isdir(fullname):
self._processDir(fullname, rate, progressDialog)
else:
self._processFile(fullname, rate, progressDialog)
# count image files need to be processed, we need this number to initialize ProgressDialog
def _totalfiles(self, path):
if os.path.isdir(path):
total = 0
for name in os.listdir(path):
fullname = os.path.join(path, name)
if os.path.isdir(fullname):
total += self._totalfiles(fullname)
else:
total += 1
return total
else:
return 1
def doConvert(self):
try:
rate = float(self.rateLineEdit.text())
path = unicode(self.dirLineEdit.text())
progressDialog = QtGui.QProgressDialog(self)
progressDialog.setCancelButtonText(self.messages['cancel'])
self.processTotal = self._totalfiles(path)
progressDialog.setRange(0, self.processTotal)
progressDialog.setWindowTitle(self.messages['processing'])
self.processValue = 0
if os.path.isdir(path):
self._processDir(path, rate, progressDialog)
else:
self._processFile(path, rate, progressDialog)
progressDialog.close()
except:
QtGui.QMessageBox.critical(self, self.messages['criticalTitle'], self.messages['criticalInfo'])
return
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
settings = QtCore.QSettings(u"ctootc", u"zoomtc")
rate = settings.value("rate", QtCore.QVariant(0.5)).toDouble()[0]
s = ConvertDialog(initRate=rate)
s.show()
sys.exit(app.exec_())
|
[
"[email protected]"
] | |
6ffb0c65163ec92ac6eb7e8bbb2eeee963366b3e
|
04ad466db13a382cc679d9562e515d57b54c47e6
|
/scripts/maxent_priors.py
|
a69077dddf5e3009d6b707e0e4792d6890804360
|
[
"MIT"
] |
permissive
|
shivaditya-meduri/pyprobml
|
d9423463ae7b352c52f3d005fbf33ee66d366971
|
9dbe0c95f4ec061b98bf32fa3ac1deafe2e0c04d
|
refs/heads/master
| 2023-04-12T13:09:45.572071 | 2021-05-07T18:22:02 | 2021-05-07T18:22:02 | 356,659,290 | 1 | 0 |
MIT
| 2021-04-11T05:04:38 | 2021-04-10T18:07:31 | null |
UTF-8
|
Python
| false | false | 1,105 |
py
|
# Maximum entropy priors under different moment constraints
# fig 1.10 of 'Bayesian Modeling and Computation'
import numpy as np
import matplotlib.pyplot as plt
import pyprobml_utils as pml
from scipy import stats
from scipy.stats import entropy
from scipy.optimize import minimize
C = 10
xs = np.arange(1,C+1)
cons = [[{"type": "eq", "fun": lambda x: np.sum(x) - 1}],
[{"type": "eq", "fun": lambda x: np.sum(x) - 1},
{"type": "eq", "fun": lambda x: 1.5 - np.sum(x *xs)}],
[{"type": "eq", "fun": lambda x: np.sum(x) - 1},
{"type": "eq", "fun": lambda x: np.sum(x[[2, 3]]) - 0.8}]]
max_ent = []
names= ['unconstrained', 'mean of 1.5', 'p(3,4)=0.8']
for i, c in enumerate(cons):
val = minimize(lambda x: -entropy(x),
x0=[1/C]*C,
bounds=[(0., 1.)] * C,
constraints=c)['x']
max_ent.append(entropy(val))
plt.plot(xs, val, 'o--', lw=2.5, label=names[i])
#plt.stem(xs, val, label=names[i])
plt.xlabel(r"$\theta$")
plt.ylabel(r"$p(\theta)$")
plt.legend()
pml.savefig("maxent_priors.pdf", dpi=300)
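# Sanity check (assumption: with no moment constraints, the maximum-entropy
# distribution over C outcomes is uniform, so its entropy should equal log(C)):
#   print(max_ent[0], np.log(C))  # both ~2.303 for C = 10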
|
[
"[email protected]"
] | |
b56e012cd852d9dee8afd57b3ff0a9a240fe1828
|
185bea7d9c7dc9288b021e0c27f24e087f668109
|
/AI/Project/Sudoku/eliminate_q5.py
|
472e64c213c8524b8d5000fa04d1d1df0dd7b613
|
[] |
no_license
|
JasonVann/Udacity
|
3430fa86d9863dd0c4657b525243a91154f4b57f
|
9584bf90e8f1f9d4faf0973c7c36325f367558e7
|
refs/heads/master
| 2021-01-12T09:38:53.896396 | 2017-09-05T03:51:47 | 2017-09-05T03:51:47 | 76,217,990 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 864 |
py
|
from utils import *
def eliminate(values):
"""Eliminate values from peers of each box with a single value.
Go through all the boxes, and whenever there is a box with a single value,
eliminate this value from the set of values of all its peers.
Args:
values: Sudoku in dictionary form.
Returns:
Resulting Sudoku in dictionary form after eliminating values.
"""
    import copy
    # Iterate over a snapshot so in-place edits to `values` don't disturb the loop.
    values2 = copy.deepcopy(values)
    for k, v in values2.items():
        if len(v) == 1:
            for peer in peers[k]:
                if v in values[peer]:
                    values[peer] = values[peer].replace(v, '')
    return values
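# A small usage sketch (hypothetical fragment; the real `peers` mapping is
# supplied by utils and is keyed by box labels such as 'A1'):
#   values = {'A1': '1', 'A2': '1234', ...}
#   values = eliminate(values)  # 'A2' drops the digit '1' because A1 is solved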
|
[
"[email protected]"
] | |
f7afb969c707a49319ea9771a712ab4977d48f95
|
642f50ad3a7abc63032815847fe9a49246b03506
|
/setup.py
|
ae3cdb64db3b8ddeab1b8efb9189075e6969709a
|
[
"MIT"
] |
permissive
|
circuitpython/CircuitPython_Org_DisplayIO_Annotation
|
f76557c616498480c2f14c6423150f6eb6aa2709
|
df982a2f65d8dfe77759905820f5e27aead425a7
|
refs/heads/main
| 2023-06-13T07:31:35.924175 | 2021-07-05T15:06:11 | 2021-07-05T15:06:11 | 358,929,751 | 0 | 2 |
MIT
| 2021-07-05T15:06:11 | 2021-04-17T16:34:49 |
Python
|
UTF-8
|
Python
| false | false | 2,336 |
py
|
# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries
# SPDX-FileCopyrightText: Copyright (c) 2021 Kevin Matocha for circuitpython
#
# SPDX-License-Identifier: MIT
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.rst"), encoding="utf-8") as f:
long_description = f.read()
setup(
# Community Bundle Information
name="circuitpython-displayio-annotation",
use_scm_version=True,
setup_requires=["setuptools_scm"],
description="A CircuitPython DisplayIO widget for annotating other widgets or freeform positions.",
long_description=long_description,
long_description_content_type="text/x-rst",
# The project's main homepage.
url="https://github.com/circuitpython/CircuitPython_Org_DisplayIO_Annotation.git",
# Author details
author="CircuitPython Organization",
author_email="",
install_requires=[
"Adafruit-Blinka",
"adafruit-circuitpython-display-text",
"adafruit-circuitpython-display_shapes",
"adafruit-circuitpython-displayio-layout",
],
# Choose your license
license="MIT",
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries",
"Topic :: System :: Hardware",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
# What does your project relate to?
keywords="adafruit blinka circuitpython micropython displayio_annotation displayio widget "
"graphics",
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
# TODO: IF LIBRARY FILES ARE A PACKAGE FOLDER,
# CHANGE `py_modules=['...']` TO `packages=['...']`
py_modules=["displayio_annotation"],
)
|
[
"[email protected]"
] | |
5a27684254e81e031f857cf3f929151d09307a8b
|
ad553dd718a8df51dabc9ba636040da740db57cf
|
/.history/app_20181208180113.py
|
5eb0e0ff5a594f01d11592f60c728896dcfaa4e9
|
[] |
no_license
|
NergisAktug/E-Commerce-PythonWithFlask-Sqlite3
|
8e67f12c28b11a7a30d13788f8dc991f80ac7696
|
69ff4433aa7ae52ef854d5e25472dbd67fd59106
|
refs/heads/main
| 2023-01-01T14:03:40.897592 | 2020-10-19T20:36:19 | 2020-10-19T20:36:19 | 300,379,376 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,256 |
py
|
import datetime
from flask import Flask,flash, request, render_template_string, render_template
from flask import Flask, url_for, render_template, request, redirect, session, escape, render_template_string
from flask_babelex import Babel
from flask_sqlalchemy import SQLAlchemy
from flask_user import current_user, login_required, roles_required
from sqlalchemy.sql import table, column, select
from sqlalchemy import MetaData, create_engine
from flask_user import login_required, roles_required, UserManager, UserMixin
class ConfigClass(object):
SECRET_KEY = 'This is an INSECURE secret!! DO NOT use this in production!!'
SQLALCHEMY_DATABASE_URI = 'sqlite:///eticaret.sqlite'
SQLALCHEMY_TRACK_MODIFICATIONS = False
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
MAIL_USERNAME = '[email protected]'
MAIL_PASSWORD = '05383896877'
MAIL_DEFAULT_SENDER = '"MyApp" <[email protected]>'
USER_ENABLE_EMAIL = True
USER_ENABLE_USERNAME = False
USER_EMAIL_SENDER_EMAIL = "[email protected]"
def create_app():
app = Flask(__name__)
app.config.from_object(__name__ + '.ConfigClass')
db = SQLAlchemy(app)
class Kullanici(db.Model):
__tablename__ = 'Kullanici'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(80), unique=True)
sifre = db.Column(db.String(80))
rolId = db.Column(db.Integer, db.ForeignKey('rol.rolId', ondelete='CASCADE'))
active = db.Column('is_active', db.Boolean(), nullable=False, server_default='1')
def __init__(self, email, sifre):
self.email = email
self.sifre = sifre
self.rolId = 0
class Roller(db.Model):
__tablename__ = 'rol'
rolId = db.Column(db.Integer, primary_key=True)
rolisim = db.Column(db.String(80))
class urunler(db.Model):
__tablename__ = 'urunler'
urun_id = db.Column(db.Integer, primary_key=True)
urunismi = db.Column(db.String(80))
urunresmi = db.Column(db.String(80))
urunFiyati = db.Column(db.Integer)
markaId = db.Column(db.Integer(), db.ForeignKey('markalar.markaId', ondelete='CASCADE'))
def __init__(self, urunismi, urunresmi, urunFiyati,markaId):
self.urunismi =urunismi
self.urunresmi = urunresmi
self.urunFiyati = urunFiyati
self.markaId=markaId
class markalar(db.Model):
__tablename__ = 'markalar'
markaId = db.Column(db.Integer, primary_key=True)
markaadi = db.Column(db.String(80))
marka_modeli = db.Column(db.String(80))
def __init__(self, markaadi, marka_modeli):
self.markaadi = markaadi
self.marka_modeli = marka_modeli
class musteri(db.Model):
__tablename__ = 'musteri'
musteriId = db.Column(db.Integer, primary_key=True)
musteriadi = db.Column(db.String(80))
musterisoyadi = db.Column(db.String(80))
mail = db.Column(db.String(80), unique=True)
telefon = db.Column(db.Integer)
sifre = db.Column(db.String(80))
il = db.Column(db.String(80))
ilce = db.Column(db.String(80))
kullaniciId = db.Column(db.Integer(), db.ForeignKey('Kullanici.id', ondelete='CASCADE'))
def __init__(self, musteriadi, musterisoyadi, mail, telefon, sifre, il, ilce, kullaniciId):
self.musteriadi = musteriadi
self.musterisoyadi = musterisoyadi
self.mail = mail
self.telefon = telefon
self.sifre = sifre
self.il = il
self.ilce = ilce
self.kullaniciId = kullaniciId
class siparis(db.Model):
__tablename__ = 'siparis'
siparisId = db.Column(db.Integer, primary_key=True)
musteriId = db.Column(db.Integer(), db.ForeignKey('musteri.musteriId', ondelete='CASCADE'))
urunId = db.Column(db.Integer(), db.ForeignKey('urunler.urun_id', ondelete='CASCADE'))
siparisno = db.Column(db.Integer)
siparisTarihi = db.Column(db.Integer)
odemeId = db.Column(db.Integer())
def __init__(self, musteriId, urunId, siparisno, siparisTarihi, odemeId):
self.musteriId = musteriId
self.urunId = urunId
self.siparisno = siparisno
self.siparisTarihi = siparisTarihi
self.odemeId = odemeId
db.create_all()
@app.route('/')
def anasayfa():
return render_template('index.html')
@app.route('/kayit', methods=['GET', 'POST'])
def kayit():
if request.method == 'POST':
mail = request.form['email']
parola = request.form['sifre']
yeniKullanici = Kullanici(email=mail, sifre=parola)
db.session.add(yeniKullanici)
db.session.commit()
            # add()/commit() always leave a valid object here, so no None
            # check is needed.
            mesaj = "Kayıt Başarıyla Sağlanmıştır."
            return render_template("index.html", mesaj=mesaj)
        else:
            return render_template('kayit.html')
@app.route('/admin')
def admin():
return render_template("admin.html")
@app.route('/uye', methods=['GET', 'POST'])
def uye():
return render_template("uyeGirisi.html")
@app.route('/giris', methods=['GET', 'POST'])
def giris():
hata=None
if request.method=='POST':
if request.form['email']!='[email protected]' or request.form['sifre']!='admin':
                # filter_by() returns a Query (never None); .first() fetches the actual row.
                if Kullanici.query.filter_by(email=request.form['email'], sifre=request.form['sifre']).first() is not None:
session['uye_giris']=True
return redirect(url_for('anasayfa'))
else:
hata='hatalı giris yaptınız'
else:
flash('giriş başarılı')
session['admin_giris']=True
return redirect(url_for('admin'))
return render_template('uyeGiris.html',hata=hata)
@app.route('/cikis')
def cikis():
session.pop('admin_giris',None)
session.pop('uye_giris',None)
return render_template("index.html")
@app.route('/urunEkle')
def urunEkle():
tumVeri=urunler.query.all()
return render_template("urunEkle.html",tumVeri=tumVeri)
@app.route('/urunEklemeYap',methods=['POST'])
def urunEklemeYap():
urunismi=request.form['urunismi']
urunResmi=request.form['urunresmi']
urunFiyati=request.form['fiyati']
markaId=request.form['markaId']
yeniUrun=urunler(urunismi=urunismi,urunresmi=urunResmi,urunFiyati=urunFiyati,markaId=markaId)
db.session.add(yeniUrun)
db.session.commit()
return redirect(url_for("urunEkle"))
@app.route("/sil/<string:id>")
def sil(id):
urun=urunler.query.filter_by(urun_id=id).first()
db.session.delete(urun)
db.session.commit()
return redirect(url_for("urunEkle"))
@app.route('/Markalar')
def Markalar():
return render_template("marka.html")
return app
if __name__ == '__main__':
app=create_app()
app.run(host='127.0.0.1', port=5000, debug=True)
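# Quick manual smoke test once the server is running (hypothetical values):
#   curl -X POST -d "email=user@example.com&sifre=secret" http://127.0.0.1:5000/kayit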
|
[
"[email protected]"
] | |
820ef2144d3dd85072afa1fb2bb8df44a8a3087d
|
006ff11fd8cfd5406c6f4318f1bafa1542095f2a
|
/FastSimulation/L1CaloTriggerProducer/test/test_cfg.py
|
817cecbd05903876f5b300c0f4b38be6b0da5ac0
|
[] |
permissive
|
amkalsi/cmssw
|
8ac5f481c7d7263741b5015381473811c59ac3b1
|
ad0f69098dfbe449ca0570fbcf6fcebd6acc1154
|
refs/heads/CMSSW_7_4_X
| 2021-01-19T16:18:22.857382 | 2016-08-09T16:40:50 | 2016-08-09T16:40:50 | 262,608,661 | 0 | 0 |
Apache-2.0
| 2020-05-09T16:10:07 | 2020-05-09T16:10:07 | null |
UTF-8
|
Python
| false | false | 1,347 |
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("L1")
process.load("FastSimulation.L1CaloTriggerProducer.fastl1calosim_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load("Geometry.CMSCommonData.cmsIdealGeometryXML_cfi")
process.load("Geometry.CaloEventSetup.CaloGeometry_cfi")
process.load("Geometry.CaloEventSetup.CaloTopology_cfi")
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('/store/relval/2008/6/6/RelVal-RelValZTT-1212543891-STARTUP-2nd-02/0000/40FA3C45-E533-DD11-9B17-000423D98C20.root')
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
process.Out = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring('drop *',
'keep l1extraL1JetParticles_*_*_*',
'keep l1extraL1EmParticles_*_*_*',
'keep l1extraL1MuonParticles_*_*_*',
'keep l1extraL1EtMissParticle_*_*_*',
'keep l1extraL1ParticleMaps_*_*_*'),
fileName = cms.untracked.string('test.root')
)
process.CaloTowerConstituentsMapBuilder = cms.ESProducer("CaloTowerConstituentsMapBuilder",
MapFile = cms.untracked.string('Geometry/CaloTopology/data/CaloTowerEEGeometric.map.gz')
)
process.p = cms.Path(process.fastL1CaloSim)
process.e = cms.EndPath(process.Out)
process.Out.fileName = 'test.root'
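# Typically executed inside a CMSSW environment with: cmsRun test_cfg.py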
|
[
"[email protected]"
] | |
d6704396575c1ef54a76c33a3040048b12fb8e75
|
a94e87dab0f3848ae8a02bb482633e953eb3062f
|
/code/default/python27/1.0/lib/noarch/front_base/http2_stream.py
|
334bcc2372913fe05eb138fa5422452befb55f50
|
[
"BSD-2-Clause"
] |
permissive
|
yulongpo/XX-Net
|
0699e50c666c3d1665c415d2871fc46bbcd6ec06
|
1dcd667eb8f704540b5ee19424a534d20cf12a83
|
refs/heads/master
| 2021-04-12T08:42:54.965106 | 2018-03-22T01:14:11 | 2018-03-22T01:14:11 | 126,262,175 | 1 | 1 | null | 2018-03-22T01:30:22 | 2018-03-22T01:30:22 | null |
UTF-8
|
Python
| false | false | 15,312 |
py
|
# -*- coding: utf-8 -*-
"""
port from hyper/http20/stream for async
remove push support
increase init window size to improve performance
~~~~~~~~~~~~~~~~~~~
Objects that make up the stream-level abstraction of hyper's HTTP/2 support.
Conceptually, a single HTTP/2 connection is made up of many streams: each
stream is an independent, bi-directional sequence of HTTP headers and data.
Each stream is identified by a monotonically increasing integer, assigned to
the stream by the endpoint that initiated the stream.
"""
import threading
from hyper.common.headers import HTTPHeaderMap
from hyper.packages.hyperframe.frame import (
FRAME_MAX_LEN, FRAMES, HeadersFrame, DataFrame, PushPromiseFrame,
WindowUpdateFrame, ContinuationFrame, BlockedFrame, RstStreamFrame
)
from hyper.http20.exceptions import ProtocolError, StreamResetError
from hyper.http20.util import h2_safe_headers
from hyper.http20.response import strip_headers
from hyper.common.util import to_host_port_tuple, to_native_string, to_bytestring
import simple_http_client
from http_common import *
# Define a set of states for a HTTP/2 stream.
STATE_IDLE = 0
STATE_OPEN = 1
STATE_HALF_CLOSED_LOCAL = 2
STATE_HALF_CLOSED_REMOTE = 3
STATE_CLOSED = 4
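# Sketch of the half-close state machine these constants encode (per RFC 7540
# section 5.1; push-related transitions are omitted since push is unsupported):
#   IDLE --send HEADERS--> OPEN
#   OPEN --send END_STREAM--> HALF_CLOSED_LOCAL
#   OPEN --recv END_STREAM--> HALF_CLOSED_REMOTE
#   half-closed --END_STREAM from the other side--> CLOSED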
class Stream(object):
"""
A single HTTP/2 stream.
A stream is an independent, bi-directional sequence of HTTP headers and
data. Each stream is identified by a single integer. From a HTTP
perspective, a stream _approximately_ matches a single request-response
pair.
"""
def __init__(self,
logger,
config,
connection,
ip,
stream_id,
task,
send_cb,
close_cb,
encoder,
decoder,
receive_window_manager,
remote_window_size,
max_frame_size):
self.logger = logger
self.config = config
self.connection = connection
self.ip = ip
self.stream_id = stream_id
self.task = task
self.state = STATE_IDLE
self.get_head_time = None
# There are two flow control windows: one for data we're sending,
# one for data being sent to us.
self.receive_window_manager = receive_window_manager
self.remote_window_size = remote_window_size
self.max_frame_size = max_frame_size
# This is the callback handed to the stream by its parent connection.
# It is called when the stream wants to send data. It expects to
# receive a list of frames that will be automatically serialized.
self._send_cb = send_cb
# This is the callback to be called when the stream is closed.
self._close_cb = close_cb
# A reference to the header encoder and decoder objects belonging to
# the parent connection.
self._encoder = encoder
self._decoder = decoder
self.request_headers = HTTPHeaderMap()
# Convert the body to bytes if needed.
self.request_body = to_bytestring(self.task.body)
        # Portion of the request body not yet sent because the send window
        # is exhausted; the remainder is sent once the window reopens.
self.request_body_left = len(self.request_body)
self.request_body_sended = False
# data list before decode
self.response_header_datas = []
# Set to a key-value set of the response headers once their
# HEADERS..CONTINUATION frame sequence finishes.
self.response_headers = None
# Unconsumed response data chunks
self.response_body = []
self.response_body_len = 0
def start_request(self):
"""
Open the stream. Does this by encoding and sending the headers: no more
calls to ``add_header`` are allowed after this method is called.
The `end` flag controls whether this will be the end of the stream, or
whether data will follow.
"""
# Strip any headers invalid in H2.
#headers = h2_safe_headers(self.request_headers)
host = self.connection.get_host(self.task.host)
self.add_header(":method", self.task.method)
self.add_header(":scheme", "https")
self.add_header(":authority", host)
self.add_header(":path", self.task.path)
default_headers = (':method', ':scheme', ':authority', ':path')
#headers = h2_safe_headers(self.task.headers)
for name, value in self.task.headers.items():
is_default = to_native_string(name) in default_headers
self.add_header(name, value, replace=is_default)
# Encode the headers.
encoded_headers = self._encoder(self.request_headers)
# It's possible that there is a substantial amount of data here. The
# data needs to go into one HEADERS frame, followed by a number of
# CONTINUATION frames. For now, for ease of implementation, let's just
# assume that's never going to happen (16kB of headers is lots!).
# Additionally, since this is so unlikely, there's no point writing a
# test for this: it's just so simple.
if len(encoded_headers) > FRAME_MAX_LEN: # pragma: no cover
raise ValueError("Header block too large.")
header_frame = HeadersFrame(self.stream_id)
header_frame.data = encoded_headers
# If no data has been provided, this is the end of the stream. Either
# way, due to the restriction above it's definitely the end of the
# headers.
header_frame.flags.add('END_HEADERS')
if self.request_body_left == 0:
header_frame.flags.add('END_STREAM')
# Send the header frame.
self.task.set_state("start send header")
self._send_cb(header_frame)
# Transition the stream state appropriately.
self.state = STATE_OPEN
self.task.set_state("start send left body")
threading.Thread(target=self.left_work).start()
def left_work(self):
if self.request_body_left > 0:
self.send_left_body()
self.timeout_response()
def add_header(self, name, value, replace=False):
"""
Adds a single HTTP header to the headers to be sent on the request.
"""
if not replace:
self.request_headers[name] = value
else:
self.request_headers.replace(name, value)
def send_left_body(self):
while self.remote_window_size and not self.request_body_sended:
send_size = min(self.remote_window_size, self.request_body_left, self.max_frame_size)
f = DataFrame(self.stream_id)
data_start = len(self.request_body) - self.request_body_left
f.data = self.request_body[data_start:data_start+send_size]
self.remote_window_size -= send_size
self.request_body_left -= send_size
            # If no request body remains after this frame, this is the end
            # of the data, so mark it as END_STREAM.
if self.request_body_left == 0:
f.flags.add('END_STREAM')
# Send the frame and decrement the flow control window.
self._send_cb(f)
# If no more data is to be sent on this stream, transition our state.
if self.request_body_left == 0:
self.request_body_sended = True
self._close_local()
self.task.set_state("end send left body")
def receive_frame(self, frame):
"""
Handle a frame received on this stream.
called by connection.
"""
# self.logger.debug("stream %d recved frame %r", self.stream_id, frame)
if frame.type == WindowUpdateFrame.type:
self.remote_window_size += frame.window_increment
self.send_left_body()
elif frame.type == HeadersFrame.type:
# Begin the header block for the response headers.
#self.response_header_datas = [frame.data]
self.response_header_datas.append(frame.data)
elif frame.type == PushPromiseFrame.type:
self.logger.error("%s receive PushPromiseFrame:%d", self.ip, frame.stream_id)
elif frame.type == ContinuationFrame.type:
# Continue a header block begun with either HEADERS or PUSH_PROMISE.
self.response_header_datas.append(frame.data)
elif frame.type == DataFrame.type:
# Append the data to the buffer.
if not self.task.finished:
self.task.put_data(frame.data)
if 'END_STREAM' not in frame.flags:
# Increase the window size. Only do this if the data frame contains
# actual data.
# don't do it if stream is closed.
size = frame.flow_controlled_length
increment = self.receive_window_manager._handle_frame(size)
#if increment:
# self.logger.debug("stream:%d frame size:%d increase win:%d", self.stream_id, size, increment)
#content_len = int(self.request_headers.get("Content-Length")[0])
#self.logger.debug("%s get:%d s:%d", self.ip, self.response_body_len, size)
if increment and not self._remote_closed:
w = WindowUpdateFrame(self.stream_id)
w.window_increment = increment
self._send_cb(w)
elif frame.type == BlockedFrame.type:
# If we've been blocked we may want to fixup the window.
increment = self.receive_window_manager._blocked()
if increment:
w = WindowUpdateFrame(self.stream_id)
w.window_increment = increment
self._send_cb(w)
elif frame.type == RstStreamFrame.type:
            # A RST_STREAM frame sent by the server; this usage is not defined in the RFC
inactive_time = time.time() - self.connection.last_active_time
self.logger.debug("%s Stream %d Rest by server, inactive:%d. error code:%d",
self.ip, self.stream_id, inactive_time, frame.error_code)
self.connection.close("RESET")
elif frame.type in FRAMES:
# This frame isn't valid at this point.
#raise ValueError("Unexpected frame %s." % frame)
self.logger.error("%s Unexpected frame %s.", self.ip, frame)
else: # pragma: no cover
# Unknown frames belong to extensions. Just drop it on the
# floor, but log so that users know that something happened.
self.logger.error("%s Received unknown frame, type %d", self.ip, frame.type)
pass
if 'END_HEADERS' in frame.flags:
if self.response_headers is not None:
raise ProtocolError("Too many header blocks.")
# Begin by decoding the header block. If this fails, we need to
# tear down the entire connection.
if len(self.response_header_datas) == 1:
header_data = self.response_header_datas[0]
else:
header_data = b''.join(self.response_header_datas)
try:
headers = self._decoder.decode(header_data)
except Exception as e:
self.logger.exception("decode h2 header %s fail:%r", header_data, e)
raise e
self.response_headers = HTTPHeaderMap(headers)
# We've handled the headers, zero them out.
self.response_header_datas = None
self.get_head_time = time.time()
length = self.response_headers.get("Content-Length", None)
if isinstance(length, list):
length = int(length[0])
if not self.task.finished:
self.task.content_length = length
self.task.set_state("h2_get_head")
self.send_response()
if 'END_STREAM' in frame.flags:
#self.logger.debug("%s Closing remote side of stream:%d", self.ip, self.stream_id)
time_now = time.time()
time_cost = time_now - self.get_head_time
if time_cost > 0 and \
isinstance(self.task.content_length, int) and \
not self.task.finished:
speed = self.task.content_length / time_cost
self.task.set_state("h2_finish[SP:%d]" % speed)
self._close_remote()
self.close("end stream")
if not self.task.finished:
self.connection.continue_timeout = 0
def send_response(self):
if self.task.responsed:
self.logger.error("http2_stream send_response but responsed.%s", self.task.url)
self.close("h2 stream send_response but sended.")
return
self.task.responsed = True
status = int(self.response_headers[b':status'][0])
strip_headers(self.response_headers)
response = simple_http_client.BaseResponse(status=status, headers=self.response_headers)
response.ssl_sock = self.connection.ssl_sock
response.worker = self.connection
response.task = self.task
self.task.queue.put(response)
if status in self.config.http2_status_to_close:
self.connection.close("status %d" % status)
def close(self, reason="close"):
if not self.task.responsed:
self.connection.retry_task_cb(self.task, reason)
else:
self.task.finish()
# empty block means fail or closed.
self._close_remote()
self._close_cb(self.stream_id, reason)
@property
def _local_closed(self):
return self.state in (STATE_CLOSED, STATE_HALF_CLOSED_LOCAL)
@property
def _remote_closed(self):
return self.state in (STATE_CLOSED, STATE_HALF_CLOSED_REMOTE)
@property
def _local_open(self):
return self.state in (STATE_OPEN, STATE_HALF_CLOSED_REMOTE)
def _close_local(self):
self.state = (
STATE_HALF_CLOSED_LOCAL if self.state == STATE_OPEN
else STATE_CLOSED
)
def _close_remote(self):
self.state = (
STATE_HALF_CLOSED_REMOTE if self.state == STATE_OPEN
else STATE_CLOSED
)
def timeout_response(self):
start_time = time.time()
while time.time() - start_time < self.task.timeout:
time.sleep(1)
if self._remote_closed:
return
self.logger.warn("h2 timeout %s task_trace:%s worker_trace:%s",
self.connection.ssl_sock.ip,
self.task.get_trace(),
self.connection.get_trace())
self.task.set_state("timeout")
if self.task.responsed:
self.task.finish()
else:
self.task.response_fail("timeout")
self.connection.continue_timeout += 1
if self.connection.continue_timeout >= self.connection.config.http2_max_timeout_tasks and \
time.time() - self.connection.last_active_time > self.connection.config.http2_timeout_active:
self.connection.close("down fail")
|
[
"[email protected]"
] | |
5ae9fad5761840beb8eaa4696530a64d6f482cf1
|
9d7d88cc4dc326993c6be9ba2a79b5afe86254c5
|
/posner/activations/__init__.py
|
047cea63440ba493cc001394e049f3a910f8f19f
|
[] |
no_license
|
LeeKLTW/posner
|
7ebe0e287c8a9db91e150ba08c41772757b2639f
|
9a1c6e00c463644a78ebf413b676c74c846dc23d
|
refs/heads/master
| 2022-12-16T17:32:38.327191 | 2020-02-26T11:50:47 | 2020-02-26T11:50:47 | 240,471,085 | 5 | 1 | null | 2022-12-08T03:36:50 | 2020-02-14T09:22:13 |
Python
|
UTF-8
|
Python
| false | false | 46 |
py
|
# -*- coding: utf-8 -*-
from .gelu import gelu
|
[
"[email protected]"
] | |
c3406d2aabbb544a97bb3aee11a10506f35f184e
|
cac9947cec2aace94fb4a7c69fd32654bb53e853
|
/lib/python3.7/site-packages/boltons/statsutils.py
|
e4b71b28c22b31a5bd4f8efa5e1d730e09473056
|
[
"BSD-3-Clause"
] |
permissive
|
nguyentranhoan/uit-mobile
|
7bc1a020251ca583fe11cf1f729630466203537a
|
8546312b01373d94cf00c64f7eacb769e0f4ccce
|
refs/heads/master
| 2023-01-05T03:29:54.732892 | 2020-04-08T07:14:32 | 2020-04-08T07:14:32 | 235,015,840 | 0 | 0 |
BSD-3-Clause
| 2022-12-27T15:35:57 | 2020-01-20T04:04:47 |
Python
|
UTF-8
|
Python
| false | false | 28,624 |
py
|
# -*- coding: utf-8 -*-
"""``statsutils`` provides tools aimed primarily at descriptive
statistics for data analysis, such as :func:`mean` (average),
:func:`median`, :func:`variance`, and many others.
The :class:`Stats` type provides all the main functionality of the
``statsutils`` module. A :class:`Stats` object wraps a given dataset,
providing all statistical measures as property attributes. These
attributes cache their results, which allows efficient computation of
multiple measures, as many measures rely on other measures. For
example, relative standard deviation (:attr:`Stats.rel_std_dev`)
relies on both the mean and standard deviation. The Stats object
caches those results so no rework is done.
The :class:`Stats` type's attributes have module-level counterparts for
convenience when the computation reuse advantages do not apply.
>>> stats = Stats(range(42))
>>> stats.mean
20.5
>>> mean(range(42))
20.5
Statistics is a large field, and ``statsutils`` is focused on a few
basic techniques that are useful in software. The following is a brief
introduction to those techniques. For a more in-depth introduction,
`Statistics for Software
<https://www.paypal-engineering.com/2016/04/11/statistics-for-software/>`_,
an article I wrote on the topic. It introduces key terminology vital
to effective usage of statistics.
Statistical moments
-------------------
Python programmers are probably familiar with the concept of the
*mean* or *average*, which gives a rough quantitative middle value by
which a sample can be generalized. However, the mean is just
the first of four `moment`_-based measures by which a sample or
distribution can be measured.
The four `Standardized moments`_ are:
1. `Mean`_ - :func:`mean` - theoretical middle value
2. `Variance`_ - :func:`variance` - width of value dispersion
3. `Skewness`_ - :func:`skewness` - symmetry of distribution
4. `Kurtosis`_ - :func:`kurtosis` - "peakiness" or "long-tailed"-ness
For more information check out `the Moment article on Wikipedia`_.
.. _moment: https://en.wikipedia.org/wiki/Moment_(mathematics)
.. _Standardized moments: https://en.wikipedia.org/wiki/Standardized_moment
.. _Mean: https://en.wikipedia.org/wiki/Mean
.. _Variance: https://en.wikipedia.org/wiki/Variance
.. _Skewness: https://en.wikipedia.org/wiki/Skewness
.. _Kurtosis: https://en.wikipedia.org/wiki/Kurtosis
.. _the Moment article on Wikipedia: https://en.wikipedia.org/wiki/Moment_(mathematics)
Keep in mind that while these moments can give a bit more insight into
the shape and distribution of data, they do not guarantee a complete
picture. Wildly different datasets can have the same values for all
four moments, so generalize wisely.
Robust statistics
-----------------
Moment-based statistics are notorious for being easily skewed by
outliers. The whole field of robust statistics aims to mitigate this
dilemma. ``statsutils`` also includes several robust statistical methods:
* `Median`_ - The middle value of a sorted dataset
* `Trimean`_ - Another robust measure of the data's central tendency
* `Median Absolute Deviation`_ (MAD) - A robust measure of
variability, a natural counterpart to :func:`variance`.
* `Trimming`_ - Reducing a dataset to only the middle majority of
data is a simple way of making other estimators more robust.
.. _Median: https://en.wikipedia.org/wiki/Median
.. _Trimean: https://en.wikipedia.org/wiki/Trimean
.. _Median Absolute Deviation: https://en.wikipedia.org/wiki/Median_absolute_deviation
.. _Trimming: https://en.wikipedia.org/wiki/Trimmed_estimator
Online and Offline Statistics
-----------------------------
Unrelated to computer networking, `online`_ statistics involve
calculating statistics in a `streaming`_ fashion, without all the data
being available. The :class:`Stats` type is meant for the more
traditional offline statistics when all the data is available. For
pure-Python online statistics accumulators, look at the `Lithoxyl`_
system instrumentation package.
.. _Online: https://en.wikipedia.org/wiki/Online_algorithm
.. _streaming: https://en.wikipedia.org/wiki/Streaming_algorithm
.. _Lithoxyl: https://github.com/mahmoud/lithoxyl
"""
from __future__ import print_function
import bisect
from math import floor, ceil
class _StatsProperty(object):
def __init__(self, name, func):
self.name = name
self.func = func
self.internal_name = '_' + name
doc = func.__doc__ or ''
pre_doctest_doc, _, _ = doc.partition('>>>')
self.__doc__ = pre_doctest_doc
def __get__(self, obj, objtype=None):
if obj is None:
return self
if not obj.data:
return obj.default
try:
return getattr(obj, self.internal_name)
except AttributeError:
setattr(obj, self.internal_name, self.func(obj))
return getattr(obj, self.internal_name)
class Stats(object):
"""The ``Stats`` type is used to represent a group of unordered
statistical datapoints for calculations such as mean, median, and
variance.
Args:
data (list): List or other iterable containing numeric values.
default (float): A value to be returned when a given
statistical measure is not defined. 0.0 by default, but
``float('nan')`` is appropriate for stricter applications.
use_copy (bool): By default Stats objects copy the initial
data into a new list to avoid issues with
modifications. Pass ``False`` to disable this behavior.
is_sorted (bool): Presorted data can skip an extra sorting
step for a little speed boost. Defaults to False.
"""
def __init__(self, data, default=0.0, use_copy=True, is_sorted=False):
self._use_copy = use_copy
self._is_sorted = is_sorted
if use_copy:
self.data = list(data)
else:
self.data = data
self.default = default
cls = self.__class__
self._prop_attr_names = [a for a in dir(self)
if isinstance(getattr(cls, a, None),
_StatsProperty)]
self._pearson_precision = 0
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def _get_sorted_data(self):
"""When using a copy of the data, it's better to have that copy be
sorted, but we do it lazily using this method, in case no
sorted measures are used. I.e., if median is never called,
sorting would be a waste.
When not using a copy, it's presumed that all optimizations
are on the user.
"""
if not self._use_copy:
return sorted(self.data)
elif not self._is_sorted:
self.data.sort()
return self.data
def clear_cache(self):
"""``Stats`` objects automatically cache intermediary calculations
that can be reused. For instance, accessing the ``std_dev``
attribute after the ``variance`` attribute will be
significantly faster for medium-to-large datasets.
If you modify the object by adding additional data points,
call this function to have the cached statistics recomputed.
"""
for attr_name in self._prop_attr_names:
attr_name = getattr(self.__class__, attr_name).internal_name
if not hasattr(self, attr_name):
continue
delattr(self, attr_name)
return
def _calc_count(self):
"""The number of items in this Stats object. Returns the same as
:func:`len` on a Stats object, but provided for pandas terminology
parallelism.
>>> Stats(range(20)).count
20
"""
return len(self.data)
count = _StatsProperty('count', _calc_count)
def _calc_mean(self):
"""
The arithmetic mean, or "average". Sum of the values divided by
the number of values.
>>> mean(range(20))
9.5
>>> mean(list(range(19)) + [949]) # 949 is an arbitrary outlier
56.0
"""
return sum(self.data, 0.0) / len(self.data)
mean = _StatsProperty('mean', _calc_mean)
def _calc_max(self):
"""
The maximum value present in the data.
>>> Stats([2, 1, 3]).max
3
"""
if self._is_sorted:
return self.data[-1]
return max(self.data)
max = _StatsProperty('max', _calc_max)
def _calc_min(self):
"""
The minimum value present in the data.
>>> Stats([2, 1, 3]).min
1
"""
if self._is_sorted:
return self.data[0]
return min(self.data)
min = _StatsProperty('min', _calc_min)
def _calc_median(self):
"""
The median is either the middle value or the average of the two
middle values of a sample. Compared to the mean, it's generally
more resilient to the presence of outliers in the sample.
>>> median([2, 1, 3])
2
>>> median(range(97))
48
>>> median(list(range(96)) + [1066]) # 1066 is an arbitrary outlier
48
"""
return self._get_quantile(self._get_sorted_data(), 0.5)
median = _StatsProperty('median', _calc_median)
def _calc_iqr(self):
"""Inter-quartile range (IQR) is the difference between the 75th
percentile and 25th percentile. IQR is a robust measure of
dispersion, like standard deviation, but safer to compare
between datasets, as it is less influenced by outliers.
>>> iqr([1, 2, 3, 4, 5])
2
>>> iqr(range(1001))
500
"""
return self.get_quantile(0.75) - self.get_quantile(0.25)
iqr = _StatsProperty('iqr', _calc_iqr)
def _calc_trimean(self):
"""The trimean is a robust measure of central tendency, like the
median, that takes the weighted average of the median and the
upper and lower quartiles.
>>> trimean([2, 1, 3])
2.0
>>> trimean(range(97))
48.0
>>> trimean(list(range(96)) + [1066]) # 1066 is an arbitrary outlier
48.0
"""
sorted_data = self._get_sorted_data()
gq = lambda q: self._get_quantile(sorted_data, q)
return (gq(0.25) + (2 * gq(0.5)) + gq(0.75)) / 4.0
trimean = _StatsProperty('trimean', _calc_trimean)
def _calc_variance(self):
"""\
Variance is the average of the squares of the difference between
each value and the mean.
>>> variance(range(97))
784.0
"""
global mean # defined elsewhere in this file
return mean(self._get_pow_diffs(2))
variance = _StatsProperty('variance', _calc_variance)
def _calc_std_dev(self):
"""\
Standard deviation. Square root of the variance.
>>> std_dev(range(97))
28.0
"""
return self.variance ** 0.5
std_dev = _StatsProperty('std_dev', _calc_std_dev)
def _calc_median_abs_dev(self):
"""\
Median Absolute Deviation is a robust measure of statistical
dispersion: http://en.wikipedia.org/wiki/Median_absolute_deviation
>>> median_abs_dev(range(97))
24.0
"""
global median # defined elsewhere in this file
sorted_vals = sorted(self.data)
x = float(median(sorted_vals))
return median([abs(x - v) for v in sorted_vals])
median_abs_dev = _StatsProperty('median_abs_dev', _calc_median_abs_dev)
mad = median_abs_dev # convenience
def _calc_rel_std_dev(self):
"""\
Standard deviation divided by the absolute value of the average.
http://en.wikipedia.org/wiki/Relative_standard_deviation
>>> print('%1.3f' % rel_std_dev(range(97)))
0.583
"""
abs_mean = abs(self.mean)
if abs_mean:
return self.std_dev / abs_mean
else:
return self.default
rel_std_dev = _StatsProperty('rel_std_dev', _calc_rel_std_dev)
def _calc_skewness(self):
"""\
Indicates the asymmetry of a curve. Positive values mean the bulk
of the values are on the left side of the average and vice versa.
http://en.wikipedia.org/wiki/Skewness
See the module docstring for more about statistical moments.
>>> skewness(range(97)) # symmetrical around 48.0
0.0
>>> left_skewed = skewness(list(range(97)) + list(range(10)))
>>> right_skewed = skewness(list(range(97)) + list(range(87, 97)))
>>> round(left_skewed, 3), round(right_skewed, 3)
(0.114, -0.114)
"""
data, s_dev = self.data, self.std_dev
if len(data) > 1 and s_dev > 0:
return (sum(self._get_pow_diffs(3)) /
float((len(data) - 1) * (s_dev ** 3)))
else:
return self.default
skewness = _StatsProperty('skewness', _calc_skewness)
def _calc_kurtosis(self):
"""\
Indicates how much data is in the tails of the distribution. The
result is always positive, with the normal "bell-curve"
distribution having a kurtosis of 3.
http://en.wikipedia.org/wiki/Kurtosis
See the module docstring for more about statistical moments.
>>> kurtosis(range(9))
1.99125
With a kurtosis of 1.99125, [0, 1, 2, 3, 4, 5, 6, 7, 8] is more
centrally distributed than the normal curve.
"""
data, s_dev = self.data, self.std_dev
if len(data) > 1 and s_dev > 0:
return (sum(self._get_pow_diffs(4)) /
float((len(data) - 1) * (s_dev ** 4)))
else:
return 0.0
kurtosis = _StatsProperty('kurtosis', _calc_kurtosis)
def _calc_pearson_type(self):
precision = self._pearson_precision
skewness = self.skewness
kurtosis = self.kurtosis
beta1 = skewness ** 2.0
beta2 = kurtosis * 1.0
# TODO: range checks?
c0 = (4 * beta2) - (3 * beta1)
c1 = skewness * (beta2 + 3)
c2 = (2 * beta2) - (3 * beta1) - 6
if round(c1, precision) == 0:
if round(beta2, precision) == 3:
return 0 # Normal
else:
if beta2 < 3:
return 2 # Symmetric Beta
elif beta2 > 3:
return 7
elif round(c2, precision) == 0:
return 3 # Gamma
else:
k = c1 ** 2 / (4 * c0 * c2)
if k < 0:
return 1 # Beta
raise RuntimeError('missed a spot')
pearson_type = _StatsProperty('pearson_type', _calc_pearson_type)
@staticmethod
def _get_quantile(sorted_data, q):
data, n = sorted_data, len(sorted_data)
idx = q / 1.0 * (n - 1)
idx_f, idx_c = int(floor(idx)), int(ceil(idx))
if idx_f == idx_c:
return data[idx_f]
return (data[idx_f] * (idx_c - idx)) + (data[idx_c] * (idx - idx_f))
def get_quantile(self, q):
"""Get a quantile from the dataset. Quantiles are floating point
values between ``0.0`` and ``1.0``, with ``0.0`` representing
the minimum value in the dataset and ``1.0`` representing the
maximum. ``0.5`` represents the median:
>>> Stats(range(100)).get_quantile(0.5)
49.5
"""
q = float(q)
if not 0.0 <= q <= 1.0:
raise ValueError('expected q between 0.0 and 1.0, not %r' % q)
elif not self.data:
return self.default
return self._get_quantile(self._get_sorted_data(), q)
def get_zscore(self, value):
"""Get the z-score for *value* in the group. If the standard deviation
        is 0, then 0, inf, or -inf will be returned to indicate whether the value is
equal to, greater than or below the group's mean.
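        A small worked example (values chosen so the z-score is exactly zero):
        >>> Stats([1, 2, 3]).get_zscore(2)
        0.0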
"""
mean = self.mean
if self.std_dev == 0:
if value == mean:
return 0
if value > mean:
return float('inf')
if value < mean:
return float('-inf')
return (float(value) - mean) / self.std_dev
def trim_relative(self, amount=0.15):
"""A utility function used to cut a proportion of values off each end
of a list of values. This has the effect of limiting the
effect of outliers.
Args:
amount (float): A value between 0.0 and 0.5 to trim off of
each side of the data.
.. note:
This operation modifies the data in-place. It does not
make or return a copy.
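        A short example (trimming 20% off each end of ten values leaves the
        middle six):
        >>> s = Stats(list(range(10)))
        >>> s.trim_relative(0.2)
        >>> s.count
        6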
"""
trim = float(amount)
if not 0.0 <= trim < 0.5:
raise ValueError('expected amount between 0.0 and 0.5, not %r'
% trim)
size = len(self.data)
size_diff = int(size * trim)
if size_diff == 0.0:
return
self.data = self._get_sorted_data()[size_diff:-size_diff]
self.clear_cache()
def _get_pow_diffs(self, power):
"""
A utility function used for calculating statistical moments.
"""
m = self.mean
return [(v - m) ** power for v in self.data]
def _get_bin_bounds(self, count=None, with_max=False):
if not self.data:
return [0.0] # TODO: raise?
data = self.data
len_data, min_data, max_data = len(data), min(data), max(data)
if len_data < 4:
if not count:
count = len_data
dx = (max_data - min_data) / float(count)
bins = [min_data + (dx * i) for i in range(count)]
elif count is None:
# freedman algorithm for fixed-width bin selection
q25, q75 = self.get_quantile(0.25), self.get_quantile(0.75)
dx = 2 * (q75 - q25) / (len_data ** (1 / 3.0))
bin_count = max(1, int(ceil((max_data - min_data) / dx)))
bins = [min_data + (dx * i) for i in range(bin_count + 1)]
bins = [b for b in bins if b < max_data]
else:
dx = (max_data - min_data) / float(count)
bins = [min_data + (dx * i) for i in range(count)]
if with_max:
bins.append(float(max_data))
return bins
def get_histogram_counts(self, bins=None, **kw):
"""Produces a list of ``(bin, count)`` pairs comprising a histogram of
the Stats object's data, using fixed-width bins. See
:meth:`Stats.format_histogram` for more details.
Args:
bins (int): maximum number of bins, or list of
floating-point bin boundaries. Defaults to the output of
Freedman's algorithm.
bin_digits (int): Number of digits used to round down the
bin boundaries. Defaults to 1.
The output of this method can be stored and/or modified, and
then passed to :func:`statsutils.format_histogram_counts` to
achieve the same text formatting as the
:meth:`~Stats.format_histogram` method. This can be useful for
snapshotting over time.
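        A small worked example (bin boundaries given explicitly so the output
        is deterministic):
        >>> Stats([1, 2, 2, 3]).get_histogram_counts(bins=[0, 2])
        [(0.0, 1), (2.0, 3)]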
"""
bin_digits = int(kw.pop('bin_digits', 1))
if kw:
raise TypeError('unexpected keyword arguments: %r' % kw.keys())
if not bins:
bins = self._get_bin_bounds()
else:
try:
bin_count = int(bins)
except TypeError:
try:
bins = [float(x) for x in bins]
except Exception:
raise ValueError('bins expected integer bin count or list'
' of float bin boundaries, not %r' % bins)
if self.min < bins[0]:
bins = [self.min] + bins
else:
bins = self._get_bin_bounds(bin_count)
# floor and ceil really should have taken ndigits, like round()
round_factor = 10.0 ** bin_digits
bins = [floor(b * round_factor) / round_factor for b in bins]
bins = sorted(set(bins))
idxs = [bisect.bisect(bins, d) - 1 for d in self.data]
count_map = {} # would have used Counter, but py26 support
for idx in idxs:
try:
count_map[idx] += 1
except KeyError:
count_map[idx] = 1
bin_counts = [(b, count_map.get(i, 0)) for i, b in enumerate(bins)]
return bin_counts
def format_histogram(self, bins=None, **kw):
"""Produces a textual histogram of the data, using fixed-width bins,
allowing for simple visualization, even in console environments.
>>> data = list(range(20)) + list(range(5, 15)) + [10]
>>> print(Stats(data).format_histogram(width=30))
0.0: 5 #########
4.4: 8 ###############
8.9: 11 ####################
13.3: 5 #########
17.8: 2 ####
In this histogram, five values are between 0.0 and 4.4, eight
are between 4.4 and 8.9, and two values lie between 17.8 and
the max.
You can specify the number of bins, or provide a list of
bin boundaries themselves. If no bins are provided, as in the
example above, `Freedman's algorithm`_ for bin selection is
used.
Args:
bins (int): Maximum number of bins for the
histogram. Also accepts a list of floating-point
bin boundaries. If the minimum boundary is still
greater than the minimum value in the data, that
boundary will be implicitly added. Defaults to the bin
boundaries returned by `Freedman's algorithm`_.
bin_digits (int): Number of digits to round each bin
to. Note that bins are always rounded down to avoid
clipping any data. Defaults to 1.
width (int): integer number of columns in the longest line
in the histogram. Defaults to console width on Python
3.3+, or 80 if that is not available.
format_bin (callable): Called on each bin to create a
label for the final output. Use this function to add
units, such as "ms" for milliseconds.
Should you want something more programmatically reusable, see
        the :meth:`~Stats.get_histogram_counts` method, the output of
        which is used by format_histogram. The :meth:`~Stats.describe`
method is another useful summarization method, albeit less
visual.
.. _Freedman's algorithm: https://en.wikipedia.org/wiki/Freedman%E2%80%93Diaconis_rule
"""
width = kw.pop('width', None)
format_bin = kw.pop('format_bin', None)
bin_counts = self.get_histogram_counts(bins=bins, **kw)
return format_histogram_counts(bin_counts,
width=width,
format_bin=format_bin)
def describe(self, quantiles=None, format=None):
"""Provides standard summary statistics for the data in the Stats
object, in one of several convenient formats.
Args:
quantiles (list): A list of numeric values to use as
quantiles in the resulting summary. All values must be
0.0-1.0, with 0.5 representing the median. Defaults to
``[0.25, 0.5, 0.75]``, representing the standard
quartiles.
format (str): Controls the return type of the function,
with one of three valid values: ``"dict"`` gives back
a :class:`dict` with the appropriate keys and
values. ``"list"`` is a list of key-value pairs in an
order suitable to pass to an OrderedDict or HTML
table. ``"text"`` converts the values to text suitable
for printing, as seen below.
Here is the information returned by a default ``describe``, as
presented in the ``"text"`` format:
>>> stats = Stats(range(1, 8))
>>> print(stats.describe(format='text'))
count: 7
mean: 4.0
std_dev: 2.0
mad: 2.0
min: 1
0.25: 2.5
0.5: 4
0.75: 5.5
max: 7
For more advanced descriptive statistics, check out my blog
post on the topic `Statistics for Software
<https://www.paypal-engineering.com/2016/04/11/statistics-for-software/>`_.
"""
if format is None:
format = 'dict'
elif format not in ('dict', 'list', 'text'):
raise ValueError('invalid format for describe,'
' expected one of "dict"/"list"/"text", not %r'
% format)
quantiles = quantiles or [0.25, 0.5, 0.75]
q_items = []
for q in quantiles:
q_val = self.get_quantile(q)
q_items.append((str(q), q_val))
items = [('count', self.count),
('mean', self.mean),
('std_dev', self.std_dev),
('mad', self.mad),
('min', self.min)]
items.extend(q_items)
items.append(('max', self.max))
if format == 'dict':
ret = dict(items)
elif format == 'list':
ret = items
elif format == 'text':
ret = '\n'.join(['%s%s' % ((label + ':').ljust(10), val)
for label, val in items])
return ret
def describe(data, quantiles=None, format=None):
"""A convenience function to get standard summary statistics useful
for describing most data. See :meth:`Stats.describe` for more
details.
>>> print(describe(range(7), format='text'))
count: 7
mean: 3.0
std_dev: 2.0
mad: 2.0
min: 0
0.25: 1.5
0.5: 3
0.75: 4.5
max: 6
See :meth:`Stats.format_histogram` for another very useful
summarization that uses textual visualization.
"""
return Stats(data).describe(quantiles=quantiles, format=format)
def _get_conv_func(attr_name):
def stats_helper(data, default=0.0):
return getattr(Stats(data, default=default, use_copy=False),
attr_name)
return stats_helper
for attr_name, attr in list(Stats.__dict__.items()):
if isinstance(attr, _StatsProperty):
if attr_name in ('max', 'min', 'count'): # don't shadow builtins
continue
if attr_name in ('mad',): # convenience aliases
continue
func = _get_conv_func(attr_name)
func.__doc__ = attr.func.__doc__
globals()[attr_name] = func
delattr(Stats, '_calc_' + attr_name)
# cleanup
del attr
del attr_name
del func
def format_histogram_counts(bin_counts, width=None, format_bin=None):
"""The formatting logic behind :meth:`Stats.format_histogram`, which
takes the output of :meth:`Stats.get_histogram_counts`, and passes
them to this function.
Args:
bin_counts (list): A list of bin values to counts.
width (int): Number of character columns in the text output,
defaults to 80 or console width in Python 3.3+.
format_bin (callable): Used to convert bin values into string
labels.
"""
lines = []
if not format_bin:
format_bin = lambda v: v
if not width:
try:
import shutil # python 3 convenience
width = shutil.get_terminal_size()[0]
except Exception:
width = 80
bins = [b for b, _ in bin_counts]
count_max = max([count for _, count in bin_counts])
count_cols = len(str(count_max))
labels = ['%s' % format_bin(b) for b in bins]
label_cols = max([len(l) for l in labels])
tmp_line = '%s: %s #' % ('x' * label_cols, count_max)
bar_cols = max(width - len(tmp_line), 3)
line_k = float(bar_cols) / count_max
tmpl = "{label:>{label_cols}}: {count:>{count_cols}} {bar}"
for label, (bin_val, count) in zip(labels, bin_counts):
bar_len = int(round(count * line_k))
bar = ('#' * bar_len) or '|'
line = tmpl.format(label=label,
label_cols=label_cols,
count=count,
count_cols=count_cols,
bar=bar)
lines.append(line)
return '\n'.join(lines)
|
[
"[email protected]"
] | |
e8940800e8c4feb508acd637a11a9f70573c15ce
|
a1ad2715e306fd4e7eaeda5348e00e1a363e7884
|
/leetcode/hashmap.py
|
17f3ac037e825715ccd3106d80f804bebe69a9a4
|
[] |
no_license
|
MayankMaheshwar/DS-and-Algo-solving
|
cef54a800b3e8a070a707f97b4f30fccaa17d5c6
|
ac6ea8f880920242a55d40c747368d68cb6f7534
|
refs/heads/master
| 2022-12-07T07:55:08.380505 | 2022-12-05T09:32:14 | 2022-12-05T09:32:14 | 237,103,468 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 458 |
py
|
class Solution:
# @param A : list of integers
# @param B : list of integers
# @param C : list of integers
# @return a list of integers
def solve(self, A, B, C):
Hash1, Hash2, Hash3 = set(A), set(B), set(C)
res = set()
for i in Hash1:
if i in Hash2 or i in Hash3:
res.add(i)
for j in Hash2:
if j in Hash3:
res.add(j)
return sorted(list(res))
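# Example (hypothetical input): keep values present in A and also in B or C,
# plus values shared by B and C, returned sorted:
#   Solution().solve([1, 2], [2, 3], [3])  ->  [2, 3]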
|
[
"[email protected]"
] | |
5a9986aa822a01cb23c0463c4f918c8d9472004c
|
968d726d3acfca79d6a0b445b8930b62136a7504
|
/python/app/loan_approve.py
|
2a432a006ede8fd08915368b16d20eec861bdf61
|
[] |
no_license
|
arita37/avenir
|
db97c04ca598c48bb7a2bc90f20ef3946d261561
|
ea2de8538b2596d9f6bb09ba8398a92030fbd59b
|
refs/heads/master
| 2023-08-27T14:34:38.998926 | 2021-10-05T05:43:48 | 2021-10-05T05:43:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 11,607 |
py
|
#!/usr/local/bin/python3
# avenir-python: Machine Learning
# Author: Pranab Ghosh
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import sys
from random import randint
import time
from array import *
sys.path.append(os.path.abspath("../lib"))
sys.path.append(os.path.abspath("../supv"))
from mlutil import *
from util import *
from sampler import *
from tnn import *
from mcalib import *
NFEAT = 11
NFEAT_EXT = 14
class LoanApprove:
def __init__(self, numLoans=None):
self.numLoans = numLoans
self.marStatus = ["married", "single", "divorced"]
self.loanTerm = ["7", "15", "30"]
self.addExtra = False
def initOne(self):
"""
initialize samplers
"""
self.threshold = 118
self.margin = 5
# distributions
self.marriedDistr = CategoricalRejectSampler(("married", 80), ("single", 100), ("divorced", 30))
self.numChildDistr = CategoricalRejectSampler(("1", 80), ("2", 100), ("3", 40))
self.eduDistr = CategoricalRejectSampler(("1", 60), ("2", 100), ("3", 30))
self.selfEmployedDistr = CategoricalRejectSampler(("1", 30), ("0", 100))
self.incomeDistr = GaussianRejectSampler(100,20)
self.numYearsExpDistr = GaussianRejectSampler(10,3)
self.outstandingLoanDistr = GaussianRejectSampler(20,5)
self.loanAmDistr = GaussianRejectSampler(300,70)
self.loanTermDistr = CategoricalRejectSampler(("10", 40), ("15", 60), ("30", 100))
self.credScoreDistr = GaussianRejectSampler(700,50)
zipClusterDistr = [("high", 30), ("average", 100), ("low", 60)]
zipClusters = {\
"high" : ["95061", "95062", "95064", "95065", "95067"], \
"average" : ["95103", "95104", "95106", "95107", "95109", "95113", "95115", "95118", "95121" ], \
"low" : ["95376", "95377", "95378", "95353", "95354", "95356"]}
self.zipDistr = ClusterSampler(zipClusters, ("high", 30), ("average", 100), ("low", 60))
# scores
self.marriedScore = {"married" : 16, "single" : 10, "divorced" : 6}
self.numChildScore = {1 : 12, 2 : 9 , 3 : 4}
self.eduScore = {1 : 7 , 2 : 12, 3 : 15}
self.selfEmployedScore = {"0" : 15, "1" : 11}
self.incomeScore = StepFunction((50, 70, 2), (70, 90, 5), (90, 100, 8), (100, 110, 12),\
(110, 130, 14), (130, 150, 18))
self.numYearsExpScore = StepFunction((6, 10, 4), (10, 14, 9), (14, 20, 13))
self.outstandingLoanScore = StepFunction((2, 4, 16), (4, 8, 13), (8, 14, 10), (14, 22, 8),\
(22, 32, 6), (32, 44, 2))
self.loanAmScore = StepFunction((200, 250, 22), (250, 300, 20), (300, 350, 16), (350, 400, 10),\
(400, 450, 5), (450, 500, 2))
self.loanTermScore = {10 : 15, 15 : 18 , 30 : 23}
self.credScoreScore = StepFunction((600, 650, 8), (650, 700, 12), (700, 750, 17), (750, 800, 23),\
(800, 850, 31))
self.zipRateScore = {"high" : 17, "average" : 15, "low" : 11}
def generateOne(self):
"""
sample
"""
self.initOne()
posCount = 0
for i in range(self.numLoans):
id = genID(10)
married = self.marriedDistr.sample()
numChild = int(self.numChildDistr.sample())
edu = int(self.eduDistr.sample())
selfEmployed = self.selfEmployedDistr.sample()
income = int(self.incomeDistr.sample())
income = rangeSample(income, 50, 160)
numYearsExp = int(self.numYearsExpDistr.sample())
numYearsExp = rangeSample(numYearsExp, 6, 20)
outstandingLoan = int(self.outstandingLoanDistr.sample())
loanAm = int(self.loanAmDistr.sample())
loanAm = rangeSample(loanAm, 200, 500)
loanTerm = int(self.loanTermDistr.sample())
credScore = int(self.credScoreDistr.sample())
credScore = rangeSample(credScore, 600, 850)
(zipRate, zipCode) = self.zipDistr.sample()
# score for each score
score = 0
score += self.marriedScore[married]
score += self.numChildScore[numChild]
score += self.eduScore[edu]
score += self.selfEmployedScore[selfEmployed]
score += self.incomeScore.find(income)
score += self.numYearsExpScore.find(numYearsExp)
score += self.outstandingLoanScore.find(outstandingLoan)
score += self.loanAmScore.find(loanAm)
score += self.loanTermScore[loanTerm]
score += self.credScoreScore.find(credScore)
score += self.zipRateScore[zipRate]
# feature coupling
if (income > 140 and loanAm < 300):
score += 10
if (income < 80 and loanAm > 280):
score -= 12
if (credScore > 760 and loanAm < 320):
score += 12
if (credScore < 700 and loanAm > 260):
score -= 14
if (numChild == 3 and income < 100):
score -= 8
# outcome
if score > (self.threshold + self.margin):
approved = 1
elif score < (self.threshold - self.margin):
approved = 0
else:
if randint(0, 100) < 50:
approved = 1
else:
approved = 0
if approved == 1:
posCount += 1
print ("{},{},{},{},{},{},{},{},{},{},{},{},{}".format(id, married, numChild, edu, selfEmployed, income,\
numYearsExp, outstandingLoan, loanAm, loanTerm, credScore, zipCode, approved))
#print "positive count " + str(posCount)
def initTwo(self):
"""
initialize samplers
"""
self.approvDistr = CategoricalRejectSampler(("1", 60), ("0", 40))
self.featCondDister = {}
#marital status
key = ("1", 0)
distr = CategoricalRejectSampler(("married", 100), ("single", 60), ("divorced", 40))
self.featCondDister[key] = distr
key = ("0", 0)
distr = CategoricalRejectSampler(("married", 40), ("single", 100), ("divorced", 40))
self.featCondDister[key] = distr
# num of children
key = ("1", 1)
distr = CategoricalRejectSampler(("1", 100), ("2", 90), ("3", 40))
self.featCondDister[key] = distr
key = ("0", 1)
distr = CategoricalRejectSampler(("1", 50), ("2", 70), ("3", 100))
self.featCondDister[key] = distr
# education
key = ("1", 2)
distr = CategoricalRejectSampler(("1", 30), ("2", 80), ("3", 100))
self.featCondDister[key] = distr
key = ("0", 2)
distr = CategoricalRejectSampler(("1", 100), ("2", 40), ("3", 30))
self.featCondDister[key] = distr
#self employed
key = ("1", 3)
distr = CategoricalRejectSampler(("1", 40), ("0", 100))
self.featCondDister[key] = distr
key = ("0", 3)
distr = CategoricalRejectSampler(("1", 100), ("0", 30))
self.featCondDister[key] = distr
# income
key = ("1", 4)
distr = GaussianRejectSampler(120,15)
self.featCondDister[key] = distr
key = ("0", 4)
distr = GaussianRejectSampler(50,10)
self.featCondDister[key] = distr
# years of experience
key = ("1", 5)
distr = GaussianRejectSampler(15,3)
self.featCondDister[key] = distr
key = ("0", 5)
distr = GaussianRejectSampler(5,1)
self.featCondDister[key] = distr
# number of years in current job
key = ("1", 6)
distr = GaussianRejectSampler(3,.5)
self.featCondDister[key] = distr
key = ("0", 6)
distr = GaussianRejectSampler(1,.2)
self.featCondDister[key] = distr
# outstanding debt
key = ("1", 7)
distr = GaussianRejectSampler(20,5)
self.featCondDister[key] = distr
key = ("0", 7)
distr = GaussianRejectSampler(60,10)
self.featCondDister[key] = distr
# loan amount
key = ("1", 8)
distr = GaussianRejectSampler(300,50)
self.featCondDister[key] = distr
key = ("0", 8)
distr = GaussianRejectSampler(600,50)
self.featCondDister[key] = distr
# loan term
key = ("1", 9)
distr = CategoricalRejectSampler(("7", 100), ("15", 40), ("30", 60))
self.featCondDister[key] = distr
key = ("0", 9)
distr = CategoricalRejectSampler(("7", 30), ("15", 100), ("30", 60))
self.featCondDister[key] = distr
# credit score
key = ("1", 10)
distr = GaussianRejectSampler(700,20)
self.featCondDister[key] = distr
key = ("0", 10)
distr = GaussianRejectSampler(500,50)
self.featCondDister[key] = distr
if self.addExtra:
# saving
key = ("1", 11)
distr = NormalSampler(80,10)
self.featCondDister[key] = distr
key = ("0", 11)
distr = NormalSampler(60,8)
self.featCondDister[key] = distr
# retirement
zDistr = NormalSampler(0, 0)
key = ("1", 12)
sDistr = DiscreteRejectSampler(0,1,1,20,80)
nzDistr = NormalSampler(100,20)
distr = DistrMixtureSampler(sDistr, zDistr, nzDistr)
self.featCondDister[key] = distr
key = ("0", 12)
sDistr = DiscreteRejectSampler(0,1,1,50,50)
nzDistr = NormalSampler(40,10)
distr = DistrMixtureSampler(sDistr, zDistr, nzDistr)
self.featCondDister[key] = distr
# num of prior mortgage loans
key = ("1", 13)
distr = DiscreteRejectSampler(0,3,1,20,60,40,15)
self.featCondDister[key] = distr
key = ("0", 13)
distr = DiscreteRejectSampler(0,1,1,70,30)
self.featCondDister[key] = distr
def generateTwo(self, noise, keyLen, addExtra):
"""
ancestral sampling
"""
self.addExtra = addExtra
self.initTwo()
#error
erDistr = GaussianRejectSampler(0, noise)
#sampler
numChildren = NFEAT_EXT if self.addExtra else NFEAT
sampler = AncestralSampler(self.approvDistr, self.featCondDister, numChildren)
for i in range(self.numLoans):
(claz, features) = sampler.sample()
# add noise
features[4] = int(features[4])
features[7] = int(features[7])
features[8] = int(features[8])
features[10] = int(features[10])
if self.addExtra:
features[11] = int(features[11])
features[12] = int(features[12])
claz = addNoiseCat(claz, ["0", "1"], noise)
strFeatures = list(map(lambda f: toStr(f, 2), features))
rec = genID(keyLen) + "," + ",".join(strFeatures) + "," + claz
print (rec)
def encodeDummy(self, fileName, extra):
"""
dummy var encoding
"""
catVars = {}
catVars[1] = self.marStatus
catVars[10] = self.loanTerm
rSize = NFEAT_EXT if extra else NFEAT
rSize += 2
dummyVarGen = DummyVarGenerator(rSize, catVars, "1", "0", ",")
for row in fileRecGen(fileName, None):
newRow = dummyVarGen.processRow(row)
print (newRow)
def encodeLabel(self, fileName):
"""
label encoding
"""
catVars = {}
catVars[1] = self.marStatus
catVars[10] = self.loanTerm
encoder = CatLabelGenerator(catVars, ",")
for row in fileRecGen(fileName, None):
newRow = encoder.processRow(row)
print (newRow)
##########################################################################################
if __name__ == "__main__":
op = sys.argv[1]
if op == "generate" or op == "genOne" :
""" generate data """
numLoans = int(sys.argv[2])
loan = LoanApprove(numLoans)
loan.generateOne()
elif op == "genTwo":
""" generate data """
numLoans = int(sys.argv[2])
loan = LoanApprove(numLoans)
noise = float(sys.argv[3])
keyLen = int(sys.argv[4])
addExtra = True if len(sys.argv) == 6 and sys.argv[5] == "extra" else False
loan.generateTwo(noise, keyLen, addExtra)
elif op == "encDummy":
""" encode binary """
fileName = sys.argv[2]
extra = True if len(sys.argv) == 4 and sys.argv[3] == "extra" else False
loan = LoanApprove()
loan.encodeDummy(fileName, extra)
elif op == "encLabel":
""" encode label """
fileName = sys.argv[2]
loan = LoanApprove()
loan.encodeLabel(fileName)
elif op == "nnTrain":
""" tran neural network model """
prFile = sys.argv[2]
clflier = FeedForwardNetwork(prFile)
clflier.buildModel()
FeedForwardNetwork.batchTrain(clflier)
else:
exitWithMsg("unknow operation")
|
[
"[email protected]"
] | |
00ad4f40b3123901479230db421ad0cc1ba9fb83
|
3ae937aec30f413dc87a1a6398ea6ef95b90f58a
|
/Estrutura-Decisao/ex10.py
|
372887472117d1248bf3d2232b86466610e03f97
|
[] |
no_license
|
lucas-sigma/Python-Brasil-Resposta-Exercicios
|
74f53c2531bea03fb65fa9addf8106450edb5d5e
|
af2df5b7d8d013ca176817af5f7bfa08ba5e33d1
|
refs/heads/master
| 2020-03-24T22:53:36.317235 | 2018-11-23T19:43:24 | 2018-11-23T19:43:24 | 143,107,121 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 513 |
py
|
# Write a program that asks what shift you study in. Ask the user to type M (matutino/morning), V (vespertino/afternoon) or N (noturno/evening). Print "Bom Dia!", "Boa Tarde!", "Boa Noite!" or "Valor Inválido!" as appropriate.
print('M - Matutino | V - Vespertino | N - Noturno')
turno = input('Digite o turno em que você estuda: ')
if turno.upper() == 'M':
print('Bom Dia!')
elif turno.upper() == 'V':
print('Boa Tarde!')
elif turno.upper() == 'N':
print('Boa Noite!')
else:
print('Valor Inválido!')
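# Example: entering 'm' or 'M' prints 'Bom Dia!'; any letter other than
# M, V or N prints 'Valor Inválido!'.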
|
[
"[email protected]"
] | |
92f8adefb2a72e066420f7f170628fd3531a481a
|
8ae2dc044d056cb9a4b0cd25fbaed977288ba926
|
/Python_OOP/car.py
|
016554f5f0206353f5ff091f7fdd5e1875e00d1a
|
[] |
no_license
|
februarypython/Alex_Green
|
17a3c79d4e1eb751e6b5d76b2ab036506ba43a12
|
ad72d761c33708d80200ee896e1d145fd74009c1
|
refs/heads/master
| 2021-04-30T05:06:31.439798 | 2018-04-03T20:54:02 | 2018-04-03T20:54:02 | 121,407,382 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,274 |
py
|
# Create a class called Car. In the__init__(), allow the user to specify the following attributes: price, speed, fuel, mileage.
# If the price is greater than 10,000, set the tax to be 15%. Otherwise, set the tax to be 12%.
# Create six different instances of the class Car. In the class have a method called display_all() that returns all the information
# about the car as a string. In your __init__(), call this display_all() method to display information about the car once the
# attributes have been defined.
class Car(object):
def __init__(self, price, speed, fuel, mileage):
self.price = price
self.speed = speed
self.fuel = fuel
self.mileage = mileage
self.display_all()
def display_all(self):
print "Price:", self.price
print "Speed:", self.speed, "mph"
print "Fuel:", self.fuel
print "Mileage:", self.mileage, "mpg"
if self.price > 10000:
print "Tax: 0.15"
else:
print "Tax: 0.12"
print "---------------"
car1 = Car(11000, 100, "full", 25)
car2 = Car(8000, 75, "empty", 15)
car3 = Car(14000, 85, "mostly full", 7000)
car4 = Car(200, 25, "none", 8)
car5 = Car(55000, 200, "full", 15)
car6 = Car(6500, 112, "mostly empty", 28)
|
[
"[email protected]"
] | |
e7f6ee71d68187d1c702e84d43bfe2bc4768f18f
|
67e817ca139ca039bd9eee5b1b789e5510119e83
|
/Linked_List/Swap_Nodes_in_Pairs.py
|
c36d59cada641e25326ec7b2bb48a3989fbbf1e3
|
[] |
no_license
|
dstch/my_leetcode
|
0dc41e7a2526c2d85b6b9b6602ac53f7a6ba9273
|
48a8c77e81cd49a75278551048028c492ec62994
|
refs/heads/master
| 2021-07-25T21:30:41.705258 | 2021-06-06T08:58:29 | 2021-06-06T08:58:29 | 164,360,878 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,189 |
py
|
#!/usr/bin/env python
# encoding: utf-8
"""
@author: dstch
@license: (C) Copyright 2013-2019, Regulus Tech.
@contact: [email protected]
@file: Swap_Nodes_in_Pairs.py
@time: 2019/1/15 22:10
@desc: Given a linked list, swap every two adjacent nodes and return its head.
Example:
Given 1->2->3->4, you should return the list as 2->1->4->3.
"""
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def swapPairs(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
return_head = ListNode(0)
return_head.next = head
left = head
head = return_head
while left is not None and left.next is not None:
right = left.next
head.next = right
left.next = right.next
right.next = left
head = left
left = left.next
return return_head.next
if __name__ == '__main__':
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
head.next.next.next = ListNode(4)
s = Solution()
s.swapPairs(head)
# output: 2->1->4->3
|
[
"[email protected]"
] | |
6f432459933bf081f23978750e8849105323f79a
|
b7b2f80ab5e1ee0ea028576e3014b62b8d3a8d7e
|
/neural/neural-003/neunet.py
|
25d12bbdf37b38a80a32b4d929cd5f48eb1e1e90
|
[] |
no_license
|
pglen/pgpygtk
|
4d1405478a714f003984cf3e3db04ff1f767470b
|
33f58010e304f1a312f2356de453ecedb7aa21ef
|
refs/heads/master
| 2021-01-22T01:18:52.238415 | 2019-01-01T01:37:24 | 2019-01-01T01:37:24 | 102,215,955 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,396 |
py
|
#!/usr/bin/env python
# ------------------------------------------------------------------------
# Neural network test
import os, sys, getopt, signal, select, socket, time, struct
import random, stat, math
from pgutil import *
from mainwin import *
# ------------------------------------------------------------------------
# Globals
gl_level = 0
gl_num = 0
gl_serial = 0
# ------------------------------------------------------------------------
# Old values for the undo
gl_old_bias = 0
gl_old_weight = 0
gl_old_post = 0
gl_last_neuron = None
# ------------------------------------------------------------------------
verbose = 0
# Deliver a random number in the range -1 to +1
def neurand():
ret = random.random() * 2 - 1;
#print "%+0.3f " % ret,
return ret
# Deliver a random member of an array
def randmemb(var):
rnd = random.randint(0, len(var)-1)
#print "randmemb", rnd, "of", len(var)-1
return var[rnd];
# ------------------------------------------------------------------------
# Transfer function for neunet.
# Calculate logaritmic taper, preserve sign
def tfunc(val):
ret = 0.
try:
cc = float(val)
ll = math.log(1 + 1 * abs(cc))
ret = 1 * ll
except ValueError:
print sys.exc_info()
pass
except:
pass
if val < 0:
ret = -ret;
return ret
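# Spot values for the taper (natural log, sign preserved):
# tfunc(0) == 0.0, tfunc(1) ~= 0.693, tfunc(-1) ~= -0.693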
# ------------------------------------------------------------------------
# Basic building block of the neuron's input:
class tenticle():
def __init__(self, curr):
self.input = neurand()
self.weight = neurand()
self.bias = neurand()
self.post = neurand()
self.curr = curr
# Calculate output
def fire(self, parent):
#res = (self.input + self.bias)
#res = (self.input + self.bias + self.post)
#res = (self.input + self.bias - self.post) * (1 + self.weight / 4 )
#res = (self.input + self.bias) * (self.input + self.post) #* self.weight
res = (self.input) * (self.weight + self.bias)
#res = (self.input) * (1. + self.weight) + self.bias
#print parent.level, parent.num, self.curr, \
# "input", self.input, "weight", self.weight, "bias", self.bias, "res", res
#return tfunc(res)
#return max(min(res, 2), -2)
return res
def getstr(self):
return " [inp: %0.3f" % self.input, "weigh: %0.3f" % self.weight, \
"bias: %0.3f ]" % self.bias, \
"post: %0.3f ]" % self.post,
def randtip(self):
global gl_old_bias, gl_old_weight, gl_last_neuron, gl_old_post
gl_last_neuron = self
gl_old_weight = self.weight
gl_old_bias = self.bias
gl_old_post = self.post
rr = random.randint(0, 2)
if rr == 0:
self.weight += neurand()
elif rr == 1:
self.bias += neurand()
elif rr == 2:
self.post += neurand()
else:
print "bad random index"
# ------------------------------------------------------------------------
# The basic building block
class neuron():
def __init__(self, inputs):
global gl_level, gl_num, gl_serial
#print "neuron init", gl_level, gl_num, gl_serial
self.output = 0.0
# These are helpers
self.num = gl_num; self.serial = gl_serial; self.level = gl_level
gl_serial += 1; gl_num += 1
# Tenticles are where the magic happens (dendrites)
self.tentarr = []
for aa in range(inputs):
self.tentarr.append(tenticle(aa))
# Fire one neuron by calling every tenticle's fire and averaging the result
def fire(self):
global verbose
sum = .0; xlen = len(self.tentarr)
for aa in range(xlen):
diff = self.tentarr[aa].fire(self)
#print " firing neron tent ", aa, diff
sum += diff
#sum = math.sqrt(abs(sum))
#if sum < 0: self.output = -self.output
#self.output = tfunc(self.output)
sum /= len(self.tentarr)
#self.output = sum
self.output = tfunc(sum)
if verbose:
print " ", self.level, self.num ,
for dd in self.tentarr:
#print " [%0.3f" % dd.input, "%0.3f" % dd.weight, "%0.3f] " % dd.bias,
print "[%0.3f]" % dd.input,
print "Out: %0.3f" % self.output
def randtip(self):
randmemb(self.tentarr).randtip()
if verbose:
print "randtip", self.level, self.num
# ------------------------------------------------------------------------
# One level:
class neunetlevel():
def __init__(self, members, inputs):
global gl_level, gl_num
self.membarr = []
for aa in range(members):
self.membarr.append(neuron(inputs));
self.level = gl_level
gl_level += 1; gl_num = 0
def fire(self):
#print "firing level", self.level
for aa in range(len(self.membarr)):
#print " firing member ", aa
self.membarr[aa].fire()
# Tip a random neuron
def randtip(self):
randmemb(self.membarr).randtip()
# ------------------------------------------------------------------------
# The whole net:
#
# /--\ /--\
# | |-----------| |-----
# |__|----\ /----|__|
# x
# /--\ / \ /--\
# | |---/ \---| |-----
# |__|-----------|__|
#
class neunet():
def __init__(self, levels):
self.levels = levels; #self.members = members
self.levarr = []
exp = 1
for aa in range(levels):
self.levarr.append(neunetlevel(exp, 2))
exp *= 2
# Diagnostic dump
def dump(self):
print self
for bb in self.levarr:
print " ", bb, bb.level
for cc in bb.membarr:
print " ", cc, cc.level, cc.num, cc.serial
print " Inputs: ",
for dd in cc.tentarr:
print " [%0.3f] " % dd.input,
#print " [%0.3f" % dd.input, "%0.3f" % dd.weight, "%0.3f] " % dd.bias,
pass
print
print " ", dd.getstr()
print "Out: ", "%0.3f" % cc.output
# Reverse the last poke
def undo(self):
global gl_old_bias, gl_old_weight, gl_last_neuron, gl_old_post
if gl_last_neuron != None:
gl_last_neuron.bias = gl_old_bias
gl_last_neuron.weight = gl_old_weight
gl_last_neuron.post = gl_old_post
gl_last_neuron = None
else:
print "double undo"
# Recalculate whole net
def fire(self):
xlen = len(self.levarr)
for bb in range(xlen-1, -1, -1):
#print "neu", bb,
self.levarr[bb].fire()
if bb > 0:
self.transfer(self.levarr[bb], self.levarr[bb - 1])
#print
# Propagate down the net
def transfer(self, src, targ):
#print "transfer", src, targ
xlen = len(src.membarr)
neu = 0; tent = 0
for aa in range(xlen):
#print "nn", neu, tent,
targ.membarr[neu].tentarr[tent].input = src.membarr[aa].output
tent += 1
if tent >= len(targ.membarr[neu].tentarr):
tent = 0; neu += 1
def showin(self):
#print "NeuNet output:",
arr = self.levarr[len(self.levarr) - 1]
for aa in arr.membarr:
for bb in aa.tentarr:
print "%+0.3f" % bb.input,
print
def showout(self):
#print "NeuNet input:",
arr = self.levarr[0]
for aa in arr.membarr:
print "%+0.3f" % aa.output,
print
def sum(self):
xsum = 0.
arr = self.levarr[len(self.levarr) - 1]
for aa in arr.membarr:
xsum += aa.output
return xsum
def randtip(self):
randmemb(self.levarr).randtip()
# --------------------------------------------------------------------
# Set input value on the basis of the data coming in
def setinput(self, val):
#if self.members < 8:
# raise(ValueError("Not enough inputs for supplied data"))
myarr = self.levarr[len(self.levarr)-1];
xlen = len(myarr.membarr); xshift = 1
#print "xlen", xlen
neu = 0; tent = 0
for aa in range(8):
#print "Input", aa, xshift, val & xshift, "neu", neu, "tent", tent
try:
if val & xshift != 0:
#print "bit", aa, 1,
myarr.membarr[neu].tentarr[tent].input = 1.
else:
#print "bit", aa, 0,
myarr.membarr[neu].tentarr[tent].input = -0.
except:
print "overflow on input", sys.exc_info()
pass
xshift <<= 1; tent += 1
if tent >= len(myarr.membarr[neu].tentarr):
neu += 1; tent = 0
#print
# Compare outputs with expected data
def cmp(self, val):
endarr = self.levarr[0]
diff = abs(val - endarr.membarr[0].output)
return diff
# Train this particular input to expected output
def trainone(self, val, passes = 1000):
#print "origin:", ; neu.showout()
cnt = 0; cnt2 = 0
diff = 0.; old_sum = -100.
for aa in range(passes):
self.randtip()
self.fire()
diff = self.cmp(val)
if abs(diff) >= abs(old_sum):
#print sum
self.undo()
#self.fire()
#print "undone:",
else:
print " ", "%+0.3f " % diff,
cnt += 1
#neu.showout()
old_sum = diff
#if diff < 0.01:
# break
cnt2 += 1
print
return cnt
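# Minimal usage sketch (Python 2, matching the print syntax above): build
# a 3-level net, feed in one input byte, fire it, and train toward 0.5.
#
# net = neunet(3)
# net.setinput(0xa5)
# net.fire()
# net.showout()
# net.trainone(0.5, passes=200)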
|
[
"[email protected]"
] | |
60edd456c9b939797562234d52e71e26515afd9d
|
9c52756a74134a10f8a1b35af0e2e70350d0e36a
|
/ivy_vision_tests/data.py
|
9a4634660b9d1363512cb88b9790e542dfb461ac
|
[
"Apache-2.0"
] |
permissive
|
MZSHAN/vision
|
e13023cf7d3f4a32740f60418b1071770ce26a76
|
ac9f9fcec6d3b4e12659f5216ab5ce4aacc7d9ea
|
refs/heads/master
| 2023-07-13T09:59:29.384738 | 2021-08-20T13:25:26 | 2021-08-20T13:25:26 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 12,212 |
py
|
# global
import os
import cv2
import ivy_mech
import ivy.numpy
import numpy as np
import xml.etree.ElementTree as ETree
MIN_DENOMINATOR = 1e-12
def str_list_to_list(str_list):
return [float(item) for item in str_list[1:-1].split(',')]
class TestData:
def __init__(self):
self.batch_size = 1
self.image_dims = [480, 640]
self.num_cameras = 2
# load camera data
calib_mats_list = list()
vrep_mats_list = list()
depth_maps_list = list()
state_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data', 'camera_data.xml')
with open(state_filepath) as xml_file:
data_string = xml_file.read()
root = ETree.fromstring(data_string)
for i in range(self.num_cameras):
camera_data = root.find('camera' + str(i + 1))
calib_mat_element = camera_data.find('row_major_calib_mat')
calib_mat = np.array(str_list_to_list(calib_mat_element.text)).reshape(3, 3)
calib_mats_list.append(calib_mat)
vrep_mat_element = camera_data.find('row_major_inv_ext_mat')
vrep_mat = np.array(str_list_to_list(vrep_mat_element.text)).reshape(3, 4)
vrep_mats_list.append(vrep_mat)
depth_image = cv2.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data',
'depth_image_' + str(i + 1) + '.png'), -1)
depth_buffer_bytes = depth_image.reshape(-1).tobytes()
depth_buffer_flat = np.frombuffer(depth_buffer_bytes, np.float32)
depth_map = depth_buffer_flat.reshape((self.image_dims[0], self.image_dims[1]))
depth_maps_list.append(depth_map)
# intrinsic mats
self.calib_mats = np.tile(np.concatenate([np.expand_dims(item, 0) for item in calib_mats_list], 0),
(self.batch_size, 1, 1, 1))
self.inv_calib_mats = np.linalg.inv(self.calib_mats)
# intrinsic data
self.focal_lengths = np.concatenate((self.calib_mats[:, :, 0, 0:1], self.calib_mats[:, :, 1, 1:2]), -1)
self.persp_angles = 2 * np.arctan(np.flip(np.array(self.image_dims), -1) / (2 * -self.focal_lengths))
self.pp_offsets = np.concatenate((self.calib_mats[:, :, 0, 2:3], self.calib_mats[:, :, 1, 2:3]), -1)
# camera centres
self.C_hats = np.tile(np.concatenate([np.expand_dims(item[:, -1:], 0) for item in vrep_mats_list], 0),
(self.batch_size, 1, 1, 1))
# camera rotation matrix wrt world frame
self.inv_Rs = np.tile(np.concatenate([np.expand_dims(item[:, :-1], 0) for item in vrep_mats_list], 0),
(self.batch_size, 1, 1, 1))
self.Rs = np.linalg.inv(self.inv_Rs)
# extrinsic mats
self.R_C_hats = np.matmul(self.Rs, self.C_hats)
self.ext_mats = np.concatenate((self.Rs, -self.R_C_hats), -1)
self.ext_mats_homo = np.concatenate((self.ext_mats, np.tile(np.array([0, 0, 0, 1]),
(self.batch_size, self.num_cameras, 1, 1))), 2)
# inv extrinsic mats
self.inv_ext_mats_homo = np.linalg.inv(self.ext_mats_homo)
self.inv_ext_mats = self.inv_ext_mats_homo[:, :, 0:3]
self.pinv_ext_mats = np.linalg.pinv(self.ext_mats)
# full mats
self.full_mats = np.matmul(self.calib_mats, self.ext_mats)
self.full_mats_homo = np.concatenate((self.full_mats, np.tile(np.array([0, 0, 0, 1]),
(self.batch_size, self.num_cameras, 1, 1))), 2)
self.inv_full_mats_homo = np.linalg.inv(self.full_mats_homo)
self.inv_full_mats = self.inv_full_mats_homo[:, :, 0:3]
self.pinv_full_mats = np.linalg.pinv(self.full_mats)
# cam2cam ext mats
self.cam2cam_ext_mats_homo = np.matmul(np.flip(self.ext_mats_homo, 1), self.inv_ext_mats_homo)
self.cam2cam_ext_mats = self.cam2cam_ext_mats_homo[:, :, 0:3]
# cam2cam full mats
self.cam2cam_full_mats_homo = np.matmul(np.flip(self.full_mats_homo, 1), self.inv_full_mats_homo)
self.cam2cam_full_mats = self.cam2cam_full_mats_homo[:, :, 0:3]
# uniform pixel coords
pixel_x_coords = np.reshape(np.tile(np.arange(self.image_dims[1]), [self.image_dims[0]]),
(self.image_dims[0], self.image_dims[1], 1)).astype(np.float32)
pixel_y_coords_ = np.reshape(np.tile(np.arange(self.image_dims[0]), [self.image_dims[1]]),
(self.image_dims[1], self.image_dims[0], 1)).astype(np.float32)
pixel_y_coords = np.transpose(pixel_y_coords_, (1, 0, 2))
ones = np.ones_like(pixel_x_coords)
uniform_pixel_coords = np.tile(np.expand_dims(np.concatenate((pixel_x_coords, pixel_y_coords, ones), -1), 0),
(self.batch_size, 1, 1, 1))
self.uniform_pixel_coords = np.tile(np.expand_dims(uniform_pixel_coords, 1), (1, 2, 1, 1, 1))
# depth maps
self.depth_maps = np.tile(np.concatenate([item.reshape((1, 1, self.image_dims[0], self.image_dims[1], 1))
for item in depth_maps_list], 1), (self.batch_size, 1, 1, 1, 1))
# pixel coords
self.pixel_coords_to_scatter = self.uniform_pixel_coords * self.depth_maps
self.pixel_coords_normed = self.pixel_coords_to_scatter / self.pixel_coords_to_scatter[:, :, :, :, -1:]
# cam coords
coords_reshaped = np.reshape(np.transpose(self.pixel_coords_to_scatter, (0, 1, 4, 2, 3)),
(self.batch_size, self.num_cameras, 3, -1))
transformed_coords_vector = np.matmul(self.inv_calib_mats, coords_reshaped)
transformed_coords_vector_transposed = np.transpose(transformed_coords_vector, (0, 1, 3, 2))
self.cam_coords_not_homo = np.reshape(transformed_coords_vector_transposed,
(self.batch_size, self.num_cameras, self.image_dims[0],
self.image_dims[1], 3))
self.cam_coords = np.concatenate((self.cam_coords_not_homo, np.ones(
(self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 1))), -1)
# sphere coords
with ivy.numpy.use:
cam_coords_not_homo = ivy.concatenate((self.cam_coords_not_homo[..., 2:3],
self.cam_coords_not_homo[..., 0:1],
self.cam_coords_not_homo[..., 1:2]), -1)
self.sphere_coords = ivy_mech.cartesian_to_polar_coords(cam_coords_not_homo)
# radial depth
self.radial_depth_maps = self.sphere_coords[..., -1:]
# angular_pixel_coords
self.sphere_img_dims = [90, 180]
self.pixels_per_degree = 1
sphere_angle_coords = self.sphere_coords[..., 0:2]
sphere_radius_vals = self.sphere_coords[..., -1:]
sphere_angle_coords_in_degs = sphere_angle_coords * 180 / np.pi
sphere_x_coords = ((180 - sphere_angle_coords_in_degs[..., 0:1]) % 360) * self.pixels_per_degree
sphere_y_coords = (sphere_angle_coords_in_degs[..., 1:2] % 180) * self.pixels_per_degree
self.angular_pixel_coords = np.concatenate((sphere_x_coords, sphere_y_coords, sphere_radius_vals), -1)
# world coords
coords_reshaped = np.reshape(np.transpose(self.cam_coords, (0, 1, 4, 2, 3)),
(self.batch_size, self.num_cameras, 4, -1))
transformed_coords_vector = np.matmul(self.inv_ext_mats, coords_reshaped)
transformed_coords_vector_transposed = np.transpose(transformed_coords_vector, (0, 1, 3, 2))
self.world_coords_not_homo = np.reshape(transformed_coords_vector_transposed, (
self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 3))
self.world_coords = np.concatenate((self.world_coords_not_homo, np.ones(
(self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 1))), -1)
# world rays
vectors = self.world_coords[:, :, :, :, 0:3] - np.reshape(self.C_hats,
(self.batch_size, self.num_cameras, 1, 1, 3))
self.world_rays = vectors / (np.sqrt(np.sum(np.square(vectors), -1, keepdims=True)) + MIN_DENOMINATOR)
# projected world rays
vectors = np.flip(self.world_coords[:, :, :, :, 0:3], 1) - np.reshape(self.C_hats, (
self.batch_size, self.num_cameras, 1, 1, 3))
self.proj_world_rays = vectors / (np.sqrt(np.sum(np.square(vectors), -1, keepdims=True)) + MIN_DENOMINATOR)
# projected cam coords
coords_reshaped = np.reshape(np.transpose(np.flip(self.world_coords, 1), (0, 1, 4, 2, 3)),
(self.batch_size, self.num_cameras, 4, -1))
transformed_coords_vector = np.matmul(self.ext_mats, coords_reshaped)
transformed_coords_vector_transposed = np.transpose(transformed_coords_vector, (0, 1, 3, 2))
proj_cam_coords_not_homo = np.reshape(transformed_coords_vector_transposed, (
self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 3))
self.proj_cam_coords = np.concatenate((proj_cam_coords_not_homo, np.ones(
(self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 1))), -1)
# projected sphere coords
with ivy.numpy.use:
proj_cam_coords = ivy.concatenate((self.proj_cam_coords[..., 2:3],
self.proj_cam_coords[..., 0:1],
self.proj_cam_coords[..., 1:2]), -1)
self.proj_sphere_coords = \
np.reshape(ivy_mech.cartesian_to_polar_coords(
np.reshape(proj_cam_coords, (-1, 3))),
(self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 3))
# projected pixel coords
self.proj_cam_coords_not_homo = self.proj_cam_coords[:, :, :, :, 0:3]
coords_reshaped = np.reshape(np.transpose(self.proj_cam_coords_not_homo, (0, 1, 4, 2, 3)),
(self.batch_size, self.num_cameras, 3, -1))
transformed_coords_vector = np.matmul(self.calib_mats, coords_reshaped)
transformed_coords_vector_transposed = np.transpose(transformed_coords_vector, (0, 1, 3, 2))
self.proj_pixel_coords = np.reshape(transformed_coords_vector_transposed, (
self.batch_size, self.num_cameras, self.image_dims[0], self.image_dims[1], 3))
self.proj_pixel_coords_normed = self.proj_pixel_coords / self.proj_pixel_coords[:, :, :, :, -1:]
# projected angular pixel coords
sphere_radius_vals = self.proj_sphere_coords[..., -1:]
sphere_angle_coords = self.proj_sphere_coords[..., 0:2]
sphere_angle_coords_in_degs = sphere_angle_coords * 180 / np.pi
sphere_x_coords = ((180 - sphere_angle_coords_in_degs[..., 0:1]) % 360) * self.pixels_per_degree
sphere_y_coords = (sphere_angle_coords_in_degs[..., 1:2] % 180) * self.pixels_per_degree
self.proj_angular_pixel_coords =\
np.concatenate((sphere_x_coords, sphere_y_coords, sphere_radius_vals), -1)
# pixel correspondences
self.pixel_correspondences = np.concatenate((self.pixel_coords_to_scatter[:, 0:1], self.proj_pixel_coords_normed[:, 0:1]),
1)
# optical flow
self.optical_flow = self.proj_pixel_coords_normed[:, 1, :, :, 0:2] - \
self.pixel_coords_normed[:, 0, :, :, 0:2]
self.reverse_optical_flow = self.proj_pixel_coords_normed[:, 0, :, :, 0:2] - \
self.pixel_coords_normed[:, 1, :, :, 0:2]
# velocity from flow
self.delta_t = np.ones((1, 1)) * 0.05
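# Reprojection sanity sketch (commented out; assumes the camera/depth data
# files load): applying the full projection matrices to the homogeneous
# world coords should reproduce the scattered pixel coords.
#
# td = TestData()
# reproj = np.matmul(td.full_mats[:, :, None, None],
#                    td.world_coords[..., None])[..., 0]
# assert np.allclose(reproj, td.pixel_coords_to_scatter, atol=1e-3)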
|
[
"[email protected]"
] | |
166322ebabb0c976ace6f2e0fbdbfd220d2d019c
|
7d2c27662499f2c594c6f706c0d774955cd97ec9
|
/tensorpack/dataflow/imgaug/paste.py
|
73a44523297d5cd85063a71158e91e7d97d6b1d5
|
[
"Apache-2.0"
] |
permissive
|
RyannnXU/tensorpack
|
8ce0d5166719879a6a947ec253170751f7f45c30
|
b335a7baa00f578a5229315a3c8841efba602dcd
|
refs/heads/master
| 2021-06-09T18:23:38.410559 | 2017-01-03T16:44:20 | 2017-01-03T16:44:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,035 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: paste.py
# Author: Yuxin Wu <[email protected]>
from .base import ImageAugmentor
from abc import abstractmethod
import numpy as np
__all__ = ['CenterPaste', 'BackgroundFiller', 'ConstantBackgroundFiller',
'RandomPaste']
class BackgroundFiller(object):
""" Base class for all BackgroundFiller"""
def fill(self, background_shape, img):
"""
Return a proper background image of background_shape, given img
:param background_shape: a shape of [h, w]
:param img: an image
:returns: a background image
"""
return self._fill(background_shape, img)
@abstractmethod
def _fill(self, background_shape, img):
pass
class ConstantBackgroundFiller(BackgroundFiller):
""" Fill the background by a constant """
def __init__(self, value):
"""
:param value: the value to fill the background.
"""
self.value = value
def _fill(self, background_shape, img):
assert img.ndim in [3, 2]
if img.ndim == 3:
return_shape = background_shape + (3,)
else:
return_shape = background_shape
return np.zeros(return_shape) + self.value
class CenterPaste(ImageAugmentor):
"""
Paste the image onto the center of a background canvas.
"""
def __init__(self, background_shape, background_filler=None):
"""
:param background_shape: shape of the background canvas.
:param background_filler: a `BackgroundFiller` instance. Default to zero-filler.
"""
if background_filler is None:
background_filler = ConstantBackgroundFiller(0)
self._init(locals())
def _augment(self, img, _):
img_shape = img.shape[:2]
assert self.background_shape[0] > img_shape[0] and self.background_shape[1] > img_shape[1]
background = self.background_filler.fill(
self.background_shape, img)
y0 = int((self.background_shape[0] - img_shape[0]) * 0.5)
x0 = int((self.background_shape[1] - img_shape[1]) * 0.5)
background[y0:y0 + img_shape[0], x0:x0 + img_shape[1]] = img
return background
def _fprop_coord(self, coord, param):
raise NotImplementedError()
class RandomPaste(CenterPaste):
"""
Randomly paste the image onto a background canvas
"""
def _get_augment_params(self, img):
img_shape = img.shape[:2]
assert self.background_shape[0] > img_shape[0] and self.background_shape[1] > img_shape[1]
y0 = self._rand_range(self.background_shape[0] - img_shape[0])
x0 = self._rand_range(self.background_shape[1] - img_shape[1])
return int(x0), int(y0)
def _augment(self, img, loc):
x0, y0 = loc
img_shape = img.shape[:2]
background = self.background_filler.fill(
self.background_shape, img)
background[y0:y0 + img_shape[0], x0:x0 + img_shape[1]] = img
return background
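# Minimal usage sketch (commented out; assumes the ImageAugmentor base
# class exposes an augment(img) entry point, as in tensorpack's dataflow
# API):
#
# import numpy as np
# aug = CenterPaste(background_shape=(300, 300))
# out = aug.augment(np.zeros((100, 120, 3), dtype='uint8'))
# assert out.shape[:2] == (300, 300)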
|
[
"[email protected]"
] | |
15e8e6e2ce097df49256bb67b42d19a87f2491b0
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/sieve-big-1485.py
|
3354bf1aaf14a6e069590cfa5fc3f5d79836bf4d
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 31,753 |
py
|
# A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
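# Expected output for n = 50 (what the loop above prints, one value per
# line): the primes below 50, i.e. 2 3 5 7 11 13 17 19 23 29 31 37 41 43 47.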
|
[
"[email protected]"
] | |
c53c7a9ab52018aad680ab9e0754c63891fb0dea
|
5a95daadcdf2eb4a9957be43d8231cd12615bda6
|
/pipeline/metric_fuc.py
|
a956061df328cf72cd447b7c422fcca60b80b07a
|
[] |
no_license
|
fendaq/cail2018_repo
|
0fe7126ca052f57782aae6ce3863e6bad6833093
|
750c3846a678402220c8badd0c377deda277db6a
|
refs/heads/master
| 2020-03-21T19:32:58.539058 | 2018-06-21T12:01:55 | 2018-06-21T12:01:55 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,623 |
py
|
import mmap
import numpy as np
from keras.callbacks import Callback
from sklearn.metrics import f1_score
from tqdm import tqdm
from config import *
import os
def predict2half(predictions):
y_pred = np.zeros(predictions.shape)
y_pred[predictions > 0.5] = 1
return y_pred
def predict2tag(predictions):
y_pred = np.array(predictions, copy=True)
for index, x in enumerate(y_pred):
x[x > 0.5] = 1
if x.max() < 1:
x[x == x.max()] = 1
y_pred[y_pred < 1] = 0
return y_pred
class F1ScoreCallback(Callback):
def __init__(self, predict_batch_size=1024, include_on_batch=False,data_test=None):
super(F1ScoreCallback, self).__init__()
self.predict_batch_size = predict_batch_size
self.include_on_batch = include_on_batch
        self.data_test = data_test
def on_batch_begin(self, batch, logs={}):
pass
def on_train_begin(self, logs={}):
if not ('avg_f1_score_val' in self.params['metrics']):
self.params['metrics'].append('avg_f1_score_val')
def on_batch_end(self, batch, logs={}):
if (self.include_on_batch):
logs['avg_f1_score_val'] = float('-inf')
def on_epoch_end(self, epoch, logs={}):
logs['avg_f1_score_val'] = float('-inf')
if (self.validation_data):
predict = self.model.predict(self.validation_data[0],
batch_size=self.predict_batch_size)
y_predict = predict2half(predict)
f1 = f1_score(self.validation_data[1], y_predict, average='macro')
print("macro f1_score %.4f " % f1)
f2 = f1_score(self.validation_data[1], y_predict, average='micro')
print("micro f1_score %.4f " % f2)
avgf1 = (f1 + f2) / 2
# print("avg_f1_score %.4f " % (avgf1))
logs['avg_f1_score_val'] = avgf1
if(self.data_test):
predict = self.model.predict(self.data_test[0],
batch_size=self.predict_batch_size)
y_predict = predict2tag(predict)
f1 = f1_score(self.data_test[1], y_predict, average='macro')
print("test macro f1_score %.4f " % f1)
f2 = f1_score(self.data_test[1], y_predict, average='micro')
print("test micro f1_score %.4f " % f2)
avgf1 = (f1 + f2) / 2
print("test avg_f1_score %.4f " % (avgf1))
logs['avgf1_test'] = avgf1
def get_num_lines(file_path):
    # count lines via mmap without loading the file; close the handle when done
    with open(file_path, "r+") as fp:
        buf = mmap.mmap(fp.fileno(), 0)
        lines = 0
        while buf.readline():
            lines += 1
        buf.close()
    return lines
def get_embedding_matrix(word_index, Emed_path, Embed_npy):
if (os.path.exists(Embed_npy)):
return np.load(Embed_npy)
print('Indexing word vectors')
embeddings_index = {}
file_line = get_num_lines(Emed_path)
print('lines ', file_line)
with open(Emed_path, encoding='utf-8') as f:
for line in tqdm(f, total=file_line):
values = line.split()
if (len(values) < embedding_dims):
print(values)
continue
word = ' '.join(values[:-embedding_dims])
coefs = np.asarray(values[-embedding_dims:], dtype='float32')
embeddings_index[word] = coefs
f.close()
print('Total %s word vectors.' % len(embeddings_index))
print('Preparing embedding matrix')
nb_words = MAX_FEATURES # min(MAX_FEATURES, len(word_index))
all_embs = np.stack(embeddings_index.values())
print(all_embs.shape)
emb_mean, emb_std = all_embs.mean(), all_embs.std()
embedding_matrix = np.random.normal(loc=emb_mean, scale=emb_std, size=(nb_words, embedding_dims))
# embedding_matrix = np.zeros((nb_words, embedding_dims))
count = 0
for word, i in tqdm(word_index.items()):
if i >= MAX_FEATURES:
continue
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
# words not found in embedding index will be all-zeros.
embedding_matrix[i] = embedding_vector
count += 1
np.save(Embed_npy, embedding_matrix)
print('Null word embeddings: %d' % (nb_words - count))
print('not Null word embeddings: %d' % count)
print('embedding_matrix shape', embedding_matrix.shape)
# print('Null word embeddings: %d' % np.sum(np.sum(embedding_matrix, axis=1) == 0))
return embedding_matrix
def judger(label_true, y_predict):
result = 0
l1, l2, l3 = label_true
p1, p2, p3 = y_predict
p2[p2 > 0.5] = 1
p2[p2 < 0.5] = 0
p3[p3 > 0.5] = 1
p3[p3 < 0.5] = 0
# p1 = np.reshape(p1, (-1,))
# p2 = np.reshape(p2, (-1,))
# p3 = np.reshape(p3, (-1,))
    # y_predict is a 3-tuple (p1, p2, p3), so iterate over the samples in p1,
    # not over the tuple itself
    for i in range(len(p1)):
yp = round(p1[i][0])
dp = p2[i][0]
lp = p3[i][0]
yt = l1[i][0]
dt = l2[i][0]
lt = l3[i][0]
sc = 0
        if dt == 1:
            if dp == 1:
                sc = 1
        elif lt == 1:
            if lp == 1:
                sc = 1
        else:
            v1 = yt
            v2 = yp
            v = abs(np.log(v1 + 1) - np.log(v2 + 1))
if v <= 0.2:
sc = 1
elif v <= 0.4:
sc = 0.8
elif v <= 0.6:
sc = 0.6
elif v <= 0.8:
sc = 0.4
elif v <= 1.0:
sc = 0.2
else:
sc = 0
sc = sc * 1.0
result += sc
    return result / len(p1)
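# Worked example of the log-band scoring above (values made up): with a true
# term yt = 36 and a prediction yp = 30, v = |ln(37) - ln(31)| ~= 0.177,
# which is <= 0.2, so the sample scores 1.0; at yp = 20, v ~= 0.566 and the
# sample scores 0.6.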
class ImprisonCallback(Callback):
def __init__(self, predict_batch_size=1024, include_on_batch=False):
super(ImprisonCallback, self).__init__()
self.predict_batch_size = predict_batch_size
self.include_on_batch = include_on_batch
def on_batch_begin(self, batch, logs={}):
pass
def on_train_begin(self, logs={}):
if not ('avg_f1_score_val' in self.params['metrics']):
self.params['metrics'].append('avg_f1_score_val')
def on_batch_end(self, batch, logs={}):
if (self.include_on_batch):
logs['avg_f1_score_val'] = float('-inf')
def on_epoch_end(self, epoch, logs={}):
logs['avg_f1_score_val'] = float('-inf')
if (self.validation_data):
y_predict = self.model.predict(self.validation_data[0],
batch_size=self.predict_batch_size)
label = self.validation_data[1], self.validation_data[2], self.validation_data[3]
logs['avg_f1_score_val'] = judger(label, y_predict)
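# Minimal wiring sketch (not from the original file): shows where the
# callbacks above plug into a standard Keras fit() call. The model and the
# arrays are placeholders supplied by the surrounding pipeline.
def fit_with_f1(model, x_train, y_train, x_val, y_val, data_test=None):
    return model.fit(x_train, y_train,
                     validation_data=(x_val, y_val),
                     callbacks=[F1ScoreCallback(data_test=data_test)])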
|
[
"[email protected]"
] | |
2c3be747e324140bce05946aeaa349adbce9a3a5
|
539e4522a3a47b0234a3972b633ca8d55f0c71ec
|
/data2csv
|
40a8ad623e8b66de0133d90014f58c748f1d6b60
|
[
"MIT"
] |
permissive
|
charnley/data2csv
|
a060c2fa4d5a239e67dd95050bc73b13f6853563
|
ac8b5516b0932f444203d17a270217d827633288
|
refs/heads/master
| 2020-07-03T21:50:51.715289 | 2018-06-15T20:43:45 | 2018-06-15T20:43:45 | 74,230,997 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,688 |
#!/usr/bin/env python
from __future__ import print_function
import ConfigParser
import sys
import os
import subprocess
import re
import numpy as np
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def get_floats(shell_cmd):
""" Return all the floats for each line """
FNULL = open(os.devnull, 'w')
string = subprocess.Popen(shell_cmd, shell=True, stdout=subprocess.PIPE, stderr=FNULL).communicate()[0]
string = string.split('\n')
regex = r'[\-]*\d+\.\d+[eE\-]*\d*'
if string == ['']:
eprint('nan:', shell_cmd)
return 'nan'
floats = []
for line in string:
numbers = re.findall(regex, line)
if len(numbers) > 0:
floats.append([])
for number in numbers:
try:
number = float(number)
except ValueError:
number = float("nan")
floats[-1].append(number)
# floats.append([float(number) for number in numbers])
return floats
def config_section_map(section, Config):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
            if dict1[option] == -1:
                eprint("skip: %s" % option)  # DebugPrint was undefined here
        except Exception:
eprint("exception on %s!" % option)
dict1[option] = None
return dict1
if __name__ == '__main__':
args = sys.argv[1:]
usage = """
get_data <ini structure> <list of molecules>
"""
if len(args) < 2:
eprint(usage)
quit()
ini_file = args[0]
structures_file = args[1]
config = ConfigParser.ConfigParser()
config.read(ini_file)
sections = config.sections()
structures = []
f = open(structures_file)
for line in f:
structures.append(line.replace("\n", ""))
f.close()
data = {}
for structure in structures:
data[structure] = []
# TODO structures
for i, section in enumerate(sections):
secdic = config_section_map(section, config)
try:
grep_cmd = secdic['grep']
except KeyError:
grep_cmd = ""
try:
cmd_cmd = secdic['cmd']
except KeyError:
cmd_cmd = ""
folder = secdic['folder']
if folder[-1] != "/":
folder += "/"
extension = secdic['extension']
unit = secdic['unit']
indexcol = int(secdic['indexcol'])
indexrow = int(secdic['indexrow'])
if grep_cmd != "":
cmd = grep_cmd + " " + "{:}" + "." + extension
if cmd_cmd != "":
cmd = cmd_cmd
cmd = cmd.replace("{:}", "{:}."+extension)
for structure in structures:
floats = get_floats(cmd.format(folder+structure))
if isinstance(floats, basestring):
value = "nan"
else:
try:
value = floats[indexrow][indexcol]
except IndexError:
eprint("nan:", cmd)
eprint(floats)
eprint()
value = "nan"
# exit('Wrong row,col index in ini file. Does not match output.')
data[structure].append(value)
# change the name of section
if unit != "kcal/mol":
sections[i] += " [" + unit + "]"
strprint = "{0}"
for x in range(len(sections)):
strprint += ", {"+str(x+1)+"}"
print(strprint.format("molecule", *sections))
for structure in structures:
print(strprint.format(structure, *data[structure]))
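# Example ini structure this script expects (section names become the CSV
# column headers; every value below is illustrative):
#
#   [energy]
#   grep = grep 'Total energy'
#   folder = calculations/
#   extension = out
#   unit = kcal/mol
#   indexcol = 0
#   indexrow = 0
#
# `cmd` may be given instead of `grep`; it should contain a `{:}`
# placeholder for the structure path.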
|
[
"[email protected]"
] | ||
f41309b669d164c057a8575be6894c0ae30aa544
|
eb4f61315e8f0b139d0af3a95c59a2907845ebfd
|
/7-8(am)/corePython/control stmtss/cakediscount.py
|
177bcc1cf85eb01053c25aba35836b2d73d33abf
|
[] |
no_license
|
vamsikrishna6668/python-core
|
c1d368792fa6f89bf51ae690d20b45cb5ae0fb98
|
b66ad0f6ad0deffbc350d5a7996f23220940e187
|
refs/heads/master
| 2020-04-02T10:00:39.641928 | 2018-10-23T12:19:14 | 2018-10-23T12:19:14 | 154,320,508 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,136 |
py
|
veg_cake=int(input('No of Veg cakes:'))
nonveg_cake=int(input('No of nonVeg cakes:'))
vegcake_cost=300
nonvegcake_cost=350
vegcake_discount=(veg_cake*vegcake_cost)*0.05
nonvegcake_discount=(nonveg_cake*nonvegcake_cost)*0.04
total_vegcake_cost=(veg_cake*vegcake_cost)-vegcake_discount
total_nonvegcake_cost=(nonveg_cake*nonvegcake_cost)-nonvegcake_discount
if total_vegcake_cost>=600 and total_nonvegcake_cost>=700:
    print('Number of veg cakes ordered:',veg_cake)
    print('Discount on the veg cakes:',vegcake_discount)
    print('Total cost of the veg cakes:',total_vegcake_cost)
    print('Number of non-veg cakes ordered:',nonveg_cake)
    print('Discount on the non-veg cakes:',nonvegcake_discount)
    print('Total cost of the non-veg cakes:',total_nonvegcake_cost)
else:
    print('Number of veg cakes ordered:',veg_cake)
    print('Total cost of the veg cakes:',total_vegcake_cost)
    print('Number of non-veg cakes ordered:',nonveg_cake)
    print('Total cost of the non-veg cakes:',total_nonvegcake_cost)
print('Thank you for ordering on Uber Eats. Visit again!')
|
[
"[email protected]"
] | |
80a6ff4cb5ecc084d016f9301da878f259878fa9
|
68d38b305b81e0216fa9f6769fe47e34784c77f2
|
/alascrapy/spiders/digitalspy.py
|
0a6d6b49df371dfbc569048703b9ba89f5d402f1
|
[] |
no_license
|
ADJet1437/ScrapyProject
|
2a6ed472c7c331e31eaecff26f9b38b283ffe9c2
|
db52844411f6dac1e8bd113cc32a814bd2ea3632
|
refs/heads/master
| 2022-11-10T05:02:54.871344 | 2020-02-06T08:01:17 | 2020-02-06T08:01:17 | 237,448,562 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,075 |
py
|
# -*- coding: utf8 -*-
from datetime import datetime
from scrapy.http import Request
from alascrapy.spiders.base_spiders.ala_spider import AlaSpider
from alascrapy.lib.generic import get_full_url
import alascrapy.lib.dao.incremental_scraping as incremental_utils
class DigitalSpySpider(AlaSpider):
name = 'digitalspy'
allowed_domains = ['digitalspy.co.uk']
start_urls = ['http://www.digitalspy.co.uk/tech/review/']
def __init__(self, *args, **kwargs):
        super(DigitalSpySpider, self).__init__(*args, **kwargs)
self.stored_last_date = incremental_utils.get_latest_pro_review_date(self.mysql_manager, self.spider_conf["source_id"])
def parse(self, response):
next_page_xpath = "//*[contains(@class, 'pagination')]//a[@title='Next']/@href"
review_urls = self.extract_list(
response.xpath("//*[@class='content_area']//a[@class='component']/@href"))
for review_url in review_urls:
review_url = get_full_url(response, review_url)
request = Request(review_url, callback=self.parse_review)
yield request
if self.continue_to_next_page(response):
next_page = self.extract(response.xpath(next_page_xpath))
if next_page:
next_page = get_full_url(response, next_page)
request = Request(next_page, callback=self.parse)
yield request
def continue_to_next_page(self, response):
if not self.stored_last_date:
return True
review_date_xpath = "//*[@class='content_area']//time/@datetime"
review_dates = self.extract_list(response.xpath(review_date_xpath))
if review_dates:
last_date_string = review_dates[-1]
last_review_date = datetime.strptime(last_date_string[0:-4], "%Y-%m-%d:%H:%M")
if self.stored_last_date > last_review_date:
return False
return True
def parse_review(self, response):
product_xpaths = { "ProductName": "(//*[@id='articleimage'])[1]//img/@alt",
"PicURL": "(//*[@property='og:image'])[1]/@content",
"OriginalCategoryName": "(//*[@class='category-chicklets']/li)[last()]//text()"
}
review_xpaths = { "ProductName": "(//*[@id='articleimage'])[1]//img/@alt",
"TestTitle": "//*[@property='og:title']/@content",
"TestSummary": "//*[@property='og:description']/@content",
"Author": "//a[@rel='author']/text()",
"TestDateText": "//time/@datetime"
}
product = self.init_item_by_xpaths(response, "product", product_xpaths)
review = self.init_item_by_xpaths(response, "review", review_xpaths)
review["DBaseCategoryName"] = "PRO"
review["TestDateText"] = datetime.strptime(review["TestDateText"][0:-4],
"%Y-%m-%d:%H:%M").strftime("%Y-%m-%d %H:%M:00")
yield product
yield review
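# To run this spider with the standard Scrapy CLI from the project root:
#
#   scrapy crawl digitalspy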
|
[
"[email protected]"
] | |
bb1c11a5dec23753cf0d15af74e5a1e3f8fb3803
|
0b88201be895a25c8c321481615b4965f529d6da
|
/CDTB_Seg/model/sentence.py
|
c4f574f16d8c9e287b9e6e110b3fa970a1cc8c39
|
[
"BSD-2-Clause",
"MIT"
] |
permissive
|
NLP-Discourse-SoochowU/segmenter2020
|
1e8335da56b26f52ed48eb462047b9fe9b1e10df
|
fd71b353c59bcb82ec2cd0bebf943040756faa63
|
refs/heads/master
| 2023-01-13T23:14:37.078780 | 2020-11-24T05:07:26 | 2020-11-24T05:07:26 | 283,890,012 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 748 |
py
|
# -*- coding: utf-8 -*-
"""
@Author: Lyzhang
@Date:
@Description: Wraps a sentence with its syntactic information (EDUs, dependency parse, etc.)
"""
from stanfordcorenlp import StanfordCoreNLP
from model.edu import EDU
path_to_jar = 'stanford-corenlp-full-2018-02-27'
nlp = StanfordCoreNLP(path_to_jar)
class Sentence:
def __init__(self, sentence, edus_list):
self.edus = self.build_edus(edus_list)
self.sentence_txt = sentence
self.dependency = self.gen_dependency()
@staticmethod
def build_edus(edus_list):
edus_ = list()
for edu in edus_list:
edus_.append(EDU(edu, nlp))
return edus_
def gen_dependency(self):
dep = nlp.dependency_parse(self.sentence_txt)
return dep
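# Minimal usage sketch (assumes the CoreNLP distribution referenced by
# `path_to_jar` above is present locally; the sentence and EDU strings are
# made up for illustration):
if __name__ == '__main__':
    sent = Sentence("It was raining, so I stayed home.",
                    ["It was raining,", "so I stayed home."])
    print(len(sent.edus), sent.dependency)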
|
[
"[email protected]"
] | |
40e1d47660e1a1180f87336736369de34c5b4c7a
|
5a93d5eadf55513020c0c5149b2bc8a52d8ea4c0
|
/core/ui_mixins/input_panel.py
|
c491e98d7997e5d3f0827ff5c301b670910427f5
|
[
"MIT"
] |
permissive
|
jmcollis/GitSavvy
|
b14dfe7485aa2d7c37c9bd3615a6b9be9fe274e8
|
de7d01539931b4344a296bd71ed87d9754389f6a
|
refs/heads/master
| 2022-10-06T23:38:36.821013 | 2022-09-26T14:21:33 | 2022-09-26T14:21:33 | 165,129,291 | 0 | 0 |
MIT
| 2019-01-10T20:51:57 | 2019-01-10T20:51:57 | null |
UTF-8
|
Python
| false | false | 398 |
py
|
import sublime
def show_single_line_input_panel(
caption, initial_text, on_done, on_change=None, on_cancel=None, select_text=True):
window = sublime.active_window()
v = window.show_input_panel(caption, initial_text, on_done, on_change, on_cancel)
if select_text:
v.run_command("select_all")
v.settings().set("git_savvy.single_line_input_panel", True)
return v
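# Minimal usage sketch (only meaningful inside a running Sublime Text
# instance, e.g. called from a plugin command; the caption and callback are
# made up):
def prompt_for_branch():
    show_single_line_input_panel(
        "Branch name:", "master",
        on_done=lambda text: sublime.status_message("got: " + text))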
|
[
"[email protected]"
] | |
9d0ff7677ff7081a08604ac32ba0f7d3910f7769
|
9d42a3bf2447340bb01f3472077653fbb4152f89
|
/WEEK_1/XDG_CACHE_HOME/Microsoft/Python Language Server/stubs.v1/Y9_gspj6TFoJYmKVcpQxqeHzeDzbHpuurO4m3_k-EtQ=/convolve.cpython-36m-x86_64-linux-gnu.pyi
|
00fc1e4db134935294f855e6a7075ae8aa227862
|
[] |
no_license
|
krausce/Integrify
|
100ec19333916786c0f035b2819e0b3479483a01
|
43127726a8e82a48475dbdeabb91a85e3cdf760c
|
refs/heads/master
| 2020-07-10T00:04:23.818280 | 2019-06-25T14:16:49 | 2019-06-25T14:16:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,258 |
pyi
|
__doc__ = "This module 'convolve' is auto-generated with f2py (version:2).\nFunctions:\n omega = init_convolution_kernel(n,kernel_func,d=0,zero_nyquist=d%2,kernel_func_extra_args=())\n destroy_convolve_cache()\n y = convolve(x,omega,swap_real_imag=0,overwrite_x=0)\n y = convolve_z(x,omega_real,omega_imag,overwrite_x=0)\n."
__file__ = '/home/chris/anaconda3/lib/python3.6/site-packages/scipy/fftpack/convolve.cpython-36m-x86_64-linux-gnu.so'
__name__ = 'scipy.fftpack.convolve'
__package__ = 'scipy.fftpack'
__version__ = b'$Revision: $'
def convolve():
"y = convolve(x,omega,[swap_real_imag,overwrite_x])\n\nWrapper for ``convolve``.\n\nParameters\n----------\nx : input rank-1 array('d') with bounds (n)\nomega : input rank-1 array('d') with bounds (n)\n\nOther Parameters\n----------------\noverwrite_x : input int, optional\n Default: 0\nswap_real_imag : input int, optional\n Default: 0\n\nReturns\n-------\ny : rank-1 array('d') with bounds (n) and x storage\n"
pass
def convolve_z():
"y = convolve_z(x,omega_real,omega_imag,[overwrite_x])\n\nWrapper for ``convolve_z``.\n\nParameters\n----------\nx : input rank-1 array('d') with bounds (n)\nomega_real : input rank-1 array('d') with bounds (n)\nomega_imag : input rank-1 array('d') with bounds (n)\n\nOther Parameters\n----------------\noverwrite_x : input int, optional\n Default: 0\n\nReturns\n-------\ny : rank-1 array('d') with bounds (n) and x storage\n"
pass
def destroy_convolve_cache():
'destroy_convolve_cache()\n\nWrapper for ``destroy_convolve_cache``.\n\n'
pass
def init_convolution_kernel():
"omega = init_convolution_kernel(n,kernel_func,[d,zero_nyquist,kernel_func_extra_args])\n\nWrapper for ``init_convolution_kernel``.\n\nParameters\n----------\nn : input int\nkernel_func : call-back function\n\nOther Parameters\n----------------\nd : input int, optional\n Default: 0\nkernel_func_extra_args : input tuple, optional\n Default: ()\nzero_nyquist : input int, optional\n Default: d%2\n\nReturns\n-------\nomega : rank-1 array('d') with bounds (n)\n\nNotes\n-----\nCall-back functions::\n\n def kernel_func(k): return kernel_func\n Required arguments:\n k : input int\n Return objects:\n kernel_func : float\n"
pass
|
[
"[email protected]"
] | |
0592131ab071183ad4eb44c54e560f11e46ede34
|
1f0ebcb6f428244c3283466c7f98944349f3df48
|
/greendoge/wallet/transaction_record.py
|
256fa58a22fee0e4a5e238d5ab70ed037423f6ee
|
[
"Apache-2.0"
] |
permissive
|
ymcage/greendoge-blockchain
|
4b53433c26221ea6cf5665b9a134fff25c676e22
|
42d5440c3899419f4aa544908a50b1ed78799c13
|
refs/heads/main
| 2023-06-10T06:54:40.391343 | 2021-07-07T11:39:31 | 2021-07-07T11:39:31 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,501 |
py
|
from dataclasses import dataclass
from typing import List, Optional, Tuple
from greendoge.consensus.coinbase import pool_parent_id, farmer_parent_id
from greendoge.types.blockchain_format.coin import Coin
from greendoge.types.blockchain_format.sized_bytes import bytes32
from greendoge.types.mempool_inclusion_status import MempoolInclusionStatus
from greendoge.types.spend_bundle import SpendBundle
from greendoge.util.ints import uint8, uint32, uint64
from greendoge.util.streamable import Streamable, streamable
from greendoge.wallet.util.transaction_type import TransactionType
@dataclass(frozen=True)
@streamable
class TransactionRecord(Streamable):
"""
Used for storing transaction data and status in wallets.
"""
confirmed_at_height: uint32
created_at_time: uint64
to_puzzle_hash: bytes32
amount: uint64
fee_amount: uint64
confirmed: bool
sent: uint32
spend_bundle: Optional[SpendBundle]
additions: List[Coin]
removals: List[Coin]
wallet_id: uint32
# Represents the list of peers that we sent the transaction to, whether each one
# included it in the mempool, and what the error message (if any) was
sent_to: List[Tuple[str, uint8, Optional[str]]]
trade_id: Optional[bytes32]
type: uint32 # TransactionType
name: bytes32
def is_in_mempool(self) -> bool:
# If one of the nodes we sent it to responded with success, we set it to success
for (_, mis, _) in self.sent_to:
if MempoolInclusionStatus(mis) == MempoolInclusionStatus.SUCCESS:
return True
# Note, transactions pending inclusion (pending) return false
return False
def height_farmed(self, genesis_challenge) -> Optional[uint32]:
if not self.confirmed:
return None
if self.type == TransactionType.FEE_REWARD or self.type == TransactionType.COINBASE_REWARD:
for block_index in range(self.confirmed_at_height, self.confirmed_at_height - 100, -1):
if block_index < 0:
return None
pool_parent = pool_parent_id(uint32(block_index), genesis_challenge)
farmer_parent = farmer_parent_id(uint32(block_index), genesis_challenge)
if pool_parent == self.additions[0].parent_coin_info:
return uint32(block_index)
if farmer_parent == self.additions[0].parent_coin_info:
return uint32(block_index)
return None
|
[
"[email protected]"
] | |
2a5e6f9ac45e880f664a9ce232fab5f208239894
|
544fe02a27cc4d987724b1bf45c2ba2994676521
|
/Q6.2_brain_teaser.py
|
f3c435d0d39f9e66d72d19cb75fb37c344115fe5
|
[
"Unlicense"
] |
permissive
|
latika18/learning
|
1e7a6dbdea399b845970317dc62089911a13df1c
|
a57c9aacc0157bf7c318f46c1e7c4971d1d55aea
|
refs/heads/master
| 2021-06-16T19:20:28.146547 | 2019-09-03T06:43:28 | 2019-09-03T06:43:28 | 115,537,386 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 366 |
py
|
"""
There is an 8x8 chess board in which two diagonally opposite corners have been cut off.
You are given 31 dominos, and a single domino can cover exactly two squares.
Can you use the 31 dominos to cover the entire board? Prove your answer
(by providing an example, or showing why it’s impossible).
"""
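# Sketch of the classic colouring argument (added; not in the original
# file): the two removed corners share a colour, leaving 30 squares of one
# colour and 32 of the other, while every domino covers exactly one square
# of each colour -- so 31 dominos cannot tile the board. A quick count:
board = [[(r + c) % 2 for c in range(8)] for r in range(8)]
board[0][0] = board[7][7] = None  # cut off two diagonally opposite corners
squares = [s for row in board for s in row if s is not None]
print(squares.count(0), squares.count(1))  # -> 30 32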
|
[
"[email protected]"
] | |
6439de8c26738686bf36333467e5cb2eb4570782
|
c0973d6939ef419ed3d261d95167d537499a553a
|
/OnePy/builtin_module/backtest_stock/stock_bar.py
|
3c54b51c0fd2d7d4c0792869b53a43b9813f685e
|
[
"MIT"
] |
permissive
|
mj3428/OnePy
|
0c6e4be9b4bb36ae66b566dfa85cd44bae2a07de
|
8dc13fc21502daa5786aecaa4451ccba32fc8a14
|
refs/heads/master
| 2020-04-05T10:28:33.550915 | 2018-11-08T04:07:05 | 2018-11-08T04:07:05 | 134,518,682 | 0 | 0 |
MIT
| 2018-05-23T05:38:12 | 2018-05-23T05:38:11 | null |
UTF-8
|
Python
| false | false | 772 |
py
|
from OnePy.sys_module.models.base_bar import BarBase
class BarAshares(BarBase):
@property
def pre_date(self) -> str:
return self.previous_ohlc['date']
@property
def pre_open(self) -> float:
return self.previous_ohlc['open']
@property
def pre_high(self) -> float:
return self.previous_ohlc['high']
@property
def pre_low(self) -> float:
return self.previous_ohlc['low']
@property
def pre_close(self) -> float:
return self.previous_ohlc['close']
@property
def pre_volume(self) -> float:
return self.previous_ohlc['volume']
@property
def limit_up(self):
return self.pre_close*1.1
@property
def limit_down(self):
return self.pre_close*0.9
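# Illustration of the A-share +/-10% daily price band encoded above: with a
# previous close of 10.0, limit_up evaluates to 11.0 and limit_down to 9.0.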
|
[
"[email protected]"
] | |
2c26f31e646ce45e3dd3e92987df40f489341d47
|
9788d21a60e7f97cd8dcc6d28a280901cfff7d99
|
/app/decorators.py
|
f926a1a5112d53b9c9baa525e6dfced11ed3b4ef
|
[] |
no_license
|
Tiierr/MovieRental
|
b928d08b3a139c7c20fbdf1351402d2d8d700ab9
|
69bfaf3726aa7bedb58ef63a47d5e7b4476b08d9
|
refs/heads/master
| 2021-06-11T23:43:12.271576 | 2017-01-02T15:16:11 | 2017-01-02T15:16:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 667 |
py
|
from functools import wraps
from flask import abort
from flask_login import current_user
from .models import Permission
def permission_required(permission):
"""
    Custom decorator that checks the current user's permissions.
    If the user lacks the given permission, it aborts with a ``403``
    error code, i.e. HTTP ``Forbidden``.
"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not current_user.can(permission):
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
def admin_required(f):
return permission_required(Permission.ADMINISTER)(f)
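# Minimal usage sketch (hypothetical view; assumes a Flask `app` and a user
# model implementing can(), as in .models):
#
#   @app.route('/admin/dashboard')
#   @admin_required
#   def admin_dashboard():
#       return 'admins only'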
|
[
"[email protected]"
] | |
17ece5964bcf66ee4adb69316fe20ee8aef56d8d
|
4fc87c7c55d431943eba76caaa76cc889e99bd3f
|
/npf/contrib/address/migrations/0002_auto_20151027_1114.py
|
c5dad8fa654fa93453863a66fbca651cc1f86e22
|
[] |
no_license
|
Bonasolvo/npf-dev-roles
|
c774359b79642ae9ca2c82daeb0591677bd8e88c
|
dbde9493f2d23fd238dd3a6d8771bbbc5a650724
|
refs/heads/master
| 2016-09-01T05:35:50.246086 | 2015-12-15T07:02:40 | 2015-12-15T07:02:40 | 48,026,149 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,988 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import fias.fields.address
class Migration(migrations.Migration):
dependencies = [
('fias', '0001_initial'),
('address', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='House',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('street', models.CharField(editable=False, blank=True, max_length=255, db_index=True, verbose_name='Улица')),
('index', models.PositiveIntegerField(blank=True, null=True, verbose_name='Почтовый индекс')),
('house', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Дом')),
('corps', models.CharField(blank=True, max_length=2, null=True, verbose_name='Корпус')),
('apartment', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Квартира')),
],
options={
'db_table': 'zlk_house',
},
),
migrations.CreateModel(
name='Street',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('street', models.CharField(editable=False, blank=True, max_length=255, db_index=True, verbose_name='Улица')),
],
options={
'db_table': 'zlk_street',
},
),
migrations.CreateModel(
name='Address',
fields=[
],
options={
'proxy': True,
},
bases=('fias.addrobj',),
),
migrations.CreateModel(
name='Socr',
fields=[
],
options={
'verbose_name': 'Сокращениие наименования адресного объекта',
'verbose_name_plural': 'Список сокращений',
'proxy': True,
},
bases=('fias.socrbase',),
),
migrations.AddField(
model_name='street',
name='fias_street',
field=fias.fields.address.AddressField(related_name='+', verbose_name='Улица', blank=True, to='fias.AddrObj', null=True),
),
migrations.AddField(
model_name='house',
name='fias_house',
field=fias.fields.address.AddressField(related_name='+', verbose_name='Дом', blank=True, to='fias.AddrObj', db_column='fiashouse', null=True),
),
migrations.AddField(
model_name='house',
name='fias_street',
field=fias.fields.address.AddressField(related_name='+', verbose_name='Улица', blank=True, to='fias.AddrObj', null=True),
),
]
|
[
"[email protected]"
] | |
2144800d8fcec458abe686f8dab5297deb877026
|
8a1cc9342312f794c48a857de1444a70491a75fa
|
/item/admin.py
|
6d0f120c83410ffb372120fdb92117f26cefbc13
|
[] |
no_license
|
BijoySingh/Project-Hermes-Django
|
7adaf6757bc605e9f3781d915c4250bcc348fb47
|
7cb50402e612ea287d5922a2716a30b7888a0d70
|
refs/heads/master
| 2016-09-01T07:04:05.259487 | 2016-04-09T10:25:21 | 2016-04-09T10:25:21 | 55,672,434 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 796 |
py
|
from django.contrib import admin
# Register your models here.
from item.models import Item, Comment, Photo, Rating, Reaction
@admin.register(Item)
class ItemAdmin(admin.ModelAdmin):
list_display = ['id', 'title', 'latitude', 'longitude', 'rating', 'author']
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
list_display = ['id', 'comment', 'upvotes', 'downvotes', 'flags', 'author']
@admin.register(Photo)
class PhotoAdmin(admin.ModelAdmin):
list_display = ['id', 'picture', 'upvotes', 'downvotes', 'flags', 'author']
@admin.register(Rating)
class RatingAdmin(admin.ModelAdmin):
list_display = ['id', 'author', 'item', 'rating']
@admin.register(Reaction)
class ReactionAdmin(admin.ModelAdmin):
list_display = ['id', 'author', 'reactable', 'reaction']
|
[
"[email protected]"
] | |
234d119020fbf20a956843715d516476ec476f75
|
7b33e9ab949ef2cd985e56abea4a7c0e5d53e5a5
|
/examples/enwik8_deepspeed/train.py
|
40980f744df6c18b7b981b8682c8aa486aabcbfc
|
[
"MIT"
] |
permissive
|
karim-ahmed/linear-attention-transformer
|
3a416226b5f0707756e3044e6dd1dd65f6239ba7
|
dd3f7b63fe2235cba97ab1a95840d0484c70c068
|
refs/heads/master
| 2023-04-04T01:14:37.871715 | 2021-04-14T03:14:44 | 2021-04-14T03:14:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,789 |
py
|
import deepspeed
from linear_attention_transformer import LinearAttentionTransformerLM
from linear_attention_transformer.autoregressive_wrapper import AutoregressiveWrapper
import argparse
import random
import tqdm
import gzip
import numpy as np
import torch
import torch.optim as optim
from torch.nn import functional as F
from torch.utils.data import DataLoader, Dataset
def add_argument():
parser=argparse.ArgumentParser(description='enwik8')
parser.add_argument('--with_cuda', default=False, action='store_true',
help='use CPU in case there\'s no GPU support')
parser.add_argument('--use_ema', default=False, action='store_true',
help='whether use exponential moving average')
parser.add_argument('-b', '--batch_size', default=32, type=int,
help='mini-batch size (default: 32)')
parser.add_argument('-e', '--epochs', default=30, type=int,
help='number of total epochs (default: 30)')
parser.add_argument('--local_rank', type=int, default=-1,
help='local rank passed from distributed launcher')
parser = deepspeed.add_config_arguments(parser)
args = parser.parse_args()
return args
# constants
VALIDATE_EVERY = 100
GENERATE_EVERY = 500
GENERATE_LENGTH = 1024
SEQ_LEN = 4096
# helpers
def decode_token(token):
return str(chr(max(32, token)))
def decode_tokens(tokens):
return ''.join(list(map(decode_token, tokens)))
# instantiate model
model = LinearAttentionTransformerLM(
num_tokens = 256,
dim = 512,
depth = 8,
max_seq_len = SEQ_LEN,
heads = 8,
causal = True,
reversible = True,
blindspot_size = 2,
n_local_attn_heads = (8, 8, 8, 8, 4, 4, 2, 2)
)
model = AutoregressiveWrapper(model)
model.cuda()
# prepare enwik8 data
with gzip.open('./data/enwik8.gz') as file:
    # np.fromstring is deprecated for binary input; frombuffer + copy keeps the array writable
    X = np.frombuffer(file.read(int(95e6)), dtype=np.uint8).copy()
trX, vaX = np.split(X, [int(90e6)])
data_train, data_val = torch.from_numpy(trX), torch.from_numpy(vaX)
class TextSamplerDataset(Dataset):
def __init__(self, data, seq_len):
super().__init__()
self.data = data
self.seq_len = seq_len
def __getitem__(self, index):
rand_start = torch.randint(0, self.data.size(0) - self.seq_len - 1, (1,))
full_seq = self.data[rand_start: rand_start + self.seq_len + 1].long()
return full_seq, torch.ones_like(full_seq).bool()
def __len__(self):
return self.data.size(0) // self.seq_len
train_dataset = TextSamplerDataset(data_train, SEQ_LEN)
val_dataset = TextSamplerDataset(data_val, SEQ_LEN)
# setup deepspeed
cmd_args = add_argument()
model_engine, optimizer, trainloader, _ = deepspeed.initialize(args=cmd_args, model=model, model_parameters=model.parameters(), training_data=train_dataset)
# training
for i, (data, mask) in enumerate(trainloader):
model_engine.train()
data = data.to(model_engine.local_rank)
loss = model_engine(data, return_loss = True, randomly_truncate_sequence = True)
model_engine.backward(loss)
model_engine.step()
print(loss.item())
if i % VALIDATE_EVERY == 0:
model.eval()
with torch.no_grad():
inp, _ = random.choice(val_dataset)
loss = model(inp[None, :].cuda(), return_loss = True)
print(f'validation loss: {loss.item()}')
if i != 0 and model_engine.local_rank == 0 and i % GENERATE_EVERY == 0:
model.eval()
inp, _ = random.choice(val_dataset)
print(inp.shape, inp)
prime = decode_tokens(inp)
        print('%s \n\n %s' % (prime, '*' * 100))
sample = model.generate(inp.cuda(), GENERATE_LENGTH)
output_str = decode_tokens(sample)
print(output_str)
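# Launch sketch (standard DeepSpeed CLI; the JSON below is a minimal,
# illustrative config, not a tuned one):
#
#   deepspeed train.py --deepspeed --deepspeed_config ds_config.json
#
# ds_config.json:
#   {
#     "train_batch_size": 32,
#     "gradient_accumulation_steps": 1,
#     "fp16": {"enabled": true}
#   }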
|
[
"[email protected]"
] | |
0e8a1b16ca9cc75722a90fa7c6b30afde32791a2
|
2e0908cf2dce87036e9b9d32f23d7b7e20b2127f
|
/tests/test_algebra_onnx_operator_mixin_syntax.py
|
76c44cd4a7b0b1415b9dfb9445e83e909f87b88a
|
[
"MIT"
] |
permissive
|
jtpils/sklearn-onnx
|
a74b621c47ee02f4b0775c6f99091f6d79873650
|
5a065b767ec0e658f671e6313c2a3392b0dc81d6
|
refs/heads/master
| 2020-06-21T10:52:57.936832 | 2019-07-17T15:53:40 | 2019-07-17T15:53:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,561 |
py
|
import unittest
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.cluster import KMeans
from sklearn.pipeline import make_pipeline
from skl2onnx import convert_sklearn, to_onnx, wrap_as_onnx_mixin
from skl2onnx.common.data_types import FloatTensorType
from skl2onnx.algebra.onnx_ops import OnnxSub, OnnxDiv
from skl2onnx.algebra.onnx_operator_mixin import OnnxOperatorMixin
from test_utils import dump_data_and_model
class CustomOpTransformer(BaseEstimator, TransformerMixin,
OnnxOperatorMixin):
def __init__(self):
BaseEstimator.__init__(self)
TransformerMixin.__init__(self)
def fit(self, X, y=None):
self.W_ = np.mean(X, axis=0)
self.S_ = np.std(X, axis=0)
return self
def transform(self, X):
return (X - self.W_) / self.S_
def onnx_shape_calculator(self):
def shape_calculator(operator):
operator.outputs[0].type = operator.inputs[0].type
return shape_calculator
def to_onnx_operator(self, inputs=None, outputs=('Y', )):
if inputs is None:
raise RuntimeError("inputs should contain one name")
i0 = self.get_inputs(inputs, 0)
W = self.W_
S = self.S_
return OnnxDiv(OnnxSub(i0, W), S,
output_names=outputs)
class TestOnnxOperatorMixinSyntax(unittest.TestCase):
def test_way1_convert_sklean(self):
X = np.arange(20).reshape(10, 2)
tr = KMeans(n_clusters=2)
tr.fit(X)
onx = convert_sklearn(
tr, initial_types=[('X', FloatTensorType((1, X.shape[1])))])
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinWay1ConvertSklearn")
def test_way2_to_onnx(self):
X = np.arange(20).reshape(10, 2)
tr = KMeans(n_clusters=2)
tr.fit(X)
onx = to_onnx(tr, X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinWay2ToOnnx")
def test_way3_mixin(self):
X = np.arange(20).reshape(10, 2)
tr = KMeans(n_clusters=2)
tr.fit(X)
tr_mixin = wrap_as_onnx_mixin(tr)
try:
onx = tr_mixin.to_onnx()
except RuntimeError as e:
assert "Method enumerate_initial_types" in str(e)
onx = tr_mixin.to_onnx(X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinWay3OnnxMixin")
def test_way4_mixin_fit(self):
X = np.arange(20).reshape(10, 2)
tr = wrap_as_onnx_mixin(KMeans(n_clusters=2))
tr.fit(X)
onx = tr.to_onnx(X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinWay4OnnxMixin2")
def test_pipe_way1_convert_sklean(self):
X = np.arange(20).reshape(10, 2)
tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
tr.fit(X)
onx = convert_sklearn(
tr, initial_types=[('X', FloatTensorType((1, X.shape[1])))])
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinPipeWay1ConvertSklearn")
def test_pipe_way2_to_onnx(self):
X = np.arange(20).reshape(10, 2)
tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
tr.fit(X)
onx = to_onnx(tr, X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinPipeWay2ToOnnx")
def test_pipe_way3_mixin(self):
X = np.arange(20).reshape(10, 2)
tr = make_pipeline(CustomOpTransformer(), KMeans(n_clusters=2))
tr.fit(X)
tr_mixin = wrap_as_onnx_mixin(tr)
try:
onx = tr_mixin.to_onnx()
except RuntimeError as e:
assert "Method enumerate_initial_types" in str(e)
onx = tr_mixin.to_onnx(X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinPipeWay3OnnxMixin")
def test_pipe_way4_mixin_fit(self):
X = np.arange(20).reshape(10, 2)
tr = wrap_as_onnx_mixin(make_pipeline(
CustomOpTransformer(), KMeans(n_clusters=2)))
tr.fit(X)
onx = tr.to_onnx(X.astype(np.float32))
dump_data_and_model(
X.astype(np.float32), tr, onx,
basename="MixinPipeWay4OnnxMixin2")
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
a81398619c9e57b2cc4e2944013390a7e0a0f278
|
cad762658ab8326d7f43bba6f69df35a8b770e34
|
/pymarkdown/extension_impl.py
|
3cbc7feccab92d85f19b654a097b64f69975075c
|
[
"MIT"
] |
permissive
|
ExternalRepositories/pymarkdown
|
9c248b519791a4c869d1e71fa405c06d15ce553b
|
479ace2d2d9dd5def81c72ef3b58bce6fb76f594
|
refs/heads/main
| 2023-08-28T03:45:25.536530 | 2021-10-31T19:39:22 | 2021-10-31T19:39:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,698 |
py
|
"""
Module to allow for the details on the extension to be encapsulated.
"""
# pylint: disable=too-many-instance-attributes
class ExtensionDetails:
"""
Class to allow for the details on the extension to be encapsulated.
"""
# pylint: disable=too-many-arguments
def __init__(
self,
extension_id,
extension_name,
extension_description,
extension_enabled_by_default,
extension_version,
extension_interface_version,
extension_url=None,
extension_configuration=None,
):
(
self.__extension_id,
self.__extension_name,
self.__extension_description,
self.__extension_enabled_by_default,
self.__extension_version,
self.__extension_interface_version,
self.__extension_url,
self.__extension_configuration,
) = (
extension_id,
extension_name,
extension_description,
extension_enabled_by_default,
extension_version,
extension_interface_version,
extension_url,
extension_configuration,
)
# pylint: enable=too-many-arguments
@property
def extension_id(self):
"""
Property to get the id of the extension.
"""
return self.__extension_id
@property
def extension_name(self):
"""
Property to get the name of the extension.
"""
return self.__extension_name
@property
def extension_description(self):
"""
Property to get the short description of the extension.
"""
return self.__extension_description
@property
def extension_enabled_by_default(self):
"""
Property to get whether the extension is enabled by default.
"""
return self.__extension_enabled_by_default
@property
def extension_version(self):
"""
Property to get the version of the extension.
"""
return self.__extension_version
@property
def extension_interface_version(self):
"""
Property to get the interface version of the extension.
"""
return self.__extension_interface_version
@property
def extension_url(self):
"""
Property to get the optional url for the extension.
"""
return self.__extension_url
@property
def extension_configuration(self):
"""
Property to get the optional configuration items for the extension.
"""
return self.__extension_configuration
# pylint: enable=too-many-instance-attributes
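# Minimal construction sketch (all values illustrative, not a real plugin):
EXAMPLE_EXTENSION = ExtensionDetails(
    extension_id="markdown-example",
    extension_name="Example Extension",
    extension_description="Shows how the details object is filled in.",
    extension_enabled_by_default=False,
    extension_version="0.1.0",
    extension_interface_version=1,
)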
|
[
"[email protected]"
] | |
1392d544e99615b3a5c8ab242d8ed3a12033d286
|
85e6a839747983e3fa177e6f02464cd3ca0ac978
|
/APIDemo/urls.py
|
b5d79fe33791152866490185a7306f0797654719
|
[] |
no_license
|
wzw5566/JWT_APIDemo
|
27a8af0a722effef0bd8ec3a4af0c80d4524a6d7
|
291f781b8abc60f07494c7be3c915914e974eb94
|
refs/heads/master
| 2020-03-29T08:29:51.043583 | 2018-09-21T05:26:03 | 2018-09-21T05:26:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,030 |
py
|
"""APIDemo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from rest_framework_jwt.views import obtain_jwt_token
urlpatterns = [
path('admin/', admin.site.urls),
    path('', include('api.urls')),  # routes for the api app
]
urlpatterns += [
path('auth/', obtain_jwt_token),
path('auth/', include('rest_framework.urls',
namespace='rest_framework')),
]
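# Token request sketch (default rest_framework_jwt behaviour; host and
# credentials are placeholders):
#
#   curl -X POST http://localhost:8000/auth/ \
#        -H "Content-Type: application/json" \
#        -d '{"username": "alice", "password": "secret"}'
#
# A successful response contains {"token": "<JWT>"}.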
|
[
"[email protected]"
] | |
e8d0c61636321004aa67ee998a47d378eddd38ae
|
52a4d869976a97498bdf56a8d0ff92cac138a136
|
/Bioinformatics Textbook Track/Chapter 2/rosalind_ba2e.py
|
906b2d24c3cd9f5d49524b64c49fbd545e591f26
|
[] |
no_license
|
aakibinesar/Rosalind
|
d726369a787d848cc378976b886189978a60a3a5
|
375bbdbfb16bf11b2f980701bbd0ba74a1605cdb
|
refs/heads/master
| 2022-08-18T09:36:00.941080 | 2020-05-24T18:49:38 | 2020-05-24T18:49:38 | 264,722,651 | 0 | 0 | null | 2020-05-17T17:51:03 | 2020-05-17T17:40:59 | null |
UTF-8
|
Python
| false | false | 1,608 |
py
|
def greedymotifsearch(dna,k,t):
best = [s[:k] for s in dna]
for i in range(len(dna[0])-k+1):
tempbest = [dna[0][i:i+k]]
for m in range(1,t):
matrix = motifsToProfile(tempbest) # different from ba2d
tempbest.append(profileMostProbablekmer(dna[m],k,matrix))
if score(tempbest) < score(best):
best = tempbest
return best
def score(motifs):
z = zip(*motifs)
thescore = 0
for string in z:
score = len(string) - max([string.count('A'), string.count('C'), string.count('G'), string.count('T')])
thescore += score
return thescore
def motifsToProfile(motifs):
d = {}
n = float(len(motifs))
z = list(zip(*motifs))
for i in range(len(z)):
d.setdefault('A', []).append((z[i].count('A')+1)/n/2)
d.setdefault('C', []).append((z[i].count('C')+1)/n/2)
d.setdefault('G', []).append((z[i].count('G')+1)/n/2)
d.setdefault('T', []).append((z[i].count('T')+1)/n/2)
return d
def profileMostProbablekmer(text, k , matrix):
maxp = None
probablekmer = None
for i in range(len(text)-k+1):
kmer = text[i:i+k]
pt = 1
for j in range(k):
p = matrix[kmer[j]][j]
pt *=p
if maxp == None or pt > maxp:
maxp = pt
probablekmer = kmer
return probablekmer
with open('rosalind_ba2e.txt') as f:
k,t = map(int,f.readline().rstrip().split(' '))
strings = [st.rstrip() for st in f.readlines()]
print('\n'.join(greedymotifsearch(strings,k,t))) # note: greedy motif search is a heuristic, so the reported motifs may not be optimal
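# Expected input format for rosalind_ba2e.txt: the first line holds "k t",
# followed by t DNA strings. Sample from the BA2E problem statement:
#
#   3 5
#   GGCGTTCAGGCA
#   AAGAATCAGTCA
#   CAAGGAGTTCGC
#   CACGTCAATCAC
#   CAATAATATTCG
#
# for which the greedy search with pseudocounts prints
# TTC, ATC, TTC, ATC, TTC (one per line).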
|
[
"[email protected]"
] | |
76517b2899271bb65076aab77a02cd61ec65495b
|
f4335e8e7d3010506f570167bbba18156d3a4674
|
/stubs/django/contrib/admin/options.pyi
|
4d2f440cdc079cf7fd64a5fa6ae8452acfeb3908
|
[] |
no_license
|
rtpg/typehangar
|
133686ea45ad6187b768290aeebda9cbcae25586
|
790d057497c4791a38f9e3e009b07935b4a12f45
|
refs/heads/master
| 2021-01-19T04:49:17.940793 | 2017-01-16T13:54:14 | 2017-01-16T13:54:14 | 69,260,488 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,467 |
pyi
|
# Stubs for django.contrib.admin.options (Python 3.5)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any, Optional
from django.utils.translation import ugettext as _
IS_POPUP_VAR = ... # type: str
TO_FIELD_VAR = ... # type: str
HORIZONTAL = ... # type: Any
VERTICAL = ... # type: Any
def get_content_type_for_model(obj): ...
def get_ul_class(radio_style): ...
class IncorrectLookupParameters(Exception): ...
FORMFIELD_FOR_DBFIELD_DEFAULTS = ... # type: Any
csrf_protect_m = ... # type: Any
class BaseModelAdmin:
raw_id_fields = ... # type: Any
fields = ... # type: Any
exclude = ... # type: Any
fieldsets = ... # type: Any
form = ... # type: Any
filter_vertical = ... # type: Any
filter_horizontal = ... # type: Any
radio_fields = ... # type: Any
prepopulated_fields = ... # type: Any
formfield_overrides = ... # type: Any
readonly_fields = ... # type: Any
ordering = ... # type: Any
view_on_site = ... # type: bool
show_full_result_count = ... # type: bool
checks_class = ... # type: Any
def check(self, **kwargs): ...
def __init__(self) -> None: ...
def formfield_for_dbfield(self, db_field, **kwargs): ...
def formfield_for_choice_field(self, db_field, request: Optional[Any] = ..., **kwargs): ...
def get_field_queryset(self, db, db_field, request): ...
def formfield_for_foreignkey(self, db_field, request: Optional[Any] = ..., **kwargs): ...
def formfield_for_manytomany(self, db_field, request: Optional[Any] = ..., **kwargs): ...
def get_view_on_site_url(self, obj: Optional[Any] = ...): ...
def get_empty_value_display(self): ...
def get_fields(self, request, obj: Optional[Any] = ...): ...
def get_fieldsets(self, request, obj: Optional[Any] = ...): ...
def get_ordering(self, request): ...
def get_readonly_fields(self, request, obj: Optional[Any] = ...): ...
def get_prepopulated_fields(self, request, obj: Optional[Any] = ...): ...
def get_queryset(self, request): ...
def lookup_allowed(self, lookup, value): ...
def to_field_allowed(self, request, to_field): ...
def has_add_permission(self, request): ...
def has_change_permission(self, request, obj: Optional[Any] = ...): ...
def has_delete_permission(self, request, obj: Optional[Any] = ...): ...
def has_module_permission(self, request): ...
class ModelAdmin(BaseModelAdmin):
list_display = ... # type: Any
list_display_links = ... # type: Any
list_filter = ... # type: Any
list_select_related = ... # type: bool
list_per_page = ... # type: int
list_max_show_all = ... # type: int
list_editable = ... # type: Any
search_fields = ... # type: Any
date_hierarchy = ... # type: Any
save_as = ... # type: bool
save_on_top = ... # type: bool
paginator = ... # type: Any
preserve_filters = ... # type: bool
inlines = ... # type: Any
add_form_template = ... # type: Any
change_form_template = ... # type: Any
change_list_template = ... # type: Any
delete_confirmation_template = ... # type: Any
delete_selected_confirmation_template = ... # type: Any
object_history_template = ... # type: Any
actions = ... # type: Any
action_form = ... # type: Any
actions_on_top = ... # type: bool
actions_on_bottom = ... # type: bool
actions_selection_counter = ... # type: bool
checks_class = ... # type: Any
model = ... # type: Any
opts = ... # type: Any
admin_site = ... # type: Any
def __init__(self, model, admin_site) -> None: ...
def get_inline_instances(self, request, obj: Optional[Any] = ...): ...
def get_urls(self): ...
def urls(self): ...
urls = ... # type: Any
@property
def media(self): ...
def get_model_perms(self, request): ...
def get_fields(self, request, obj: Optional[Any] = ...): ...
def get_form(self, request, obj: Optional[Any] = ..., **kwargs): ...
def get_changelist(self, request, **kwargs): ...
def get_object(self, request, object_id, from_field: Optional[Any] = ...): ...
def get_changelist_form(self, request, **kwargs): ...
def get_changelist_formset(self, request, **kwargs): ...
def get_formsets_with_inlines(self, request, obj: Optional[Any] = ...): ...
def get_paginator(self, request, queryset, per_page, orphans: int = ..., allow_empty_first_page: bool = ...): ...
def log_addition(self, request, object, message): ...
def log_change(self, request, object, message): ...
def log_deletion(self, request, object, object_repr): ...
def action_checkbox(self, obj): ...
def get_actions(self, request): ...
def get_action_choices(self, request, default_choices: Any = ...): ...
def get_action(self, action): ...
def get_list_display(self, request): ...
def get_list_display_links(self, request, list_display): ...
def get_list_filter(self, request): ...
def get_list_select_related(self, request): ...
def get_search_fields(self, request): ...
def get_search_results(self, request, queryset, search_term): ...
def get_preserved_filters(self, request): ...
def construct_change_message(self, request, form, formsets, add: bool = ...): ...
def message_user(self, request, message, level: Any = ..., extra_tags: str = ..., fail_silently: bool = ...): ...
def save_form(self, request, form, change): ...
def save_model(self, request, obj, form, change): ...
def delete_model(self, request, obj): ...
def save_formset(self, request, form, formset, change): ...
def save_related(self, request, form, formsets, change): ...
def render_change_form(self, request, context, add: bool = ..., change: bool = ..., form_url: str = ..., obj: Optional[Any] = ...): ...
def response_add(self, request, obj, post_url_continue: Optional[Any] = ...): ...
def response_change(self, request, obj): ...
def response_post_save_add(self, request, obj): ...
def response_post_save_change(self, request, obj): ...
def response_action(self, request, queryset): ...
def response_delete(self, request, obj_display, obj_id): ...
def render_delete_form(self, request, context): ...
def get_inline_formsets(self, request, formsets, inline_instances, obj: Optional[Any] = ...): ...
def get_changeform_initial_data(self, request): ...
def changeform_view(self, request, object_id: Optional[Any] = ..., form_url: str = ..., extra_context: Optional[Any] = ...): ...
def add_view(self, request, form_url: str = ..., extra_context: Optional[Any] = ...): ...
def change_view(self, request, object_id, form_url: str = ..., extra_context: Optional[Any] = ...): ...
def changelist_view(self, request, extra_context: Optional[Any] = ...): ...
def delete_view(self, request, object_id, extra_context: Optional[Any] = ...): ...
def history_view(self, request, object_id, extra_context: Optional[Any] = ...): ...
class InlineModelAdmin(BaseModelAdmin):
model = ... # type: Any
fk_name = ... # type: Any
formset = ... # type: Any
extra = ... # type: int
min_num = ... # type: Any
max_num = ... # type: Any
template = ... # type: Any
verbose_name = ... # type: Any
verbose_name_plural = ... # type: Any
can_delete = ... # type: bool
show_change_link = ... # type: bool
checks_class = ... # type: Any
admin_site = ... # type: Any
parent_model = ... # type: Any
opts = ... # type: Any
has_registered_model = ... # type: Any
def __init__(self, parent_model, admin_site) -> None: ...
@property
def media(self): ...
def get_extra(self, request, obj: Optional[Any] = ..., **kwargs): ...
def get_min_num(self, request, obj: Optional[Any] = ..., **kwargs): ...
def get_max_num(self, request, obj: Optional[Any] = ..., **kwargs): ...
def get_formset(self, request, obj: Optional[Any] = ..., **kwargs): ...
def get_fields(self, request, obj: Optional[Any] = ...): ...
def get_queryset(self, request): ...
def has_add_permission(self, request): ...
def has_change_permission(self, request, obj: Optional[Any] = ...): ...
def has_delete_permission(self, request, obj: Optional[Any] = ...): ...
class StackedInline(InlineModelAdmin):
template = ... # type: str
class TabularInline(InlineModelAdmin):
template = ... # type: str
|
[
"[email protected]"
] |